From 7df8c2935ab9486d45a4dbb8a1b7652fec1a4de4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89milie=20Feral?= Date: Mon, 6 Apr 2020 14:57:17 +0200 Subject: [PATCH] [python] upgrade to micropython 1.12 --- python/Makefile | 4 + python/port/genhdr/moduledefs.h | 8 +- python/port/port.cpp | 4 +- python/src/extmod/modurandom.c | 4 + python/src/py/asmarm.c | 6 +- python/src/py/asmbase.c | 4 +- python/src/py/asmbase.h | 2 +- python/src/py/asmxtensa.c | 42 +- python/src/py/asmxtensa.h | 80 ++- python/src/py/bc.c | 144 ++--- python/src/py/bc.h | 201 ++++++- python/src/py/bc0.h | 186 +++--- python/src/py/binary.c | 28 +- python/src/py/binary.h | 16 +- python/src/py/builtin.h | 3 +- python/src/py/builtinhelp.c | 4 - python/src/py/builtinimport.c | 39 +- python/src/py/compile.c | 104 ++-- python/src/py/compile.h | 4 +- python/src/py/emit.h | 13 +- python/src/py/emitbc.c | 335 +++++------ python/src/py/emitglue.c | 17 +- python/src/py/emitglue.h | 13 +- python/src/py/emitnative.c | 195 +++++-- python/src/py/emitnx86.c | 12 +- python/src/py/emitnxtensawin.c | 23 + python/src/py/grammar.h | 10 +- python/src/py/lexer.c | 6 +- python/src/py/lexer.h | 63 +- python/src/py/makeqstrdata.py | 6 +- python/src/py/modarray.c | 6 +- python/src/py/modio.c | 11 +- python/src/py/modmath.c | 39 ++ python/src/py/modmicropython.c | 2 +- python/src/py/modstruct.c | 8 +- python/src/py/modsys.c | 59 +- python/src/py/moduerrno.c | 4 +- python/src/py/mpconfig.h | 59 +- python/src/py/mphal.h | 4 + python/src/py/mpstate.h | 21 +- python/src/py/nativeglue.c | 141 ++++- python/src/py/nativeglue.h | 177 ++++++ python/src/py/nlr.h | 29 +- python/src/py/nlrpowerpc.c | 121 ++++ python/src/py/nlrthumb.c | 22 +- python/src/py/nlrx64.c | 2 +- python/src/py/nlrx86.c | 2 +- python/src/py/nlrxtensa.c | 2 +- python/src/py/obj.c | 4 +- python/src/py/obj.h | 1 - python/src/py/objarray.c | 3 +- python/src/py/objdict.c | 12 +- python/src/py/objenumerate.c | 2 +- python/src/py/objexcept.c | 7 +- python/src/py/objfun.c | 34 +- python/src/py/objfun.h | 3 + python/src/py/objgenerator.c | 81 ++- python/src/py/objint.c | 2 +- python/src/py/objmodule.c | 29 +- python/src/py/objmodule.h | 4 +- python/src/py/objstr.c | 9 +- python/src/py/objstringio.c | 14 +- python/src/py/objtuple.c | 8 +- python/src/py/objtype.c | 20 +- python/src/py/parse.c | 46 +- python/src/py/parsenum.c | 2 +- python/src/py/persistentcode.c | 295 ++++++---- python/src/py/persistentcode.h | 61 +- python/src/py/profile.c | 984 ++++++++++++++++++++++++++++++++ python/src/py/profile.h | 79 +++ python/src/py/qstr.h | 4 +- python/src/py/repl.c | 2 +- python/src/py/ringbuf.c | 73 +++ python/src/py/ringbuf.h | 18 +- python/src/py/runtime.c | 52 +- python/src/py/runtime.h | 4 - python/src/py/runtime0.h | 131 ++--- python/src/py/scheduler.c | 8 +- python/src/py/sequence.c | 2 +- python/src/py/showbc.c | 53 +- python/src/py/stream.h | 9 +- python/src/py/vm.c | 445 +++++++++------ python/src/py/vmentrytable.h | 14 +- 83 files changed, 3516 insertions(+), 1284 deletions(-) create mode 100644 python/src/py/emitnxtensawin.c create mode 100644 python/src/py/nativeglue.h create mode 100644 python/src/py/nlrpowerpc.c create mode 100644 python/src/py/profile.c create mode 100644 python/src/py/profile.h create mode 100644 python/src/py/ringbuf.c diff --git a/python/Makefile b/python/Makefile index 6f1094a10..503d7fad1 100644 --- a/python/Makefile +++ b/python/Makefile @@ -13,6 +13,7 @@ py_src = $(addprefix python/src/py/,\ nlrx86.c \ nlrx64.c \ nlrthumb.c \ + nlrpowerpc.c \ nlrxtensa.c \ nlrsetjmp.c 
\ malloc.c \ @@ -43,6 +44,7 @@ py_src = $(addprefix python/src/py/,\ asmxtensa.c \ emitnxtensa.c \ emitinlinextensa.c \ + emitnxtensawin.c \ formatfloat.c \ parsenumbase.c \ parsenum.c \ @@ -52,9 +54,11 @@ py_src = $(addprefix python/src/py/,\ runtime_utils.c \ scheduler.c \ nativeglue.c \ + ringbuf.c \ stackctrl.c \ argcheck.c \ warning.c \ + profile.c \ map.c \ obj.c \ objarray.c \ diff --git a/python/port/genhdr/moduledefs.h b/python/port/genhdr/moduledefs.h index 9f758732c..c86562df8 100644 --- a/python/port/genhdr/moduledefs.h +++ b/python/port/genhdr/moduledefs.h @@ -1,13 +1,13 @@ // Automatically generated by makemoduledefs.py. #if (MICROPY_PY_ARRAY) - extern const struct _mp_obj_module_t mp_module_array; - #define MODULE_DEF_MP_QSTR_ARRAY { MP_ROM_QSTR(MP_QSTR_array), MP_ROM_PTR(&mp_module_array) }, + extern const struct _mp_obj_module_t mp_module_uarray; + #define MODULE_DEF_MP_QSTR_UARRAY { MP_ROM_QSTR(MP_QSTR_uarray), MP_ROM_PTR(&mp_module_uarray) }, #else - #define MODULE_DEF_MP_QSTR_ARRAY + #define MODULE_DEF_MP_QSTR_UARRAY #endif #define MICROPY_REGISTERED_MODULES \ - MODULE_DEF_MP_QSTR_ARRAY \ + MODULE_DEF_MP_QSTR_UARRAY \ // MICROPY_REGISTERED_MODULES diff --git a/python/port/port.cpp b/python/port/port.cpp index be13fbe7a..12f5f5588 100644 --- a/python/port/port.cpp +++ b/python/port/port.cpp @@ -46,7 +46,7 @@ bool MicroPython::ExecutionEnvironment::runCode(const char * str) { * to be fed lines and not files. */ // TODO: add a parameter when other input types (file, eval) are required mp_parse_tree_t pt = mp_parse(lex, MP_PARSE_SINGLE_INPUT); - mp_obj_t module_fun = mp_compile(&pt, lex->source_name, MP_EMIT_OPT_NONE, true); + mp_obj_t module_fun = mp_compile(&pt, lex->source_name, true); mp_call_function_0(module_fun); nlr_pop(); } else { // Uncaught exception @@ -71,7 +71,7 @@ bool MicroPython::ExecutionEnvironment::runCode(const char * str) { #endif // the block name can be NULL if it's unknown qstr block = values[i + 2]; - if (block == MP_QSTR_NULL) { + if (block == MP_QSTRnull) { mp_print_str(&mp_plat_print, "\n"); } else { mp_printf(&mp_plat_print, ", in %q\n", block); diff --git a/python/src/extmod/modurandom.c b/python/src/extmod/modurandom.c index 2e667570d..e3c7fc7f5 100644 --- a/python/src/extmod/modurandom.c +++ b/python/src/extmod/modurandom.c @@ -36,8 +36,10 @@ // http://www.literatecode.com/yasmarang // Public Domain +#if !MICROPY_ENABLE_DYNRUNTIME STATIC uint32_t yasmarang_pad = 0xeda4baba, yasmarang_n = 69, yasmarang_d = 233; STATIC uint8_t yasmarang_dat = 0; +#endif STATIC uint32_t yasmarang(void) { @@ -208,6 +210,7 @@ STATIC mp_obj_t mod_urandom___init__() { STATIC MP_DEFINE_CONST_FUN_OBJ_0(mod_urandom___init___obj, mod_urandom___init__); #endif +#if !MICROPY_ENABLE_DYNRUNTIME STATIC const mp_rom_map_elem_t mp_module_urandom_globals_table[] = { { MP_ROM_QSTR(MP_QSTR___name__), MP_ROM_QSTR(MP_QSTR_urandom) }, #ifdef MICROPY_PY_URANDOM_SEED_INIT_FUNC @@ -232,5 +235,6 @@ const mp_obj_module_t mp_module_urandom = { .base = { &mp_type_module }, .globals = (mp_obj_dict_t*)&mp_module_urandom_globals, }; +#endif #endif //MICROPY_PY_URANDOM diff --git a/python/src/py/asmarm.c b/python/src/py/asmarm.c index f2221f8a9..59c661cc0 100644 --- a/python/src/py/asmarm.c +++ b/python/src/py/asmarm.c @@ -40,7 +40,11 @@ void asm_arm_end_pass(asm_arm_t *as) { if (as->base.pass == MP_ASM_PASS_EMIT) { -#ifdef __arm__ +#if defined(__linux__) && defined(__GNUC__) + char *start = mp_asm_base_get_code(&as->base); + char *end = start + mp_asm_base_get_code_size(&as->base); + 
__builtin___clear_cache(start, end); +#elif defined(__arm__) // flush I- and D-cache asm volatile( "0:" diff --git a/python/src/py/asmbase.c b/python/src/py/asmbase.c index 4c84c3b25..ab861da15 100644 --- a/python/src/py/asmbase.c +++ b/python/src/py/asmbase.c @@ -31,7 +31,7 @@ #include "py/misc.h" #include "py/asmbase.h" -#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM +#if MICROPY_EMIT_MACHINE_CODE void mp_asm_base_init(mp_asm_base_t *as, size_t max_num_labels) { as->max_num_labels = max_num_labels; @@ -99,4 +99,4 @@ void mp_asm_base_data(mp_asm_base_t* as, unsigned int bytesize, uintptr_t val) { } } -#endif // MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM +#endif // MICROPY_EMIT_MACHINE_CODE diff --git a/python/src/py/asmbase.h b/python/src/py/asmbase.h index d2b403893..b5e259358 100644 --- a/python/src/py/asmbase.h +++ b/python/src/py/asmbase.h @@ -60,7 +60,7 @@ static inline size_t mp_asm_base_get_code_size(mp_asm_base_t *as) { static inline void *mp_asm_base_get_code(mp_asm_base_t *as) { #if defined(MP_PLAT_COMMIT_EXEC) - return MP_PLAT_COMMIT_EXEC(as->code_base, as->code_size); + return MP_PLAT_COMMIT_EXEC(as->code_base, as->code_size, NULL); #else return as->code_base; #endif diff --git a/python/src/py/asmxtensa.c b/python/src/py/asmxtensa.c index a269e5e7f..32e5e958a 100644 --- a/python/src/py/asmxtensa.c +++ b/python/src/py/asmxtensa.c @@ -30,14 +30,13 @@ #include "py/mpconfig.h" // wrapper around everything in this file -#if MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA +#if MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA || MICROPY_EMIT_XTENSAWIN #include "py/asmxtensa.h" #define WORD_SIZE (4) #define SIGNED_FIT8(x) ((((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80)) #define SIGNED_FIT12(x) ((((x) & 0xfffff800) == 0) || (((x) & 0xfffff800) == 0xfffff800)) -#define NUM_REGS_SAVED (5) void asm_xtensa_end_pass(asm_xtensa_t *as) { as->num_const = as->cur_const; @@ -69,7 +68,7 @@ void asm_xtensa_entry(asm_xtensa_t *as, int num_locals) { as->const_table = (uint32_t*)mp_asm_base_get_cur_to_write_bytes(&as->base, as->num_const * 4); // adjust the stack-pointer to store a0, a12, a13, a14, a15 and locals, 16-byte aligned - as->stack_adjust = (((NUM_REGS_SAVED + num_locals) * WORD_SIZE) + 15) & ~15; + as->stack_adjust = (((ASM_XTENSA_NUM_REGS_SAVED + num_locals) * WORD_SIZE) + 15) & ~15; if (SIGNED_FIT8(-as->stack_adjust)) { asm_xtensa_op_addi(as, ASM_XTENSA_REG_A1, ASM_XTENSA_REG_A1, -as->stack_adjust); } else { @@ -79,14 +78,14 @@ void asm_xtensa_entry(asm_xtensa_t *as, int num_locals) { // save return value (a0) and callee-save registers (a12, a13, a14, a15) asm_xtensa_op_s32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0); - for (int i = 1; i < NUM_REGS_SAVED; ++i) { + for (int i = 1; i < ASM_XTENSA_NUM_REGS_SAVED; ++i) { asm_xtensa_op_s32i_n(as, ASM_XTENSA_REG_A11 + i, ASM_XTENSA_REG_A1, i); } } void asm_xtensa_exit(asm_xtensa_t *as) { // restore registers - for (int i = NUM_REGS_SAVED - 1; i >= 1; --i) { + for (int i = ASM_XTENSA_NUM_REGS_SAVED - 1; i >= 1; --i) { asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A11 + i, ASM_XTENSA_REG_A1, i); } asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0); @@ -102,6 +101,22 @@ void asm_xtensa_exit(asm_xtensa_t *as) { asm_xtensa_op_ret_n(as); } +void asm_xtensa_entry_win(asm_xtensa_t *as, int num_locals) { + // jump over the constants + asm_xtensa_op_j(as, as->num_const * WORD_SIZE + 4 - 4); + mp_asm_base_get_cur_to_write_bytes(&as->base, 1); // padding/alignment byte + as->const_table = 
(uint32_t*)mp_asm_base_get_cur_to_write_bytes(&as->base, as->num_const * 4); + + as->stack_adjust = 32 + ((((ASM_XTENSA_NUM_REGS_SAVED_WIN + num_locals) * WORD_SIZE) + 15) & ~15); + asm_xtensa_op_entry(as, ASM_XTENSA_REG_A1, as->stack_adjust); + asm_xtensa_op_s32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0); +} + +void asm_xtensa_exit_win(asm_xtensa_t *as) { + asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A0, ASM_XTENSA_REG_A1, 0); + asm_xtensa_op_retw_n(as); +} + STATIC uint32_t get_label_dest(asm_xtensa_t *as, uint label) { assert(label < as->base.max_num_labels); return as->base.label_offsets[label]; @@ -178,15 +193,15 @@ void asm_xtensa_mov_reg_i32_optimised(asm_xtensa_t *as, uint reg_dest, uint32_t } void asm_xtensa_mov_local_reg(asm_xtensa_t *as, int local_num, uint reg_src) { - asm_xtensa_op_s32i(as, reg_src, ASM_XTENSA_REG_A1, NUM_REGS_SAVED + local_num); + asm_xtensa_op_s32i(as, reg_src, ASM_XTENSA_REG_A1, local_num); } void asm_xtensa_mov_reg_local(asm_xtensa_t *as, uint reg_dest, int local_num) { - asm_xtensa_op_l32i(as, reg_dest, ASM_XTENSA_REG_A1, NUM_REGS_SAVED + local_num); + asm_xtensa_op_l32i(as, reg_dest, ASM_XTENSA_REG_A1, local_num); } void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_num) { - uint off = (NUM_REGS_SAVED + local_num) * WORD_SIZE; + uint off = local_num * WORD_SIZE; if (SIGNED_FIT8(off)) { asm_xtensa_op_addi(as, reg_dest, ASM_XTENSA_REG_A1, off); } else { @@ -226,4 +241,13 @@ void asm_xtensa_call_ind(asm_xtensa_t *as, uint idx) { asm_xtensa_op_callx0(as, ASM_XTENSA_REG_A0); } -#endif // MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA +void asm_xtensa_call_ind_win(asm_xtensa_t *as, uint idx) { + if (idx < 16) { + asm_xtensa_op_l32i_n(as, ASM_XTENSA_REG_A8, ASM_XTENSA_REG_FUN_TABLE_WIN, idx); + } else { + asm_xtensa_op_l32i(as, ASM_XTENSA_REG_A8, ASM_XTENSA_REG_FUN_TABLE_WIN, idx); + } + asm_xtensa_op_callx8(as, ASM_XTENSA_REG_A8); +} + +#endif // MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA || MICROPY_EMIT_XTENSAWIN diff --git a/python/src/py/asmxtensa.h b/python/src/py/asmxtensa.h index d95af14a5..5eb40daf7 100644 --- a/python/src/py/asmxtensa.h +++ b/python/src/py/asmxtensa.h @@ -37,6 +37,16 @@ // callee save: a1, a12, a13, a14, a15 // caller save: a3 +// With windowed registers, size 8: +// - a0: return PC +// - a1: stack pointer, full descending, aligned to 16 bytes +// - a2-a7: incoming args, and essentially callee save +// - a2: return value +// - a8-a15: caller save temporaries +// - a10-a15: input args to called function +// - a10: return value of called function +// note: a0-a7 are saved automatically via window shift of called function + #define ASM_XTENSA_REG_A0 (0) #define ASM_XTENSA_REG_A1 (1) #define ASM_XTENSA_REG_A2 (2) @@ -96,6 +106,10 @@ #define ASM_XTENSA_ENCODE_RI7(op0, s, imm7) \ ((((imm7) & 0xf) << 12) | ((s) << 8) | ((imm7) & 0x70) | (op0)) +// Number of registers saved on the stack upon entry to function +#define ASM_XTENSA_NUM_REGS_SAVED (5) +#define ASM_XTENSA_NUM_REGS_SAVED_WIN (1) + typedef struct _asm_xtensa_t { mp_asm_base_t base; uint32_t cur_const; @@ -109,11 +123,18 @@ void asm_xtensa_end_pass(asm_xtensa_t *as); void asm_xtensa_entry(asm_xtensa_t *as, int num_locals); void asm_xtensa_exit(asm_xtensa_t *as); +void asm_xtensa_entry_win(asm_xtensa_t *as, int num_locals); +void asm_xtensa_exit_win(asm_xtensa_t *as); + void asm_xtensa_op16(asm_xtensa_t *as, uint16_t op); void asm_xtensa_op24(asm_xtensa_t *as, uint32_t op); // raw instructions +static inline void asm_xtensa_op_entry(asm_xtensa_t 
*as, uint reg_src, int32_t num_bytes) { + asm_xtensa_op24(as, ASM_XTENSA_ENCODE_BRI12(6, reg_src, 0, 3, (num_bytes / 8) & 0xfff)); +} + static inline void asm_xtensa_op_add_n(asm_xtensa_t *as, uint reg_dest, uint reg_src_a, uint reg_src_b) { asm_xtensa_op16(as, ASM_XTENSA_ENCODE_RRRN(10, reg_dest, reg_src_a, reg_src_b)); } @@ -142,6 +163,10 @@ static inline void asm_xtensa_op_callx0(asm_xtensa_t *as, uint reg) { asm_xtensa_op24(as, ASM_XTENSA_ENCODE_CALLX(0, 0, 0, 0, reg, 3, 0)); } +static inline void asm_xtensa_op_callx8(asm_xtensa_t *as, uint reg) { + asm_xtensa_op24(as, ASM_XTENSA_ENCODE_CALLX(0, 0, 0, 0, reg, 3, 2)); +} + static inline void asm_xtensa_op_j(asm_xtensa_t *as, int32_t rel18) { asm_xtensa_op24(as, ASM_XTENSA_ENCODE_CALL(6, 0, rel18 & 0x3ffff)); } @@ -194,6 +219,10 @@ static inline void asm_xtensa_op_ret_n(asm_xtensa_t *as) { asm_xtensa_op16(as, ASM_XTENSA_ENCODE_RRRN(13, 15, 0, 0)); } +static inline void asm_xtensa_op_retw_n(asm_xtensa_t *as) { + asm_xtensa_op16(as, ASM_XTENSA_ENCODE_RRRN(13, 15, 0, 1)); +} + static inline void asm_xtensa_op_s8i(asm_xtensa_t *as, uint reg_src, uint reg_base, uint byte_offset) { asm_xtensa_op24(as, ASM_XTENSA_ENCODE_RRI8(2, 4, reg_base, reg_src, byte_offset & 0xff)); } @@ -246,9 +275,11 @@ void asm_xtensa_mov_reg_local(asm_xtensa_t *as, uint reg_dest, int local_num); void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_num); void asm_xtensa_mov_reg_pcrel(asm_xtensa_t *as, uint reg_dest, uint label); void asm_xtensa_call_ind(asm_xtensa_t *as, uint idx); +void asm_xtensa_call_ind_win(asm_xtensa_t *as, uint idx); // Holds a pointer to mp_fun_table #define ASM_XTENSA_REG_FUN_TABLE ASM_XTENSA_REG_A15 +#define ASM_XTENSA_REG_FUN_TABLE_WIN ASM_XTENSA_REG_A7 #if GENERIC_ASM_API @@ -257,6 +288,9 @@ void asm_xtensa_call_ind(asm_xtensa_t *as, uint idx); #define ASM_WORD_SIZE (4) +#if !GENERIC_ASM_API_WIN +// Configuration for non-windowed calls + #define REG_RET ASM_XTENSA_REG_A2 #define REG_ARG_1 ASM_XTENSA_REG_A2 #define REG_ARG_2 ASM_XTENSA_REG_A3 @@ -273,12 +307,47 @@ void asm_xtensa_call_ind(asm_xtensa_t *as, uint idx); #define REG_LOCAL_3 ASM_XTENSA_REG_A14 #define REG_LOCAL_NUM (3) +#define ASM_NUM_REGS_SAVED ASM_XTENSA_NUM_REGS_SAVED #define REG_FUN_TABLE ASM_XTENSA_REG_FUN_TABLE +#define ASM_ENTRY(as, nlocal) asm_xtensa_entry((as), (nlocal)) +#define ASM_EXIT(as) asm_xtensa_exit((as)) +#define ASM_CALL_IND(as, idx) asm_xtensa_call_ind((as), (idx)) + +#else +// Configuration for windowed calls with window size 8 + +#define REG_PARENT_RET ASM_XTENSA_REG_A2 +#define REG_PARENT_ARG_1 ASM_XTENSA_REG_A2 +#define REG_PARENT_ARG_2 ASM_XTENSA_REG_A3 +#define REG_PARENT_ARG_3 ASM_XTENSA_REG_A4 +#define REG_PARENT_ARG_4 ASM_XTENSA_REG_A5 +#define REG_RET ASM_XTENSA_REG_A10 +#define REG_ARG_1 ASM_XTENSA_REG_A10 +#define REG_ARG_2 ASM_XTENSA_REG_A11 +#define REG_ARG_3 ASM_XTENSA_REG_A12 +#define REG_ARG_4 ASM_XTENSA_REG_A13 + +#define REG_TEMP0 ASM_XTENSA_REG_A10 +#define REG_TEMP1 ASM_XTENSA_REG_A11 +#define REG_TEMP2 ASM_XTENSA_REG_A12 + +#define REG_LOCAL_1 ASM_XTENSA_REG_A4 +#define REG_LOCAL_2 ASM_XTENSA_REG_A5 +#define REG_LOCAL_3 ASM_XTENSA_REG_A6 +#define REG_LOCAL_NUM (3) + +#define ASM_NUM_REGS_SAVED ASM_XTENSA_NUM_REGS_SAVED_WIN +#define REG_FUN_TABLE ASM_XTENSA_REG_FUN_TABLE_WIN + +#define ASM_ENTRY(as, nlocal) asm_xtensa_entry_win((as), (nlocal)) +#define ASM_EXIT(as) asm_xtensa_exit_win((as)) +#define ASM_CALL_IND(as, idx) asm_xtensa_call_ind_win((as), (idx)) + +#endif + #define ASM_T asm_xtensa_t #define 
ASM_END_PASS asm_xtensa_end_pass -#define ASM_ENTRY asm_xtensa_entry -#define ASM_EXIT asm_xtensa_exit #define ASM_JUMP asm_xtensa_j_label #define ASM_JUMP_IF_REG_ZERO(as, reg, label, bool_test) \ @@ -288,15 +357,14 @@ void asm_xtensa_call_ind(asm_xtensa_t *as, uint idx); #define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \ asm_xtensa_bcc_reg_reg_label(as, ASM_XTENSA_CC_EQ, reg1, reg2, label) #define ASM_JUMP_REG(as, reg) asm_xtensa_op_jx((as), (reg)) -#define ASM_CALL_IND(as, idx) asm_xtensa_call_ind((as), (idx)) -#define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_xtensa_mov_local_reg((as), (local_num), (reg_src)) +#define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_xtensa_mov_local_reg((as), ASM_NUM_REGS_SAVED + (local_num), (reg_src)) #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_xtensa_mov_reg_i32_optimised((as), (reg_dest), (imm)) #define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_xtensa_mov_reg_i32((as), (reg_dest), (imm)) #define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_xtensa_mov_reg_i32((as), (reg_dest), (imm)) -#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_xtensa_mov_reg_local((as), (reg_dest), (local_num)) +#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_xtensa_mov_reg_local((as), (reg_dest), ASM_NUM_REGS_SAVED + (local_num)) #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_xtensa_op_mov_n((as), (reg_dest), (reg_src)) -#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_xtensa_mov_reg_local_addr((as), (reg_dest), (local_num)) +#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_xtensa_mov_reg_local_addr((as), (reg_dest), ASM_NUM_REGS_SAVED + (local_num)) #define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_xtensa_mov_reg_pcrel((as), (reg_dest), (label)) #define ASM_LSL_REG_REG(as, reg_dest, reg_shift) \ diff --git a/python/src/py/bc.c b/python/src/py/bc.c index b178e7c20..7dd4b2246 100644 --- a/python/src/py/bc.c +++ b/python/src/py/bc.c @@ -40,6 +40,8 @@ #define DEBUG_printf(...) 
(void)0 #endif +#if !MICROPY_PERSISTENT_CODE + mp_uint_t mp_decode_uint(const byte **ptr) { mp_uint_t unum = 0; byte val; @@ -70,6 +72,8 @@ const byte *mp_decode_uint_skip(const byte *ptr) { return ptr; } +#endif + STATIC NORETURN void fun_pos_args_mismatch(mp_obj_fun_bc_t *f, size_t expected, size_t given) { #if MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE // generic message, used also for other argument issues @@ -119,16 +123,22 @@ void mp_setup_code_state(mp_code_state_t *code_state, size_t n_args, size_t n_kw code_state->prev = NULL; #endif - // get params - size_t n_state = mp_decode_uint(&code_state->ip); - code_state->ip = mp_decode_uint_skip(code_state->ip); // skip n_exc_stack - size_t scope_flags = *code_state->ip++; - size_t n_pos_args = *code_state->ip++; - size_t n_kwonly_args = *code_state->ip++; - size_t n_def_pos_args = *code_state->ip++; + #if MICROPY_PY_SYS_SETTRACE + code_state->prev_state = NULL; + code_state->frame = NULL; + #endif + + // Get cached n_state (rather than decode it again) + size_t n_state = code_state->n_state; + + // Decode prelude + size_t n_state_unused, n_exc_stack_unused, scope_flags, n_pos_args, n_kwonly_args, n_def_pos_args; + MP_BC_PRELUDE_SIG_DECODE_INTO(code_state->ip, n_state_unused, n_exc_stack_unused, scope_flags, n_pos_args, n_kwonly_args, n_def_pos_args); + (void)n_state_unused; + (void)n_exc_stack_unused; code_state->sp = &code_state->state[0] - 1; - code_state->exc_sp = (mp_exc_stack_t*)(code_state->state + n_state) - 1; + code_state->exc_sp_idx = 0; // zero out the local stack to begin with memset(code_state->state, 0, n_state * sizeof(*code_state->state)); @@ -263,19 +273,25 @@ continue2:; } } - // get the ip and skip argument names + // read the size part of the prelude const byte *ip = code_state->ip; + MP_BC_PRELUDE_SIZE_DECODE(ip); // jump over code info (source file and line-number mapping) - ip += mp_decode_uint_value(ip); + ip += n_info; // bytecode prelude: initialise closed over variables - size_t local_num; - while ((local_num = *ip++) != 255) { + for (; n_cell; --n_cell) { + size_t local_num = *ip++; code_state->state[n_state - 1 - local_num] = mp_obj_new_cell(code_state->state[n_state - 1 - local_num]); } + #if !MICROPY_PERSISTENT_CODE + // so bytecode is aligned + ip = MP_ALIGN(ip, sizeof(mp_uint_t)); + #endif + // now that we skipped over the prelude, set the ip for the VM code_state->ip = ip; @@ -287,105 +303,17 @@ continue2:; #if MICROPY_PERSISTENT_CODE_LOAD || MICROPY_PERSISTENT_CODE_SAVE // The following table encodes the number of bytes that a specific opcode -// takes up. There are 3 special opcodes that always have an extra byte: -// MP_BC_MAKE_CLOSURE -// MP_BC_MAKE_CLOSURE_DEFARGS -// MP_BC_RAISE_VARARGS +// takes up. Some opcodes have an extra byte, defined by MP_BC_MASK_EXTRA_BYTE. 
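For reference, the 64-entry opcode_format_table deleted above is replaced by a single bit-packed lookup: the new MP_BC_FORMAT macro (defined in the bc0.h hunk further down) derives an opcode's operand format from its top nibble. The following is a minimal, standalone sketch of that lookup, using only constants that appear elsewhere in this patch; it is illustrative and not part of the change itself.

#include <assert.h>
#include <stdio.h>

#define MP_BC_FORMAT_BYTE     (0)   /* opcode only */
#define MP_BC_FORMAT_QSTR     (1)   /* opcode + 2-byte qstr */
#define MP_BC_FORMAT_VAR_UINT (2)   /* opcode + variable-length unsigned int */
#define MP_BC_FORMAT_OFFSET   (3)   /* opcode + 2-byte bytecode offset */

/* Two bits per 16-opcode group (the top nibble), packed into one constant:
 * group 0x10 -> QSTR, 0x20/0x30 -> VAR_UINT, 0x40 -> OFFSET, the rest -> BYTE. */
#define MP_BC_FORMAT(op) ((0x000003a4 >> (2 * ((op) >> 4))) & 3)

int main(void) {
    assert(MP_BC_FORMAT(0x11) == MP_BC_FORMAT_QSTR);     /* MP_BC_LOAD_NAME = 0x10 + 0x01 */
    assert(MP_BC_FORMAT(0x22) == MP_BC_FORMAT_VAR_UINT); /* MP_BC_LOAD_CONST_SMALL_INT = 0x20 + 0x02 */
    assert(MP_BC_FORMAT(0x40) == MP_BC_FORMAT_OFFSET);   /* base of the 0x40 jump/offset group */
    assert(MP_BC_FORMAT(0x50) == MP_BC_FORMAT_BYTE);     /* MP_BC_LOAD_CONST_FALSE = 0x50 + 0x00 */
    printf("opcode format groups decode as expected\n");
    return 0;
}

Packing two bits per opcode group into a single 32-bit constant keeps the lookup constant-time while dropping the old 64-byte table from ROM.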
// There are 4 special opcodes that have an extra byte only when // MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE is enabled (and they take a qstr): // MP_BC_LOAD_NAME // MP_BC_LOAD_GLOBAL // MP_BC_LOAD_ATTR // MP_BC_STORE_ATTR -#define OC4(a, b, c, d) (a | (b << 2) | (c << 4) | (d << 6)) -#define U (0) // undefined opcode -#define B (MP_OPCODE_BYTE) // single byte -#define Q (MP_OPCODE_QSTR) // single byte plus 2-byte qstr -#define V (MP_OPCODE_VAR_UINT) // single byte plus variable encoded unsigned int -#define O (MP_OPCODE_OFFSET) // single byte plus 2-byte bytecode offset -STATIC const byte opcode_format_table[64] = { - OC4(U, U, U, U), // 0x00-0x03 - OC4(U, U, U, U), // 0x04-0x07 - OC4(U, U, U, U), // 0x08-0x0b - OC4(U, U, U, U), // 0x0c-0x0f - OC4(B, B, B, U), // 0x10-0x13 - OC4(V, U, Q, V), // 0x14-0x17 - OC4(B, V, V, Q), // 0x18-0x1b - OC4(Q, Q, Q, Q), // 0x1c-0x1f - OC4(B, B, V, V), // 0x20-0x23 - OC4(Q, Q, Q, B), // 0x24-0x27 - OC4(V, V, Q, Q), // 0x28-0x2b - OC4(U, U, U, U), // 0x2c-0x2f - OC4(B, B, B, B), // 0x30-0x33 - OC4(B, O, O, O), // 0x34-0x37 - OC4(O, O, U, U), // 0x38-0x3b - OC4(U, O, B, O), // 0x3c-0x3f - OC4(O, B, B, O), // 0x40-0x43 - OC4(O, U, O, B), // 0x44-0x47 - OC4(U, U, U, U), // 0x48-0x4b - OC4(U, U, U, U), // 0x4c-0x4f - OC4(V, V, U, V), // 0x50-0x53 - OC4(B, U, V, V), // 0x54-0x57 - OC4(V, V, V, B), // 0x58-0x5b - OC4(B, B, B, U), // 0x5c-0x5f - OC4(V, V, V, V), // 0x60-0x63 - OC4(V, V, V, V), // 0x64-0x67 - OC4(Q, Q, B, U), // 0x68-0x6b - OC4(U, U, U, U), // 0x6c-0x6f - - OC4(B, B, B, B), // 0x70-0x73 - OC4(B, B, B, B), // 0x74-0x77 - OC4(B, B, B, B), // 0x78-0x7b - OC4(B, B, B, B), // 0x7c-0x7f - OC4(B, B, B, B), // 0x80-0x83 - OC4(B, B, B, B), // 0x84-0x87 - OC4(B, B, B, B), // 0x88-0x8b - OC4(B, B, B, B), // 0x8c-0x8f - OC4(B, B, B, B), // 0x90-0x93 - OC4(B, B, B, B), // 0x94-0x97 - OC4(B, B, B, B), // 0x98-0x9b - OC4(B, B, B, B), // 0x9c-0x9f - OC4(B, B, B, B), // 0xa0-0xa3 - OC4(B, B, B, B), // 0xa4-0xa7 - OC4(B, B, B, B), // 0xa8-0xab - OC4(B, B, B, B), // 0xac-0xaf - - OC4(B, B, B, B), // 0xb0-0xb3 - OC4(B, B, B, B), // 0xb4-0xb7 - OC4(B, B, B, B), // 0xb8-0xbb - OC4(B, B, B, B), // 0xbc-0xbf - - OC4(B, B, B, B), // 0xc0-0xc3 - OC4(B, B, B, B), // 0xc4-0xc7 - OC4(B, B, B, B), // 0xc8-0xcb - OC4(B, B, B, B), // 0xcc-0xcf - - OC4(B, B, B, B), // 0xd0-0xd3 - OC4(U, U, U, B), // 0xd4-0xd7 - OC4(B, B, B, B), // 0xd8-0xdb - OC4(B, B, B, B), // 0xdc-0xdf - - OC4(B, B, B, B), // 0xe0-0xe3 - OC4(B, B, B, B), // 0xe4-0xe7 - OC4(B, B, B, B), // 0xe8-0xeb - OC4(B, B, B, B), // 0xec-0xef - - OC4(B, B, B, B), // 0xf0-0xf3 - OC4(B, B, B, B), // 0xf4-0xf7 - OC4(U, U, U, U), // 0xf8-0xfb - OC4(U, U, U, U), // 0xfc-0xff -}; -#undef OC4 -#undef U -#undef B -#undef Q -#undef V -#undef O - uint mp_opcode_format(const byte *ip, size_t *opcode_size, bool count_var_uint) { - uint f = (opcode_format_table[*ip >> 2] >> (2 * (*ip & 3))) & 3; + uint f = MP_BC_FORMAT(*ip); const byte *ip_start = ip; - if (f == MP_OPCODE_QSTR) { + if (f == MP_BC_FORMAT_QSTR) { if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE_DYNAMIC) { if (*ip == MP_BC_LOAD_NAME || *ip == MP_BC_LOAD_GLOBAL @@ -396,18 +324,14 @@ uint mp_opcode_format(const byte *ip, size_t *opcode_size, bool count_var_uint) } ip += 3; } else { - int extra_byte = ( - *ip == MP_BC_RAISE_VARARGS - || *ip == MP_BC_MAKE_CLOSURE - || *ip == MP_BC_MAKE_CLOSURE_DEFARGS - ); + int extra_byte = (*ip & MP_BC_MASK_EXTRA_BYTE) == 0; ip += 1; - if (f == MP_OPCODE_VAR_UINT) { + if (f == MP_BC_FORMAT_VAR_UINT) { if (count_var_uint) { while ((*ip++ & 0x80) 
!= 0) { } } - } else if (f == MP_OPCODE_OFFSET) { + } else if (f == MP_BC_FORMAT_OFFSET) { ip += 2; } ip += extra_byte; diff --git a/python/src/py/bc.h b/python/src/py/bc.h index 0aadfa8a3..a96d17a0d 100644 --- a/python/src/py/bc.h +++ b/python/src/py/bc.h @@ -32,24 +32,35 @@ // bytecode layout: // -// n_state : var uint -// n_exc_stack : var uint -// scope_flags : byte -// n_pos_args : byte number of arguments this function takes -// n_kwonly_args : byte number of keyword-only arguments this function takes -// n_def_pos_args : byte number of default positional arguments +// func signature : var uint +// contains six values interleaved bit-wise as: xSSSSEAA [xFSSKAED repeated] +// x = extension another byte follows +// S = n_state - 1 number of entries in Python value stack +// E = n_exc_stack number of entries in exception stack +// F = scope_flags four bits of flags, MP_SCOPE_FLAG_xxx +// A = n_pos_args number of arguments this function takes +// K = n_kwonly_args number of keyword-only arguments this function takes +// D = n_def_pos_args number of default positional arguments // -// code_info_size : var uint | code_info_size counts bytes in this chunk -// simple_name : var qstr | -// source_file : var qstr | -// | -// | only needed if bytecode contains pointers +// prelude size : var uint +// contains two values interleaved bit-wise as: xIIIIIIC repeated +// x = extension another byte follows +// I = n_info number of bytes in source info section +// C = n_cells number of bytes/cells in closure section // -// local_num0 : byte | -// ... : byte | -// local_numN : byte | N = num_cells -// 255 : byte | end of list sentinel -// | +// source info section: +// simple_name : var qstr +// source_file : var qstr +// +// +// closure section: +// local_num0 : byte +// ... 
: byte +// local_numN : byte N = n_cells-1 +// +// only needed if bytecode contains pointers +// +// // // // constant table layout: @@ -60,13 +71,127 @@ // const0 : obj // constN : obj +#define MP_BC_PRELUDE_SIG_ENCODE(S, E, scope, out_byte, out_env) \ +do { \ + /*// Get values to store in prelude */ \ + size_t F = scope->scope_flags & MP_SCOPE_FLAG_ALL_SIG; \ + size_t A = scope->num_pos_args; \ + size_t K = scope->num_kwonly_args; \ + size_t D = scope->num_def_pos_args; \ + \ + /* Adjust S to shrink range, to compress better */ \ + S -= 1; \ + \ + /* Encode prelude */ \ + /* xSSSSEAA */ \ + uint8_t z = (S & 0xf) << 3 | (E & 1) << 2 | (A & 3); \ + S >>= 4; \ + E >>= 1; \ + A >>= 2; \ + while (S | E | F | A | K | D) { \ + out_byte(out_env, 0x80 | z); \ + /* xFSSKAED */ \ + z = (F & 1) << 6 | (S & 3) << 4 | (K & 1) << 3 \ + | (A & 1) << 2 | (E & 1) << 1 | (D & 1); \ + S >>= 2; \ + E >>= 1; \ + F >>= 1; \ + A >>= 1; \ + K >>= 1; \ + D >>= 1; \ + } \ + out_byte(out_env, z); \ +} while (0) + +#define MP_BC_PRELUDE_SIG_DECODE_INTO(ip, S, E, F, A, K, D) \ +do { \ + uint8_t z = *(ip)++; \ + /* xSSSSEAA */ \ + S = (z >> 3) & 0xf; \ + E = (z >> 2) & 0x1; \ + F = 0; \ + A = z & 0x3; \ + K = 0; \ + D = 0; \ + for (unsigned n = 0; z & 0x80; ++n) { \ + z = *(ip)++; \ + /* xFSSKAED */ \ + S |= (z & 0x30) << (2 * n); \ + E |= (z & 0x02) << n; \ + F |= ((z & 0x40) >> 6) << n; \ + A |= (z & 0x4) << n; \ + K |= ((z & 0x08) >> 3) << n; \ + D |= (z & 0x1) << n; \ + } \ + S += 1; \ +} while (0) + +#define MP_BC_PRELUDE_SIG_DECODE(ip) \ + size_t n_state, n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_pos_args; \ + MP_BC_PRELUDE_SIG_DECODE_INTO(ip, n_state, n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_pos_args) + +#define MP_BC_PRELUDE_SIZE_ENCODE(I, C, out_byte, out_env) \ +do { \ + /* Encode bit-wise as: xIIIIIIC */ \ + uint8_t z = 0; \ + do { \ + z = (I & 0x3f) << 1 | (C & 1); \ + C >>= 1; \ + I >>= 6; \ + if (C | I) { \ + z |= 0x80; \ + } \ + out_byte(out_env, z); \ + } while (C | I); \ +} while (0) + +#define MP_BC_PRELUDE_SIZE_DECODE_INTO(ip, I, C) \ +do { \ + uint8_t z; \ + C = 0; \ + I = 0; \ + for (unsigned n = 0;; ++n) { \ + z = *(ip)++; \ + /* xIIIIIIC */ \ + C |= (z & 1) << n; \ + I |= ((z & 0x7e) >> 1) << (6 * n); \ + if (!(z & 0x80)) { \ + break; \ + } \ + } \ +} while (0) + +#define MP_BC_PRELUDE_SIZE_DECODE(ip) \ + size_t n_info, n_cell; \ + MP_BC_PRELUDE_SIZE_DECODE_INTO(ip, n_info, n_cell) + +// Sentinel value for mp_code_state_t.exc_sp_idx +#define MP_CODE_STATE_EXC_SP_IDX_SENTINEL ((uint16_t)-1) + +// To convert mp_code_state_t.exc_sp_idx to/from a pointer to mp_exc_stack_t +#define MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp) ((exc_sp) + 1 - (exc_stack)) +#define MP_CODE_STATE_EXC_SP_IDX_TO_PTR(exc_stack, exc_sp_idx) ((exc_stack) + (exc_sp_idx) - 1) + +typedef struct _mp_bytecode_prelude_t { + uint n_state; + uint n_exc_stack; + uint scope_flags; + uint n_pos_args; + uint n_kwonly_args; + uint n_def_pos_args; + qstr qstr_block_name; + qstr qstr_source_file; + const byte *line_info; + const byte *opcodes; +} mp_bytecode_prelude_t; + // Exception stack entry typedef struct _mp_exc_stack_t { const byte *handler; - // bit 0 is saved currently_in_except_block value + // bit 0 is currently unused // bit 1 is whether the opcode was SETUP_WITH or SETUP_FINALLY mp_obj_t *val_sp; - // Saved exception, valid if currently_in_except_block bit is 1 + // Saved exception mp_obj_base_t *prev_exc; } mp_exc_stack_t; @@ -78,12 +203,16 @@ typedef struct _mp_code_state_t { 
mp_obj_fun_bc_t *fun_bc; const byte *ip; mp_obj_t *sp; - // bit 0 is saved currently_in_except_block value - mp_exc_stack_t *exc_sp; + uint16_t n_state; + uint16_t exc_sp_idx; mp_obj_dict_t *old_globals; #if MICROPY_STACKLESS struct _mp_code_state_t *prev; #endif + #if MICROPY_PY_SYS_SETTRACE + struct _mp_code_state_t *prev_state; + struct _mp_obj_frame_t *frame; + #endif // Variable-length mp_obj_t state[0]; // Variable-length, never accessed by name, only as (void*)(state + n_state) @@ -110,13 +239,35 @@ const byte *mp_bytecode_print_str(const byte *ip); #if MICROPY_PERSISTENT_CODE_LOAD || MICROPY_PERSISTENT_CODE_SAVE -#define MP_OPCODE_BYTE (0) -#define MP_OPCODE_QSTR (1) -#define MP_OPCODE_VAR_UINT (2) -#define MP_OPCODE_OFFSET (3) - uint mp_opcode_format(const byte *ip, size_t *opcode_size, bool count_var_uint); #endif +static inline size_t mp_bytecode_get_source_line(const byte *line_info, size_t bc_offset) { + size_t source_line = 1; + size_t c; + while ((c = *line_info)) { + size_t b, l; + if ((c & 0x80) == 0) { + // 0b0LLBBBBB encoding + b = c & 0x1f; + l = c >> 5; + line_info += 1; + } else { + // 0b1LLLBBBB 0bLLLLLLLL encoding (l's LSB in second byte) + b = c & 0xf; + l = ((c << 4) & 0x700) | line_info[1]; + line_info += 2; + } + if (bc_offset >= b) { + bc_offset -= b; + source_line += l; + } else { + // found source line corresponding to bytecode offset + break; + } + } + return source_line; +} + #endif // MICROPY_INCLUDED_PY_BC_H diff --git a/python/src/py/bc0.h b/python/src/py/bc0.h index 175ee263a..7cf061fc4 100644 --- a/python/src/py/bc0.h +++ b/python/src/py/bc0.h @@ -26,93 +26,125 @@ #ifndef MICROPY_INCLUDED_PY_BC0_H #define MICROPY_INCLUDED_PY_BC0_H -// MicroPython byte-codes. -// The comment at the end of the line (if it exists) tells the arguments to the byte-code. 
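The bc.h hunk above also adds mp_bytecode_get_source_line(), which decodes the compressed map from bytecode offset to source line number referenced by line_info in the prelude. Below is a small self-contained sketch of that decoding; the line_info table is made up for illustration and is not real compiler output.

#include <assert.h>
#include <stddef.h>

typedef unsigned char byte;

static size_t get_source_line(const byte *line_info, size_t bc_offset) {
    size_t source_line = 1;
    size_t c;
    while ((c = *line_info)) {
        size_t b, l;
        if ((c & 0x80) == 0) {
            b = c & 0x1f;                           /* 0b0LLBBBBB: 5-bit byte count */
            l = c >> 5;                             /* 2-bit line increment */
            line_info += 1;
        } else {
            b = c & 0xf;                            /* 0b1LLLBBBB: 4-bit byte count */
            l = ((c << 4) & 0x700) | line_info[1];  /* 11-bit line increment */
            line_info += 2;
        }
        if (bc_offset >= b) {
            bc_offset -= b;       /* skip past this run of bytecode bytes */
            source_line += l;     /* and advance the source line accordingly */
        } else {
            break;                /* the offset falls inside this run */
        }
    }
    return source_line;
}

int main(void) {
    /* entry 1: 3 bytecode bytes on line 1, then +1 line (0x23 = 0b0_01_00011)
     * entry 2: two-byte form, 0 bytes and +300 lines (0x90 0x2c) */
    static const byte line_info[] = { 0x23, 0x90, 0x2c, 0x00 };
    assert(get_source_line(line_info, 0) == 1);    /* offsets 0-2 map to line 1 */
    assert(get_source_line(line_info, 3) == 302);  /* 1 + 1 + 300 */
    return 0;
}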
+// MicroPython bytecode opcodes, grouped based on the format of the opcode -#define MP_BC_LOAD_CONST_FALSE (0x10) -#define MP_BC_LOAD_CONST_NONE (0x11) -#define MP_BC_LOAD_CONST_TRUE (0x12) -#define MP_BC_LOAD_CONST_SMALL_INT (0x14) // signed var-int -#define MP_BC_LOAD_CONST_STRING (0x16) // qstr -#define MP_BC_LOAD_CONST_OBJ (0x17) // ptr -#define MP_BC_LOAD_NULL (0x18) +#define MP_BC_MASK_FORMAT (0xf0) +#define MP_BC_MASK_EXTRA_BYTE (0x9e) -#define MP_BC_LOAD_FAST_N (0x19) // uint -#define MP_BC_LOAD_DEREF (0x1a) // uint -#define MP_BC_LOAD_NAME (0x1b) // qstr -#define MP_BC_LOAD_GLOBAL (0x1c) // qstr -#define MP_BC_LOAD_ATTR (0x1d) // qstr -#define MP_BC_LOAD_METHOD (0x1e) // qstr -#define MP_BC_LOAD_SUPER_METHOD (0x1f) // qstr -#define MP_BC_LOAD_BUILD_CLASS (0x20) -#define MP_BC_LOAD_SUBSCR (0x21) +#define MP_BC_FORMAT_BYTE (0) +#define MP_BC_FORMAT_QSTR (1) +#define MP_BC_FORMAT_VAR_UINT (2) +#define MP_BC_FORMAT_OFFSET (3) -#define MP_BC_STORE_FAST_N (0x22) // uint -#define MP_BC_STORE_DEREF (0x23) // uint -#define MP_BC_STORE_NAME (0x24) // qstr -#define MP_BC_STORE_GLOBAL (0x25) // qstr -#define MP_BC_STORE_ATTR (0x26) // qstr -#define MP_BC_STORE_SUBSCR (0x27) +// Nibbles in magic number are: BB BB BB BB BB BO VV QU +#define MP_BC_FORMAT(op) ((0x000003a4 >> (2 * ((op) >> 4))) & 3) -#define MP_BC_DELETE_FAST (0x28) // uint -#define MP_BC_DELETE_DEREF (0x29) // uint -#define MP_BC_DELETE_NAME (0x2a) // qstr -#define MP_BC_DELETE_GLOBAL (0x2b) // qstr +// Load, Store, Delete, Import, Make, Build, Unpack, Call, Jump, Exception, For, sTack, Return, Yield, Op +#define MP_BC_BASE_RESERVED (0x00) // ---------------- +#define MP_BC_BASE_QSTR_O (0x10) // LLLLLLSSSDDII--- +#define MP_BC_BASE_VINT_E (0x20) // MMLLLLSSDDBBBBBB +#define MP_BC_BASE_VINT_O (0x30) // UUMMCCCC-------- +#define MP_BC_BASE_JUMP_E (0x40) // J-JJJJJEEEEF---- +#define MP_BC_BASE_BYTE_O (0x50) // LLLLSSDTTTTTEEFF +#define MP_BC_BASE_BYTE_E (0x60) // --BREEEYYI------ +#define MP_BC_LOAD_CONST_SMALL_INT_MULTI (0x70) // LLLLLLLLLLLLLLLL + // (0x80) // LLLLLLLLLLLLLLLL + // (0x90) // LLLLLLLLLLLLLLLL + // (0xa0) // LLLLLLLLLLLLLLLL +#define MP_BC_LOAD_FAST_MULTI (0xb0) // LLLLLLLLLLLLLLLL +#define MP_BC_STORE_FAST_MULTI (0xc0) // SSSSSSSSSSSSSSSS +#define MP_BC_UNARY_OP_MULTI (0xd0) // OOOOOOO +#define MP_BC_BINARY_OP_MULTI (0xd7) // OOOOOOOOO + // (0xe0) // OOOOOOOOOOOOOOOO + // (0xf0) // OOOOOOOOOO------ -#define MP_BC_DUP_TOP (0x30) -#define MP_BC_DUP_TOP_TWO (0x31) -#define MP_BC_POP_TOP (0x32) -#define MP_BC_ROT_TWO (0x33) -#define MP_BC_ROT_THREE (0x34) +#define MP_BC_LOAD_CONST_SMALL_INT_MULTI_NUM (64) +#define MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS (16) +#define MP_BC_LOAD_FAST_MULTI_NUM (16) +#define MP_BC_STORE_FAST_MULTI_NUM (16) +#define MP_BC_UNARY_OP_MULTI_NUM (MP_UNARY_OP_NUM_BYTECODE) +#define MP_BC_BINARY_OP_MULTI_NUM (MP_BINARY_OP_NUM_BYTECODE) -#define MP_BC_JUMP (0x35) // rel byte code offset, 16-bit signed, in excess -#define MP_BC_POP_JUMP_IF_TRUE (0x36) // rel byte code offset, 16-bit signed, in excess -#define MP_BC_POP_JUMP_IF_FALSE (0x37) // rel byte code offset, 16-bit signed, in excess -#define MP_BC_JUMP_IF_TRUE_OR_POP (0x38) // rel byte code offset, 16-bit signed, in excess -#define MP_BC_JUMP_IF_FALSE_OR_POP (0x39) // rel byte code offset, 16-bit signed, in excess -#define MP_BC_SETUP_WITH (0x3d) // rel byte code offset, 16-bit unsigned -#define MP_BC_WITH_CLEANUP (0x3e) -#define MP_BC_SETUP_EXCEPT (0x3f) // rel byte code offset, 16-bit unsigned -#define MP_BC_SETUP_FINALLY (0x40) // rel 
byte code offset, 16-bit unsigned -#define MP_BC_END_FINALLY (0x41) -#define MP_BC_GET_ITER (0x42) -#define MP_BC_FOR_ITER (0x43) // rel byte code offset, 16-bit unsigned -#define MP_BC_POP_EXCEPT_JUMP (0x44) // rel byte code offset, 16-bit unsigned -#define MP_BC_UNWIND_JUMP (0x46) // rel byte code offset, 16-bit signed, in excess; then a byte -#define MP_BC_GET_ITER_STACK (0x47) +#define MP_BC_LOAD_CONST_FALSE (MP_BC_BASE_BYTE_O + 0x00) +#define MP_BC_LOAD_CONST_NONE (MP_BC_BASE_BYTE_O + 0x01) +#define MP_BC_LOAD_CONST_TRUE (MP_BC_BASE_BYTE_O + 0x02) +#define MP_BC_LOAD_CONST_SMALL_INT (MP_BC_BASE_VINT_E + 0x02) // signed var-int +#define MP_BC_LOAD_CONST_STRING (MP_BC_BASE_QSTR_O + 0x00) // qstr +#define MP_BC_LOAD_CONST_OBJ (MP_BC_BASE_VINT_E + 0x03) // ptr +#define MP_BC_LOAD_NULL (MP_BC_BASE_BYTE_O + 0x03) -#define MP_BC_BUILD_TUPLE (0x50) // uint -#define MP_BC_BUILD_LIST (0x51) // uint -#define MP_BC_BUILD_MAP (0x53) // uint -#define MP_BC_STORE_MAP (0x54) -#define MP_BC_BUILD_SET (0x56) // uint -#define MP_BC_BUILD_SLICE (0x58) // uint -#define MP_BC_STORE_COMP (0x57) // uint -#define MP_BC_UNPACK_SEQUENCE (0x59) // uint -#define MP_BC_UNPACK_EX (0x5a) // uint +#define MP_BC_LOAD_FAST_N (MP_BC_BASE_VINT_E + 0x04) // uint +#define MP_BC_LOAD_DEREF (MP_BC_BASE_VINT_E + 0x05) // uint +#define MP_BC_LOAD_NAME (MP_BC_BASE_QSTR_O + 0x01) // qstr +#define MP_BC_LOAD_GLOBAL (MP_BC_BASE_QSTR_O + 0x02) // qstr +#define MP_BC_LOAD_ATTR (MP_BC_BASE_QSTR_O + 0x03) // qstr +#define MP_BC_LOAD_METHOD (MP_BC_BASE_QSTR_O + 0x04) // qstr +#define MP_BC_LOAD_SUPER_METHOD (MP_BC_BASE_QSTR_O + 0x05) // qstr +#define MP_BC_LOAD_BUILD_CLASS (MP_BC_BASE_BYTE_O + 0x04) +#define MP_BC_LOAD_SUBSCR (MP_BC_BASE_BYTE_O + 0x05) -#define MP_BC_RETURN_VALUE (0x5b) -#define MP_BC_RAISE_VARARGS (0x5c) // byte -#define MP_BC_YIELD_VALUE (0x5d) -#define MP_BC_YIELD_FROM (0x5e) +#define MP_BC_STORE_FAST_N (MP_BC_BASE_VINT_E + 0x06) // uint +#define MP_BC_STORE_DEREF (MP_BC_BASE_VINT_E + 0x07) // uint +#define MP_BC_STORE_NAME (MP_BC_BASE_QSTR_O + 0x06) // qstr +#define MP_BC_STORE_GLOBAL (MP_BC_BASE_QSTR_O + 0x07) // qstr +#define MP_BC_STORE_ATTR (MP_BC_BASE_QSTR_O + 0x08) // qstr +#define MP_BC_STORE_SUBSCR (MP_BC_BASE_BYTE_O + 0x06) -#define MP_BC_MAKE_FUNCTION (0x60) // uint -#define MP_BC_MAKE_FUNCTION_DEFARGS (0x61) // uint -#define MP_BC_MAKE_CLOSURE (0x62) // uint -#define MP_BC_MAKE_CLOSURE_DEFARGS (0x63) // uint -#define MP_BC_CALL_FUNCTION (0x64) // uint -#define MP_BC_CALL_FUNCTION_VAR_KW (0x65) // uint -#define MP_BC_CALL_METHOD (0x66) // uint -#define MP_BC_CALL_METHOD_VAR_KW (0x67) // uint +#define MP_BC_DELETE_FAST (MP_BC_BASE_VINT_E + 0x08) // uint +#define MP_BC_DELETE_DEREF (MP_BC_BASE_VINT_E + 0x09) // uint +#define MP_BC_DELETE_NAME (MP_BC_BASE_QSTR_O + 0x09) // qstr +#define MP_BC_DELETE_GLOBAL (MP_BC_BASE_QSTR_O + 0x0a) // qstr -#define MP_BC_IMPORT_NAME (0x68) // qstr -#define MP_BC_IMPORT_FROM (0x69) // qstr -#define MP_BC_IMPORT_STAR (0x6a) +#define MP_BC_DUP_TOP (MP_BC_BASE_BYTE_O + 0x07) +#define MP_BC_DUP_TOP_TWO (MP_BC_BASE_BYTE_O + 0x08) +#define MP_BC_POP_TOP (MP_BC_BASE_BYTE_O + 0x09) +#define MP_BC_ROT_TWO (MP_BC_BASE_BYTE_O + 0x0a) +#define MP_BC_ROT_THREE (MP_BC_BASE_BYTE_O + 0x0b) -#define MP_BC_LOAD_CONST_SMALL_INT_MULTI (0x70) // + N(64) -#define MP_BC_LOAD_FAST_MULTI (0xb0) // + N(16) -#define MP_BC_STORE_FAST_MULTI (0xc0) // + N(16) -#define MP_BC_UNARY_OP_MULTI (0xd0) // + op( 'Z') -mp_obj_t mp_binary_get_val(char struct_type, char val_type, byte **ptr) { +mp_obj_t 
mp_binary_get_val(char struct_type, char val_type, byte *p_base, byte **ptr) { byte *p = *ptr; - mp_uint_t align; + size_t align; size_t size = mp_binary_get_size(struct_type, val_type, &align); if (struct_type == '@') { - // Make pointer aligned - p = (byte*)MP_ALIGN(p, (size_t)align); + // Align p relative to p_base + p = p_base + (uintptr_t)MP_ALIGN(p - p_base, align); #if MP_ENDIANNESS_LITTLE struct_type = '<'; #else @@ -231,7 +231,7 @@ mp_obj_t mp_binary_get_val(char struct_type, char val_type, byte **ptr) { } } -void mp_binary_set_int(mp_uint_t val_sz, bool big_endian, byte *dest, mp_uint_t val) { +void mp_binary_set_int(size_t val_sz, bool big_endian, byte *dest, mp_uint_t val) { if (MP_ENDIANNESS_LITTLE && !big_endian) { memcpy(dest, &val, val_sz); } else if (MP_ENDIANNESS_BIG && big_endian) { @@ -250,14 +250,14 @@ void mp_binary_set_int(mp_uint_t val_sz, bool big_endian, byte *dest, mp_uint_t } } -void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte **ptr) { +void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte *p_base, byte **ptr) { byte *p = *ptr; - mp_uint_t align; + size_t align; size_t size = mp_binary_get_size(struct_type, val_type, &align); if (struct_type == '@') { - // Make pointer aligned - p = (byte*)MP_ALIGN(p, (size_t)align); + // Align p relative to p_base + p = p_base + (uintptr_t)MP_ALIGN(p - p_base, align); if (MP_ENDIANNESS_LITTLE) { struct_type = '<'; } else { @@ -315,7 +315,7 @@ void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte ** mp_binary_set_int(MIN((size_t)size, sizeof(val)), struct_type == '>', p, val); } -void mp_binary_set_val_array(char typecode, void *p, mp_uint_t index, mp_obj_t val_in) { +void mp_binary_set_val_array(char typecode, void *p, size_t index, mp_obj_t val_in) { switch (typecode) { #if MICROPY_PY_BUILTINS_FLOAT case 'f': @@ -342,7 +342,7 @@ void mp_binary_set_val_array(char typecode, void *p, mp_uint_t index, mp_obj_t v } } -void mp_binary_set_val_array_from_int(char typecode, void *p, mp_uint_t index, mp_int_t val) { +void mp_binary_set_val_array_from_int(char typecode, void *p, size_t index, mp_int_t val) { switch (typecode) { case 'b': ((signed char*)p)[index] = val; diff --git a/python/src/py/binary.h b/python/src/py/binary.h index 71182042f..5c645bcaa 100644 --- a/python/src/py/binary.h +++ b/python/src/py/binary.h @@ -34,13 +34,13 @@ // type-specification errors due to end-of-string. 
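The binary.c change above threads a new p_base argument through mp_binary_get_val()/mp_binary_set_val() so that native ('@') alignment is computed relative to the start of the buffer rather than the absolute pointer value. A minimal sketch of the effect, with align_rel() as a local stand-in for the p_base + MP_ALIGN(p - p_base, align) expression in the hunk (it is not a MicroPython API):

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

typedef unsigned char byte;

/* Advance p to the next multiple of align, measured from p_base. */
static byte *align_rel(byte *p_base, byte *p, size_t align) {
    uintptr_t a = (uintptr_t)align;
    uintptr_t off = (uintptr_t)(p - p_base);
    off = (off + a - 1) & ~(a - 1);
    return p_base + off;
}

int main(void) {
    byte buf[16];
    /* Unpacking "@BI": one byte, then a 4-byte unsigned int with native alignment. */
    byte *p = buf + 1;             /* cursor after reading the 'B' */
    p = align_rel(buf, p, 4);      /* align for the 'I' */
    assert(p - buf == 4);          /* offset 4, independent of where buf sits in memory */
    return 0;
}

With the old absolute alignment, the computed field offset could vary with the buffer's actual address; aligning against p_base makes the unpacked layout depend only on the format string.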
#define BYTEARRAY_TYPECODE 1 -size_t mp_binary_get_size(char struct_type, char val_type, mp_uint_t *palign); -mp_obj_t mp_binary_get_val_array(char typecode, void *p, mp_uint_t index); -void mp_binary_set_val_array(char typecode, void *p, mp_uint_t index, mp_obj_t val_in); -void mp_binary_set_val_array_from_int(char typecode, void *p, mp_uint_t index, mp_int_t val); -mp_obj_t mp_binary_get_val(char struct_type, char val_type, byte **ptr); -void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte **ptr); -long long mp_binary_get_int(mp_uint_t size, bool is_signed, bool big_endian, const byte *src); -void mp_binary_set_int(mp_uint_t val_sz, bool big_endian, byte *dest, mp_uint_t val); +size_t mp_binary_get_size(char struct_type, char val_type, size_t *palign); +mp_obj_t mp_binary_get_val_array(char typecode, void *p, size_t index); +void mp_binary_set_val_array(char typecode, void *p, size_t index, mp_obj_t val_in); +void mp_binary_set_val_array_from_int(char typecode, void *p, size_t index, mp_int_t val); +mp_obj_t mp_binary_get_val(char struct_type, char val_type, byte *p_base, byte **ptr); +void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte *p_base, byte **ptr); +long long mp_binary_get_int(size_t size, bool is_signed, bool big_endian, const byte *src); +void mp_binary_set_int(size_t val_sz, bool big_endian, byte *dest, mp_uint_t val); #endif // MICROPY_INCLUDED_PY_BINARY_H diff --git a/python/src/py/builtin.h b/python/src/py/builtin.h index a5e0f5f2d..2dbe8a782 100644 --- a/python/src/py/builtin.h +++ b/python/src/py/builtin.h @@ -89,7 +89,7 @@ MP_DECLARE_CONST_FUN_OBJ_2(mp_op_delitem_obj); extern const mp_obj_module_t mp_module___main__; extern const mp_obj_module_t mp_module_builtins; -extern const mp_obj_module_t mp_module_array; +extern const mp_obj_module_t mp_module_uarray; extern const mp_obj_module_t mp_module_collections; extern const mp_obj_module_t mp_module_io; extern const mp_obj_module_t mp_module_math; @@ -122,6 +122,7 @@ extern const mp_obj_module_t mp_module_uwebsocket; extern const mp_obj_module_t mp_module_webrepl; extern const mp_obj_module_t mp_module_framebuf; extern const mp_obj_module_t mp_module_btree; +extern const mp_obj_module_t mp_module_ubluetooth; extern const char MICROPY_PY_BUILTINS_HELP_TEXT[]; diff --git a/python/src/py/builtinhelp.c b/python/src/py/builtinhelp.c index a7fede00a..8f162d885 100644 --- a/python/src/py/builtinhelp.c +++ b/python/src/py/builtinhelp.c @@ -80,10 +80,6 @@ STATIC void mp_help_print_modules(void) { mp_help_add_from_map(list, &mp_builtin_module_map); - #if MICROPY_MODULE_WEAK_LINKS - mp_help_add_from_map(list, &mp_builtin_module_weak_links_map); - #endif - #if MICROPY_MODULE_FROZEN_STR extern const char mp_frozen_str_names[]; mp_help_add_from_names(list, mp_frozen_str_names); diff --git a/python/src/py/builtinimport.c b/python/src/py/builtinimport.c index bb88f2ace..9d91b2059 100644 --- a/python/src/py/builtinimport.c +++ b/python/src/py/builtinimport.c @@ -3,7 +3,7 @@ * * The MIT License (MIT) * - * Copyright (c) 2013, 2014 Damien P. George + * Copyright (c) 2013-2019 Damien P. 
George * Copyright (c) 2014 Paul Sokolovsky * * Permission is hereby granted, free of charge, to any person obtaining a copy @@ -145,11 +145,11 @@ STATIC void do_load_from_lexer(mp_obj_t module_obj, mp_lexer_t *lex) { #endif #if MICROPY_PERSISTENT_CODE_LOAD || MICROPY_MODULE_FROZEN_MPY -STATIC void do_execute_raw_code(mp_obj_t module_obj, mp_raw_code_t *raw_code) { +STATIC void do_execute_raw_code(mp_obj_t module_obj, mp_raw_code_t *raw_code, const char* source_name) { + (void)source_name; + #if MICROPY_PY___FILE__ - // TODO - //qstr source_name = lex->source_name; - //mp_store_attr(module_obj, MP_QSTR___file__, MP_OBJ_NEW_QSTR(source_name)); + mp_store_attr(module_obj, MP_QSTR___file__, MP_OBJ_NEW_QSTR(qstr_from_str(source_name))); #endif // execute the module in its context @@ -206,7 +206,7 @@ STATIC void do_load(mp_obj_t module_obj, vstr_t *file) { // its data) in the list of frozen files, execute it. #if MICROPY_MODULE_FROZEN_MPY if (frozen_type == MP_FROZEN_MPY) { - do_execute_raw_code(module_obj, modref); + do_execute_raw_code(module_obj, modref, file_str); return; } #endif @@ -216,7 +216,7 @@ STATIC void do_load(mp_obj_t module_obj, vstr_t *file) { #if MICROPY_HAS_FILE_READER && MICROPY_PERSISTENT_CODE_LOAD if (file_str[file->len - 3] == 'm') { mp_raw_code_t *raw_code = mp_raw_code_load_file(file_str); - do_execute_raw_code(module_obj, raw_code); + do_execute_raw_code(module_obj, raw_code, file_str); return; } #endif @@ -385,21 +385,18 @@ mp_obj_t mp_builtin___import__(size_t n_args, const mp_obj_t *args) { DEBUG_printf("Current path: %.*s\n", vstr_len(&path), vstr_str(&path)); if (stat == MP_IMPORT_STAT_NO_EXIST) { + module_obj = MP_OBJ_NULL; #if MICROPY_MODULE_WEAK_LINKS // check if there is a weak link to this module if (i == mod_len) { - mp_map_elem_t *el = mp_map_lookup((mp_map_t*)&mp_builtin_module_weak_links_map, MP_OBJ_NEW_QSTR(mod_name), MP_MAP_LOOKUP); - if (el == NULL) { - goto no_exist; + module_obj = mp_module_search_umodule(mod_str); + if (module_obj != MP_OBJ_NULL) { + // found weak linked module + mp_module_call_init(mod_name, module_obj); } - // found weak linked module - module_obj = el->value; - mp_module_call_init(mod_name, module_obj); - } else { - no_exist: - #else - { + } #endif + if (module_obj == MP_OBJ_NULL) { // couldn't find the file, so fail if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) { mp_raise_msg(&mp_type_ImportError, "module not found"); @@ -496,11 +493,11 @@ mp_obj_t mp_builtin___import__(size_t n_args, const mp_obj_t *args) { #if MICROPY_MODULE_WEAK_LINKS // Check if there is a weak link to this module - mp_map_elem_t *el = mp_map_lookup((mp_map_t*)&mp_builtin_module_weak_links_map, MP_OBJ_NEW_QSTR(module_name_qstr), MP_MAP_LOOKUP); - if (el != NULL) { + module_obj = mp_module_search_umodule(qstr_str(module_name_qstr)); + if (module_obj != MP_OBJ_NULL) { // Found weak-linked module - mp_module_call_init(module_name_qstr, el->value); - return el->value; + mp_module_call_init(module_name_qstr, module_obj); + return module_obj; } #endif diff --git a/python/src/py/compile.c b/python/src/py/compile.c index 27b706c8f..0d36aef8b 100644 --- a/python/src/py/compile.c +++ b/python/src/py/compile.c @@ -95,6 +95,7 @@ STATIC const emit_method_table_t *emit_native_table[] = { &emit_native_thumb_method_table, &emit_native_thumb_method_table, &emit_native_xtensa_method_table, + &emit_native_xtensawin_method_table, }; #elif MICROPY_EMIT_NATIVE @@ -109,6 +110,8 @@ STATIC const emit_method_table_t *emit_native_table[] = { #define NATIVE_EMITTER(f) 
emit_native_arm_##f #elif MICROPY_EMIT_XTENSA #define NATIVE_EMITTER(f) emit_native_xtensa_##f +#elif MICROPY_EMIT_XTENSAWIN +#define NATIVE_EMITTER(f) emit_native_xtensawin_##f #else #error "unknown native emitter" #endif @@ -131,6 +134,7 @@ STATIC const emit_inline_asm_method_table_t *emit_asm_table[] = { &emit_inline_thumb_method_table, &emit_inline_thumb_method_table, &emit_inline_xtensa_method_table, + NULL, }; #elif MICROPY_EMIT_INLINE_ASM @@ -1024,16 +1028,13 @@ STATIC void compile_del_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { STATIC void compile_break_cont_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { uint16_t label; - const char *error_msg; if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_break_stmt) { label = comp->break_label; - error_msg = "'break' outside loop"; } else { label = comp->continue_label; - error_msg = "'continue' outside loop"; } if (label == INVALID_LABEL) { - compile_syntax_error(comp, (mp_parse_node_t)pns, error_msg); + compile_syntax_error(comp, (mp_parse_node_t)pns, "'break'/'continue' outside loop"); } assert(comp->cur_except_level >= comp->break_continue_except_level); EMIT_ARG(unwind_jump, label, comp->cur_except_level - comp->break_continue_except_level); @@ -1196,6 +1197,13 @@ STATIC void compile_import_from(compiler_t *comp, mp_parse_node_struct_t *pns) { } while (0); if (MP_PARSE_NODE_IS_TOKEN_KIND(pns->nodes[1], MP_TOKEN_OP_STAR)) { + #if MICROPY_CPYTHON_COMPAT + if (comp->scope_cur->kind != SCOPE_MODULE) { + compile_syntax_error(comp, (mp_parse_node_t)pns, "import * not at module level"); + return; + } + #endif + EMIT_ARG(load_const_small_int, import_level); // build the "fromlist" tuple @@ -1205,7 +1213,7 @@ STATIC void compile_import_from(compiler_t *comp, mp_parse_node_struct_t *pns) { // do the import qstr dummy_q; do_import_name(comp, pn_import_source, &dummy_q); - EMIT_ARG(import, MP_QSTR_NULL, MP_EMIT_IMPORT_STAR); + EMIT_ARG(import, MP_QSTRnull, MP_EMIT_IMPORT_STAR); } else { EMIT_ARG(load_const_small_int, import_level); @@ -1608,6 +1616,9 @@ STATIC void compile_try_except(compiler_t *comp, mp_parse_node_t pn_body, int n_ qstr qstr_exception_local = 0; uint end_finally_label = comp_next_label(comp); + #if MICROPY_PY_SYS_SETTRACE + EMIT_ARG(set_source_line, pns_except->source_line); + #endif if (MP_PARSE_NODE_IS_NULL(pns_except->nodes[0])) { // this is a catch all exception handler @@ -1993,21 +2004,8 @@ STATIC void compile_expr_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { c_assign(comp, pns->nodes[0], ASSIGN_AUG_LOAD); // lhs load for aug assign compile_node(comp, pns1->nodes[1]); // rhs assert(MP_PARSE_NODE_IS_TOKEN(pns1->nodes[0])); - mp_binary_op_t op; - switch (MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0])) { - case MP_TOKEN_DEL_PIPE_EQUAL: op = MP_BINARY_OP_INPLACE_OR; break; - case MP_TOKEN_DEL_CARET_EQUAL: op = MP_BINARY_OP_INPLACE_XOR; break; - case MP_TOKEN_DEL_AMPERSAND_EQUAL: op = MP_BINARY_OP_INPLACE_AND; break; - case MP_TOKEN_DEL_DBL_LESS_EQUAL: op = MP_BINARY_OP_INPLACE_LSHIFT; break; - case MP_TOKEN_DEL_DBL_MORE_EQUAL: op = MP_BINARY_OP_INPLACE_RSHIFT; break; - case MP_TOKEN_DEL_PLUS_EQUAL: op = MP_BINARY_OP_INPLACE_ADD; break; - case MP_TOKEN_DEL_MINUS_EQUAL: op = MP_BINARY_OP_INPLACE_SUBTRACT; break; - case MP_TOKEN_DEL_STAR_EQUAL: op = MP_BINARY_OP_INPLACE_MULTIPLY; break; - case MP_TOKEN_DEL_DBL_SLASH_EQUAL: op = MP_BINARY_OP_INPLACE_FLOOR_DIVIDE; break; - case MP_TOKEN_DEL_SLASH_EQUAL: op = MP_BINARY_OP_INPLACE_TRUE_DIVIDE; break; - case MP_TOKEN_DEL_PERCENT_EQUAL: op = MP_BINARY_OP_INPLACE_MODULO; break; - 
case MP_TOKEN_DEL_DBL_STAR_EQUAL: default: op = MP_BINARY_OP_INPLACE_POWER; break; - } + mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0]); + mp_binary_op_t op = MP_BINARY_OP_INPLACE_OR + (tok - MP_TOKEN_DEL_PIPE_EQUAL); EMIT_ARG(binary_op, op); c_assign(comp, pns->nodes[0], ASSIGN_AUG_STORE); // lhs store for aug assign } else if (kind == PN_expr_stmt_assign_list) { @@ -2141,15 +2139,12 @@ STATIC void compile_comparison(compiler_t *comp, mp_parse_node_struct_t *pns) { EMIT(rot_three); } if (MP_PARSE_NODE_IS_TOKEN(pns->nodes[i])) { + mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns->nodes[i]); mp_binary_op_t op; - switch (MP_PARSE_NODE_LEAF_ARG(pns->nodes[i])) { - case MP_TOKEN_OP_LESS: op = MP_BINARY_OP_LESS; break; - case MP_TOKEN_OP_MORE: op = MP_BINARY_OP_MORE; break; - case MP_TOKEN_OP_DBL_EQUAL: op = MP_BINARY_OP_EQUAL; break; - case MP_TOKEN_OP_LESS_EQUAL: op = MP_BINARY_OP_LESS_EQUAL; break; - case MP_TOKEN_OP_MORE_EQUAL: op = MP_BINARY_OP_MORE_EQUAL; break; - case MP_TOKEN_OP_NOT_EQUAL: op = MP_BINARY_OP_NOT_EQUAL; break; - case MP_TOKEN_KW_IN: default: op = MP_BINARY_OP_IN; break; + if (tok == MP_TOKEN_KW_IN) { + op = MP_BINARY_OP_IN; + } else { + op = MP_BINARY_OP_LESS + (tok - MP_TOKEN_OP_LESS); } EMIT_ARG(binary_op, op); } else { @@ -2203,36 +2198,21 @@ STATIC void compile_term(compiler_t *comp, mp_parse_node_struct_t *pns) { compile_node(comp, pns->nodes[0]); for (int i = 1; i + 1 < num_nodes; i += 2) { compile_node(comp, pns->nodes[i + 1]); - mp_binary_op_t op; mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns->nodes[i]); - switch (tok) { - case MP_TOKEN_OP_PLUS: op = MP_BINARY_OP_ADD; break; - case MP_TOKEN_OP_MINUS: op = MP_BINARY_OP_SUBTRACT; break; - case MP_TOKEN_OP_STAR: op = MP_BINARY_OP_MULTIPLY; break; - case MP_TOKEN_OP_DBL_SLASH: op = MP_BINARY_OP_FLOOR_DIVIDE; break; - case MP_TOKEN_OP_SLASH: op = MP_BINARY_OP_TRUE_DIVIDE; break; - case MP_TOKEN_OP_PERCENT: op = MP_BINARY_OP_MODULO; break; - case MP_TOKEN_OP_DBL_LESS: op = MP_BINARY_OP_LSHIFT; break; - default: - assert(tok == MP_TOKEN_OP_DBL_MORE); - op = MP_BINARY_OP_RSHIFT; - break; - } + mp_binary_op_t op = MP_BINARY_OP_LSHIFT + (tok - MP_TOKEN_OP_DBL_LESS); EMIT_ARG(binary_op, op); } } STATIC void compile_factor_2(compiler_t *comp, mp_parse_node_struct_t *pns) { compile_node(comp, pns->nodes[1]); - mp_unary_op_t op; mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); - switch (tok) { - case MP_TOKEN_OP_PLUS: op = MP_UNARY_OP_POSITIVE; break; - case MP_TOKEN_OP_MINUS: op = MP_UNARY_OP_NEGATIVE; break; - default: - assert(tok == MP_TOKEN_OP_TILDE); - op = MP_UNARY_OP_INVERT; - break; + mp_unary_op_t op; + if (tok == MP_TOKEN_OP_TILDE) { + op = MP_UNARY_OP_INVERT; + } else { + assert(tok == MP_TOKEN_OP_PLUS || tok == MP_TOKEN_OP_MINUS); + op = MP_UNARY_OP_POSITIVE + (tok - MP_TOKEN_OP_PLUS); } EMIT_ARG(unary_op, op); } @@ -2851,7 +2831,7 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn return; } - qstr param_name = MP_QSTR_NULL; + qstr param_name = MP_QSTRnull; uint param_flag = ID_FLAG_IS_PARAM; mp_parse_node_struct_t *pns = NULL; if (MP_PARSE_NODE_IS_ID(pn)) { @@ -2910,7 +2890,7 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn } } - if (param_name != MP_QSTR_NULL) { + if (param_name != MP_QSTRnull) { id_info_t *id_info = scope_find_or_add_id(comp->scope_cur, param_name, ID_INFO_KIND_UNDECIDED); if (id_info->kind != ID_INFO_KIND_UNDECIDED) { compile_syntax_error(comp, pn, "argument name reused"); @@ -3078,6 +3058,9 @@ 
STATIC void compile_scope(compiler_t *comp, scope_t *scope, pass_kind_t pass) { mp_parse_node_struct_t *pns = (mp_parse_node_struct_t*)scope->pn; assert(MP_PARSE_NODE_STRUCT_NUM_NODES(pns) == 3); + // Set the source line number for the start of the lambda + EMIT_ARG(set_source_line, pns->source_line); + // work out number of parameters, keywords and default parameters, and add them to the id_info array // must be done before compiling the body so that arguments are numbered first (for LOAD_FAST etc) if (comp->pass == MP_PASS_SCOPE) { @@ -3112,6 +3095,9 @@ STATIC void compile_scope(compiler_t *comp, scope_t *scope, pass_kind_t pass) { scope->num_pos_args = 1; } + // Set the source line number for the start of the comprehension + EMIT_ARG(set_source_line, pns->source_line); + if (scope->kind == SCOPE_LIST_COMP) { EMIT_ARG(build, 0, MP_EMIT_BUILD_LIST); } else if (scope->kind == SCOPE_DICT_COMP) { @@ -3151,6 +3137,9 @@ STATIC void compile_scope(compiler_t *comp, scope_t *scope, pass_kind_t pass) { scope_find_or_add_id(scope, MP_QSTR___class__, ID_INFO_KIND_LOCAL); } + #if MICROPY_PY_SYS_SETTRACE + EMIT_ARG(set_source_line, pns->source_line); + #endif compile_load_id(comp, MP_QSTR___name__); compile_store_id(comp, MP_QSTR___module__); EMIT_ARG(load_const_str, MP_PARSE_NODE_LEAF_ARG(pns->nodes[0])); // 0 is class name @@ -3434,7 +3423,7 @@ STATIC void scope_compute_things(scope_t *scope) { #if !MICROPY_PERSISTENT_CODE_SAVE STATIC #endif -mp_raw_code_t *mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_file, uint emit_opt, bool is_repl) { +mp_raw_code_t *mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_file, bool is_repl) { // put compiler state on the stack, it's relatively small compiler_t comp_state = {0}; compiler_t *comp = &comp_state; @@ -3445,6 +3434,11 @@ mp_raw_code_t *mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_f comp->continue_label = INVALID_LABEL; // create the module scope + #if MICROPY_EMIT_NATIVE + const uint emit_opt = MP_STATE_VM(default_emit_opt); + #else + const uint emit_opt = MP_EMIT_OPT_NONE; + #endif scope_t *module_scope = scope_new_and_link(comp, SCOPE_MODULE, parse_tree->root, emit_opt); // create standard emitter; it's used at least for MP_PASS_SCOPE @@ -3599,8 +3593,8 @@ mp_raw_code_t *mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_f } } -mp_obj_t mp_compile(mp_parse_tree_t *parse_tree, qstr source_file, uint emit_opt, bool is_repl) { - mp_raw_code_t *rc = mp_compile_to_raw_code(parse_tree, source_file, emit_opt, is_repl); +mp_obj_t mp_compile(mp_parse_tree_t *parse_tree, qstr source_file, bool is_repl) { + mp_raw_code_t *rc = mp_compile_to_raw_code(parse_tree, source_file, is_repl); // return function that executes the outer module return mp_make_function_from_raw_code(rc, MP_OBJ_NULL, MP_OBJ_NULL); } diff --git a/python/src/py/compile.h b/python/src/py/compile.h index 99a17a8d1..1ad1f5e9c 100644 --- a/python/src/py/compile.h +++ b/python/src/py/compile.h @@ -32,11 +32,11 @@ // the compiler will raise an exception if an error occurred // the compiler will clear the parse tree before it returns -mp_obj_t mp_compile(mp_parse_tree_t *parse_tree, qstr source_file, uint emit_opt, bool is_repl); +mp_obj_t mp_compile(mp_parse_tree_t *parse_tree, qstr source_file, bool is_repl); #if MICROPY_PERSISTENT_CODE_SAVE // this has the same semantics as mp_compile -mp_raw_code_t *mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_file, uint emit_opt, bool is_repl); +mp_raw_code_t 
*mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_file, bool is_repl); #endif // this is implemented in runtime.c diff --git a/python/src/py/emit.h b/python/src/py/emit.h index 70f7bf122..26d027a7a 100644 --- a/python/src/py/emit.h +++ b/python/src/py/emit.h @@ -76,15 +76,15 @@ typedef enum { // Kind for emit->setup_block() #define MP_EMIT_SETUP_BLOCK_WITH (0) -#define MP_EMIT_SETUP_BLOCK_EXCEPT (2) -#define MP_EMIT_SETUP_BLOCK_FINALLY (3) +#define MP_EMIT_SETUP_BLOCK_EXCEPT (1) +#define MP_EMIT_SETUP_BLOCK_FINALLY (2) // Kind for emit->build() #define MP_EMIT_BUILD_TUPLE (0) #define MP_EMIT_BUILD_LIST (1) -#define MP_EMIT_BUILD_MAP (3) -#define MP_EMIT_BUILD_SET (6) -#define MP_EMIT_BUILD_SLICE (8) +#define MP_EMIT_BUILD_MAP (2) +#define MP_EMIT_BUILD_SET (3) +#define MP_EMIT_BUILD_SLICE (4) // Kind for emit->yield() #define MP_EMIT_YIELD_VALUE (0) @@ -174,6 +174,7 @@ extern const emit_method_table_t emit_native_x86_method_table; extern const emit_method_table_t emit_native_thumb_method_table; extern const emit_method_table_t emit_native_arm_method_table; extern const emit_method_table_t emit_native_xtensa_method_table; +extern const emit_method_table_t emit_native_xtensawin_method_table; extern const mp_emit_method_table_id_ops_t mp_emit_bc_method_table_load_id_ops; extern const mp_emit_method_table_id_ops_t mp_emit_bc_method_table_store_id_ops; @@ -185,6 +186,7 @@ emit_t *emit_native_x86_new(mp_obj_t *error_slot, uint *label_slot, mp_uint_t ma emit_t *emit_native_thumb_new(mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels); emit_t *emit_native_arm_new(mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels); emit_t *emit_native_xtensa_new(mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels); +emit_t *emit_native_xtensawin_new(mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels); void emit_bc_set_max_num_labels(emit_t* emit, mp_uint_t max_num_labels); @@ -194,6 +196,7 @@ void emit_native_x86_free(emit_t *emit); void emit_native_thumb_free(emit_t *emit); void emit_native_arm_free(emit_t *emit); void emit_native_xtensa_free(emit_t *emit); +void emit_native_xtensawin_free(emit_t *emit); void mp_emit_bc_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope); void mp_emit_bc_end_pass(emit_t *emit); diff --git a/python/src/py/emitbc.c b/python/src/py/emitbc.c index 35eb6df9c..34f6362ff 100644 --- a/python/src/py/emitbc.c +++ b/python/src/py/emitbc.c @@ -3,7 +3,7 @@ * * The MIT License (MIT) * - * Copyright (c) 2013, 2014 Damien P. George + * Copyright (c) 2013-2019 Damien P. 
George * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -64,6 +64,9 @@ struct _emit_t { size_t bytecode_size; byte *code_base; // stores both byte code and code info + size_t n_info; + size_t n_cell; + #if MICROPY_PERSISTENT_CODE uint16_t ct_cur_obj; uint16_t ct_num_obj; @@ -123,10 +126,6 @@ STATIC void emit_write_code_info_byte(emit_t* emit, byte val) { *emit_get_cur_to_write_code_info(emit, 1) = val; } -STATIC void emit_write_code_info_uint(emit_t* emit, mp_uint_t val) { - emit_write_uint(emit, emit_get_cur_to_write_code_info, val); -} - STATIC void emit_write_code_info_qstr(emit_t *emit, qstr qst) { #if MICROPY_PERSISTENT_CODE assert((qst >> 16) == 0); @@ -182,20 +181,20 @@ STATIC byte *emit_get_cur_to_write_bytecode(emit_t *emit, int num_bytes_to_write } } -STATIC void emit_write_bytecode_byte(emit_t *emit, byte b1) { +STATIC void emit_write_bytecode_raw_byte(emit_t *emit, byte b1) { byte *c = emit_get_cur_to_write_bytecode(emit, 1); c[0] = b1; } -STATIC void emit_write_bytecode_byte_byte(emit_t* emit, byte b1, byte b2) { - byte *c = emit_get_cur_to_write_bytecode(emit, 2); +STATIC void emit_write_bytecode_byte(emit_t *emit, int stack_adj, byte b1) { + mp_emit_bc_adjust_stack_size(emit, stack_adj); + byte *c = emit_get_cur_to_write_bytecode(emit, 1); c[0] = b1; - c[1] = b2; } // Similar to emit_write_bytecode_uint(), just some extra handling to encode sign -STATIC void emit_write_bytecode_byte_int(emit_t *emit, byte b1, mp_int_t num) { - emit_write_bytecode_byte(emit, b1); +STATIC void emit_write_bytecode_byte_int(emit_t *emit, int stack_adj, byte b1, mp_int_t num) { + emit_write_bytecode_byte(emit, stack_adj, b1); // We store each 7 bits in a separate byte, and that's how many bytes needed byte buf[BYTES_FOR_INT]; @@ -220,40 +219,41 @@ STATIC void emit_write_bytecode_byte_int(emit_t *emit, byte b1, mp_int_t num) { *c = *p; } -STATIC void emit_write_bytecode_byte_uint(emit_t *emit, byte b, mp_uint_t val) { - emit_write_bytecode_byte(emit, b); +STATIC void emit_write_bytecode_byte_uint(emit_t *emit, int stack_adj, byte b, mp_uint_t val) { + emit_write_bytecode_byte(emit, stack_adj, b); emit_write_uint(emit, emit_get_cur_to_write_bytecode, val); } #if MICROPY_PERSISTENT_CODE -STATIC void emit_write_bytecode_byte_const(emit_t *emit, byte b, mp_uint_t n, mp_uint_t c) { +STATIC void emit_write_bytecode_byte_const(emit_t *emit, int stack_adj, byte b, mp_uint_t n, mp_uint_t c) { if (emit->pass == MP_PASS_EMIT) { emit->const_table[n] = c; } - emit_write_bytecode_byte_uint(emit, b, n); + emit_write_bytecode_byte_uint(emit, stack_adj, b, n); } #endif -STATIC void emit_write_bytecode_byte_qstr(emit_t* emit, byte b, qstr qst) { +STATIC void emit_write_bytecode_byte_qstr(emit_t* emit, int stack_adj, byte b, qstr qst) { #if MICROPY_PERSISTENT_CODE assert((qst >> 16) == 0); + mp_emit_bc_adjust_stack_size(emit, stack_adj); byte *c = emit_get_cur_to_write_bytecode(emit, 3); c[0] = b; c[1] = qst; c[2] = qst >> 8; #else - emit_write_bytecode_byte_uint(emit, b, qst); + emit_write_bytecode_byte_uint(emit, stack_adj, b, qst); #endif } -STATIC void emit_write_bytecode_byte_obj(emit_t *emit, byte b, mp_obj_t obj) { +STATIC void emit_write_bytecode_byte_obj(emit_t *emit, int stack_adj, byte b, mp_obj_t obj) { #if MICROPY_PERSISTENT_CODE - emit_write_bytecode_byte_const(emit, b, + emit_write_bytecode_byte_const(emit, stack_adj, b, emit->scope->num_pos_args + emit->scope->num_kwonly_args + 
emit->ct_cur_obj++, (mp_uint_t)obj); #else // aligns the pointer so it is friendly to GC - emit_write_bytecode_byte(emit, b); + emit_write_bytecode_byte(emit, stack_adj, b); emit->bytecode_offset = (size_t)MP_ALIGN(emit->bytecode_offset, sizeof(mp_obj_t)); mp_obj_t *c = (mp_obj_t*)emit_get_cur_to_write_bytecode(emit, sizeof(mp_obj_t)); // Verify thar c is already uint-aligned @@ -262,24 +262,28 @@ STATIC void emit_write_bytecode_byte_obj(emit_t *emit, byte b, mp_obj_t obj) { #endif } -STATIC void emit_write_bytecode_byte_raw_code(emit_t *emit, byte b, mp_raw_code_t *rc) { +STATIC void emit_write_bytecode_byte_raw_code(emit_t *emit, int stack_adj, byte b, mp_raw_code_t *rc) { #if MICROPY_PERSISTENT_CODE - emit_write_bytecode_byte_const(emit, b, + emit_write_bytecode_byte_const(emit, stack_adj, b, emit->scope->num_pos_args + emit->scope->num_kwonly_args + emit->ct_num_obj + emit->ct_cur_raw_code++, (mp_uint_t)(uintptr_t)rc); #else // aligns the pointer so it is friendly to GC - emit_write_bytecode_byte(emit, b); + emit_write_bytecode_byte(emit, stack_adj, b); emit->bytecode_offset = (size_t)MP_ALIGN(emit->bytecode_offset, sizeof(void*)); void **c = (void**)emit_get_cur_to_write_bytecode(emit, sizeof(void*)); // Verify thar c is already uint-aligned assert(c == MP_ALIGN(c, sizeof(void*))); *c = rc; #endif + #if MICROPY_PY_SYS_SETTRACE + rc->line_of_definition = emit->last_source_line; + #endif } // unsigned labels are relative to ip following this instruction, stored as 16 bits -STATIC void emit_write_bytecode_byte_unsigned_label(emit_t *emit, byte b1, mp_uint_t label) { +STATIC void emit_write_bytecode_byte_unsigned_label(emit_t *emit, int stack_adj, byte b1, mp_uint_t label) { + mp_emit_bc_adjust_stack_size(emit, stack_adj); mp_uint_t bytecode_offset; if (emit->pass < MP_PASS_EMIT) { bytecode_offset = 0; @@ -293,7 +297,8 @@ STATIC void emit_write_bytecode_byte_unsigned_label(emit_t *emit, byte b1, mp_ui } // signed labels are relative to ip following this instruction, stored as 16 bits, in excess -STATIC void emit_write_bytecode_byte_signed_label(emit_t *emit, byte b1, mp_uint_t label) { +STATIC void emit_write_bytecode_byte_signed_label(emit_t *emit, int stack_adj, byte b1, mp_uint_t label) { + mp_emit_bc_adjust_stack_size(emit, stack_adj); int bytecode_offset; if (emit->pass < MP_PASS_EMIT) { bytecode_offset = 0; @@ -322,7 +327,7 @@ void mp_emit_bc_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) { emit->bytecode_offset = 0; emit->code_info_offset = 0; - // Write local state size and exception stack size. + // Write local state size, exception stack size, scope flags and number of arguments { mp_uint_t n_state = scope->num_locals + scope->stack_size; if (n_state == 0) { @@ -335,40 +340,22 @@ void mp_emit_bc_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) { // An extra slot in the stack is needed to detect VM stack overflow n_state += 1; #endif - emit_write_code_info_uint(emit, n_state); - emit_write_code_info_uint(emit, scope->exc_stack_size); + + size_t n_exc_stack = scope->exc_stack_size; + MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, scope, emit_write_code_info_byte, emit); } - // Write scope flags and number of arguments. 
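/*
 * [Illustrative sketch, not part of the patch] The hunks above replace the
 * old fixed-width prelude writes with the MP_BC_PRELUDE_SIG_ENCODE /
 * MP_BC_PRELUDE_SIZE_ENCODE macros, and the unsigned values they emit (like
 * those written by emit_write_uint / emit_write_bytecode_byte_int) are
 * stored 7 bits per byte, as the comment above notes. The helpers below are
 * a self-contained re-implementation of that variable-length scheme,
 * assuming the big-endian group order with 0x80 as the continuation bit
 * that the bytecode reader's decode loop expects; they are not the actual
 * MicroPython functions.
 */
#include <stdint.h>
#include <stddef.h>

static size_t varuint_encode(uint32_t val, uint8_t *out) {
    uint8_t groups[5];
    size_t n = 0;
    do {                                    // collect 7-bit groups, least significant first
        groups[n++] = val & 0x7f;
        val >>= 7;
    } while (val != 0);
    for (size_t i = 0; i < n; ++i) {        // write most significant group first
        uint8_t b = groups[n - 1 - i];
        out[i] = (i + 1 < n) ? (b | 0x80) : b;  // 0x80 set on every byte but the last
    }
    return n;
}

static uint32_t varuint_decode(const uint8_t **ip) {
    uint32_t val = 0;
    uint8_t b;
    do {                                    // mirrors the VM's decode loop
        b = *(*ip)++;
        val = (val << 7) | (b & 0x7f);
    } while (b & 0x80);
    return val;
}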
- // TODO check that num args all fit in a byte - emit_write_code_info_byte(emit, emit->scope->scope_flags); - emit_write_code_info_byte(emit, emit->scope->num_pos_args); - emit_write_code_info_byte(emit, emit->scope->num_kwonly_args); - emit_write_code_info_byte(emit, emit->scope->num_def_pos_args); - - // Write size of the rest of the code info. We don't know how big this - // variable uint will be on the MP_PASS_CODE_SIZE pass so we reserve 2 bytes - // for it and hope that is enough! TODO assert this or something. - if (pass == MP_PASS_EMIT) { - emit_write_code_info_uint(emit, emit->code_info_size - emit->code_info_offset); - } else { - emit_get_cur_to_write_code_info(emit, 2); + // Write number of cells and size of the source code info + if (pass >= MP_PASS_CODE_SIZE) { + MP_BC_PRELUDE_SIZE_ENCODE(emit->n_info, emit->n_cell, emit_write_code_info_byte, emit); } + emit->n_info = emit->code_info_offset; + // Write the name and source file of this function. emit_write_code_info_qstr(emit, scope->simple_name); emit_write_code_info_qstr(emit, scope->source_file); - // bytecode prelude: initialise closed over variables - for (int i = 0; i < scope->id_info_len; i++) { - id_info_t *id = &scope->id_info[i]; - if (id->kind == ID_INFO_KIND_CELL) { - assert(id->local_num < 255); - emit_write_bytecode_byte(emit, id->local_num); // write the local which should be converted to a cell - } - } - emit_write_bytecode_byte(emit, 255); // end of list sentinel - #if MICROPY_PERSISTENT_CODE emit->ct_cur_obj = 0; emit->ct_cur_raw_code = 0; @@ -414,6 +401,20 @@ void mp_emit_bc_end_pass(emit_t *emit) { emit_write_code_info_byte(emit, 0); // end of line number info + // Calculate size of source code info section + emit->n_info = emit->code_info_offset - emit->n_info; + + // Emit closure section of prelude + emit->n_cell = 0; + for (size_t i = 0; i < emit->scope->id_info_len; ++i) { + id_info_t *id = &emit->scope->id_info[i]; + if (id->kind == ID_INFO_KIND_CELL) { + assert(id->local_num <= 255); + emit_write_code_info_byte(emit, id->local_num); // write the local which should be converted to a cell + ++emit->n_cell; + } + } + #if MICROPY_PERSISTENT_CODE assert(emit->pass <= MP_PASS_STACK_SIZE || (emit->ct_num_obj == emit->ct_cur_obj)); emit->ct_num_obj = emit->ct_cur_obj; @@ -468,10 +469,6 @@ void mp_emit_bc_adjust_stack_size(emit_t *emit, mp_int_t delta) { emit->last_emit_was_return_value = false; } -static inline void emit_bc_pre(emit_t *emit, mp_int_t stack_size_delta) { - mp_emit_bc_adjust_stack_size(emit, stack_size_delta); -} - void mp_emit_bc_set_source_line(emit_t *emit, mp_uint_t source_line) { //printf("source: line %d -> %d offset %d -> %d\n", emit->last_source_line, source_line, emit->last_source_line_offset, emit->bytecode_offset); #if MICROPY_ENABLE_SOURCE_LINE @@ -493,7 +490,7 @@ void mp_emit_bc_set_source_line(emit_t *emit, mp_uint_t source_line) { } void mp_emit_bc_label_assign(emit_t *emit, mp_uint_t l) { - emit_bc_pre(emit, 0); + mp_emit_bc_adjust_stack_size(emit, 0); if (emit->pass == MP_PASS_SCOPE) { return; } @@ -511,64 +508,54 @@ void mp_emit_bc_label_assign(emit_t *emit, mp_uint_t l) { void mp_emit_bc_import(emit_t *emit, qstr qst, int kind) { MP_STATIC_ASSERT(MP_BC_IMPORT_NAME + MP_EMIT_IMPORT_NAME == MP_BC_IMPORT_NAME); MP_STATIC_ASSERT(MP_BC_IMPORT_NAME + MP_EMIT_IMPORT_FROM == MP_BC_IMPORT_FROM); - if (kind == MP_EMIT_IMPORT_FROM) { - emit_bc_pre(emit, 1); - } else { - emit_bc_pre(emit, -1); - } + int stack_adj = kind == MP_EMIT_IMPORT_FROM ? 
1 : -1; if (kind == MP_EMIT_IMPORT_STAR) { - emit_write_bytecode_byte(emit, MP_BC_IMPORT_STAR); + emit_write_bytecode_byte(emit, stack_adj, MP_BC_IMPORT_STAR); } else { - emit_write_bytecode_byte_qstr(emit, MP_BC_IMPORT_NAME + kind, qst); + emit_write_bytecode_byte_qstr(emit, stack_adj, MP_BC_IMPORT_NAME + kind, qst); } } void mp_emit_bc_load_const_tok(emit_t *emit, mp_token_kind_t tok) { - emit_bc_pre(emit, 1); - switch (tok) { - case MP_TOKEN_KW_FALSE: emit_write_bytecode_byte(emit, MP_BC_LOAD_CONST_FALSE); break; - case MP_TOKEN_KW_NONE: emit_write_bytecode_byte(emit, MP_BC_LOAD_CONST_NONE); break; - case MP_TOKEN_KW_TRUE: emit_write_bytecode_byte(emit, MP_BC_LOAD_CONST_TRUE); break; - default: - assert(tok == MP_TOKEN_ELLIPSIS); - emit_write_bytecode_byte_obj(emit, MP_BC_LOAD_CONST_OBJ, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj)); - break; + MP_STATIC_ASSERT(MP_BC_LOAD_CONST_FALSE + (MP_TOKEN_KW_NONE - MP_TOKEN_KW_FALSE) == MP_BC_LOAD_CONST_NONE); + MP_STATIC_ASSERT(MP_BC_LOAD_CONST_FALSE + (MP_TOKEN_KW_TRUE - MP_TOKEN_KW_FALSE) == MP_BC_LOAD_CONST_TRUE); + if (tok == MP_TOKEN_ELLIPSIS) { + emit_write_bytecode_byte_obj(emit, 1, MP_BC_LOAD_CONST_OBJ, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj)); + } else { + emit_write_bytecode_byte(emit, 1, MP_BC_LOAD_CONST_FALSE + (tok - MP_TOKEN_KW_FALSE)); } } void mp_emit_bc_load_const_small_int(emit_t *emit, mp_int_t arg) { - emit_bc_pre(emit, 1); - if (-16 <= arg && arg <= 47) { - emit_write_bytecode_byte(emit, MP_BC_LOAD_CONST_SMALL_INT_MULTI + 16 + arg); + if (-MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS <= arg + && arg < MP_BC_LOAD_CONST_SMALL_INT_MULTI_NUM - MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS) { + emit_write_bytecode_byte(emit, 1, + MP_BC_LOAD_CONST_SMALL_INT_MULTI + MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS + arg); } else { - emit_write_bytecode_byte_int(emit, MP_BC_LOAD_CONST_SMALL_INT, arg); + emit_write_bytecode_byte_int(emit, 1, MP_BC_LOAD_CONST_SMALL_INT, arg); } } void mp_emit_bc_load_const_str(emit_t *emit, qstr qst) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte_qstr(emit, MP_BC_LOAD_CONST_STRING, qst); + emit_write_bytecode_byte_qstr(emit, 1, MP_BC_LOAD_CONST_STRING, qst); } void mp_emit_bc_load_const_obj(emit_t *emit, mp_obj_t obj) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte_obj(emit, MP_BC_LOAD_CONST_OBJ, obj); + emit_write_bytecode_byte_obj(emit, 1, MP_BC_LOAD_CONST_OBJ, obj); } void mp_emit_bc_load_null(emit_t *emit) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte(emit, MP_BC_LOAD_NULL); + emit_write_bytecode_byte(emit, 1, MP_BC_LOAD_NULL); } void mp_emit_bc_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) { MP_STATIC_ASSERT(MP_BC_LOAD_FAST_N + MP_EMIT_IDOP_LOCAL_FAST == MP_BC_LOAD_FAST_N); MP_STATIC_ASSERT(MP_BC_LOAD_FAST_N + MP_EMIT_IDOP_LOCAL_DEREF == MP_BC_LOAD_DEREF); (void)qst; - emit_bc_pre(emit, 1); if (kind == MP_EMIT_IDOP_LOCAL_FAST && local_num <= 15) { - emit_write_bytecode_byte(emit, MP_BC_LOAD_FAST_MULTI + local_num); + emit_write_bytecode_byte(emit, 1, MP_BC_LOAD_FAST_MULTI + local_num); } else { - emit_write_bytecode_byte_uint(emit, MP_BC_LOAD_FAST_N + kind, local_num); + emit_write_bytecode_byte_uint(emit, 1, MP_BC_LOAD_FAST_N + kind, local_num); } } @@ -576,51 +563,45 @@ void mp_emit_bc_load_global(emit_t *emit, qstr qst, int kind) { MP_STATIC_ASSERT(MP_BC_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_BC_LOAD_NAME); MP_STATIC_ASSERT(MP_BC_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_BC_LOAD_GLOBAL); (void)qst; - emit_bc_pre(emit, 1); - emit_write_bytecode_byte_qstr(emit, 
MP_BC_LOAD_NAME + kind, qst); + emit_write_bytecode_byte_qstr(emit, 1, MP_BC_LOAD_NAME + kind, qst); if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE_DYNAMIC) { - emit_write_bytecode_byte(emit, 0); + emit_write_bytecode_raw_byte(emit, 0); } } void mp_emit_bc_load_method(emit_t *emit, qstr qst, bool is_super) { - emit_bc_pre(emit, 1 - 2 * is_super); - emit_write_bytecode_byte_qstr(emit, is_super ? MP_BC_LOAD_SUPER_METHOD : MP_BC_LOAD_METHOD, qst); + int stack_adj = 1 - 2 * is_super; + emit_write_bytecode_byte_qstr(emit, stack_adj, is_super ? MP_BC_LOAD_SUPER_METHOD : MP_BC_LOAD_METHOD, qst); } void mp_emit_bc_load_build_class(emit_t *emit) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte(emit, MP_BC_LOAD_BUILD_CLASS); + emit_write_bytecode_byte(emit, 1, MP_BC_LOAD_BUILD_CLASS); } void mp_emit_bc_subscr(emit_t *emit, int kind) { if (kind == MP_EMIT_SUBSCR_LOAD) { - emit_bc_pre(emit, -1); - emit_write_bytecode_byte(emit, MP_BC_LOAD_SUBSCR); + emit_write_bytecode_byte(emit, -1, MP_BC_LOAD_SUBSCR); } else { if (kind == MP_EMIT_SUBSCR_DELETE) { mp_emit_bc_load_null(emit); mp_emit_bc_rot_three(emit); } - emit_bc_pre(emit, -3); - emit_write_bytecode_byte(emit, MP_BC_STORE_SUBSCR); + emit_write_bytecode_byte(emit, -3, MP_BC_STORE_SUBSCR); } } void mp_emit_bc_attr(emit_t *emit, qstr qst, int kind) { if (kind == MP_EMIT_ATTR_LOAD) { - emit_bc_pre(emit, 0); - emit_write_bytecode_byte_qstr(emit, MP_BC_LOAD_ATTR, qst); + emit_write_bytecode_byte_qstr(emit, 0, MP_BC_LOAD_ATTR, qst); } else { if (kind == MP_EMIT_ATTR_DELETE) { mp_emit_bc_load_null(emit); mp_emit_bc_rot_two(emit); } - emit_bc_pre(emit, -2); - emit_write_bytecode_byte_qstr(emit, MP_BC_STORE_ATTR, qst); + emit_write_bytecode_byte_qstr(emit, -2, MP_BC_STORE_ATTR, qst); } if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE_DYNAMIC) { - emit_write_bytecode_byte(emit, 0); + emit_write_bytecode_raw_byte(emit, 0); } } @@ -628,98 +609,86 @@ void mp_emit_bc_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kin MP_STATIC_ASSERT(MP_BC_STORE_FAST_N + MP_EMIT_IDOP_LOCAL_FAST == MP_BC_STORE_FAST_N); MP_STATIC_ASSERT(MP_BC_STORE_FAST_N + MP_EMIT_IDOP_LOCAL_DEREF == MP_BC_STORE_DEREF); (void)qst; - emit_bc_pre(emit, -1); if (kind == MP_EMIT_IDOP_LOCAL_FAST && local_num <= 15) { - emit_write_bytecode_byte(emit, MP_BC_STORE_FAST_MULTI + local_num); + emit_write_bytecode_byte(emit, -1, MP_BC_STORE_FAST_MULTI + local_num); } else { - emit_write_bytecode_byte_uint(emit, MP_BC_STORE_FAST_N + kind, local_num); + emit_write_bytecode_byte_uint(emit, -1, MP_BC_STORE_FAST_N + kind, local_num); } } void mp_emit_bc_store_global(emit_t *emit, qstr qst, int kind) { MP_STATIC_ASSERT(MP_BC_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_BC_STORE_NAME); MP_STATIC_ASSERT(MP_BC_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_BC_STORE_GLOBAL); - emit_bc_pre(emit, -1); - emit_write_bytecode_byte_qstr(emit, MP_BC_STORE_NAME + kind, qst); + emit_write_bytecode_byte_qstr(emit, -1, MP_BC_STORE_NAME + kind, qst); } void mp_emit_bc_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) { MP_STATIC_ASSERT(MP_BC_DELETE_FAST + MP_EMIT_IDOP_LOCAL_FAST == MP_BC_DELETE_FAST); MP_STATIC_ASSERT(MP_BC_DELETE_FAST + MP_EMIT_IDOP_LOCAL_DEREF == MP_BC_DELETE_DEREF); (void)qst; - emit_write_bytecode_byte_uint(emit, MP_BC_DELETE_FAST + kind, local_num); + emit_write_bytecode_byte_uint(emit, 0, MP_BC_DELETE_FAST + kind, local_num); } void mp_emit_bc_delete_global(emit_t *emit, qstr qst, int kind) { MP_STATIC_ASSERT(MP_BC_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_BC_DELETE_NAME); 
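/*
 * [Illustrative sketch, not part of the patch] Most of the switch
 * statements in this file and in compile.c are replaced by plain arithmetic
 * on enum values (e.g. MP_BC_LOAD_NAME + kind, or
 * MP_BINARY_OP_LESS + (tok - MP_TOKEN_OP_LESS)), which only works because
 * the token and opcode enumerations are declared in matching order; the
 * MP_STATIC_ASSERT lines lock that ordering in at compile time. The
 * fragment below shows the same pattern with hypothetical enums and C11
 * _Static_assert, not MicroPython's real definitions.
 */
enum tok { TOK_LESS, TOK_MORE, TOK_DBL_EQUAL };   // hypothetical tokens
enum op  { OP_LESS,  OP_MORE,  OP_EQUAL };        // hypothetical operators, same order

_Static_assert(OP_LESS + (TOK_MORE - TOK_LESS) == OP_MORE, "enum order mismatch");
_Static_assert(OP_LESS + (TOK_DBL_EQUAL - TOK_LESS) == OP_EQUAL, "enum order mismatch");

static enum op tok_to_op(enum tok t) {
    return (enum op)(OP_LESS + (t - TOK_LESS));   // one addition instead of a switch
}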
MP_STATIC_ASSERT(MP_BC_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_BC_DELETE_GLOBAL); - emit_bc_pre(emit, 0); - emit_write_bytecode_byte_qstr(emit, MP_BC_DELETE_NAME + kind, qst); + emit_write_bytecode_byte_qstr(emit, 0, MP_BC_DELETE_NAME + kind, qst); } void mp_emit_bc_dup_top(emit_t *emit) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte(emit, MP_BC_DUP_TOP); + emit_write_bytecode_byte(emit, 1, MP_BC_DUP_TOP); } void mp_emit_bc_dup_top_two(emit_t *emit) { - emit_bc_pre(emit, 2); - emit_write_bytecode_byte(emit, MP_BC_DUP_TOP_TWO); + emit_write_bytecode_byte(emit, 2, MP_BC_DUP_TOP_TWO); } void mp_emit_bc_pop_top(emit_t *emit) { - emit_bc_pre(emit, -1); - emit_write_bytecode_byte(emit, MP_BC_POP_TOP); + emit_write_bytecode_byte(emit, -1, MP_BC_POP_TOP); } void mp_emit_bc_rot_two(emit_t *emit) { - emit_bc_pre(emit, 0); - emit_write_bytecode_byte(emit, MP_BC_ROT_TWO); + emit_write_bytecode_byte(emit, 0, MP_BC_ROT_TWO); } void mp_emit_bc_rot_three(emit_t *emit) { - emit_bc_pre(emit, 0); - emit_write_bytecode_byte(emit, MP_BC_ROT_THREE); + emit_write_bytecode_byte(emit, 0, MP_BC_ROT_THREE); } void mp_emit_bc_jump(emit_t *emit, mp_uint_t label) { - emit_bc_pre(emit, 0); - emit_write_bytecode_byte_signed_label(emit, MP_BC_JUMP, label); + emit_write_bytecode_byte_signed_label(emit, 0, MP_BC_JUMP, label); } void mp_emit_bc_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) { - emit_bc_pre(emit, -1); if (cond) { - emit_write_bytecode_byte_signed_label(emit, MP_BC_POP_JUMP_IF_TRUE, label); + emit_write_bytecode_byte_signed_label(emit, -1, MP_BC_POP_JUMP_IF_TRUE, label); } else { - emit_write_bytecode_byte_signed_label(emit, MP_BC_POP_JUMP_IF_FALSE, label); + emit_write_bytecode_byte_signed_label(emit, -1, MP_BC_POP_JUMP_IF_FALSE, label); } } void mp_emit_bc_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) { - emit_bc_pre(emit, -1); if (cond) { - emit_write_bytecode_byte_signed_label(emit, MP_BC_JUMP_IF_TRUE_OR_POP, label); + emit_write_bytecode_byte_signed_label(emit, -1, MP_BC_JUMP_IF_TRUE_OR_POP, label); } else { - emit_write_bytecode_byte_signed_label(emit, MP_BC_JUMP_IF_FALSE_OR_POP, label); + emit_write_bytecode_byte_signed_label(emit, -1, MP_BC_JUMP_IF_FALSE_OR_POP, label); } } void mp_emit_bc_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) { if (except_depth == 0) { - emit_bc_pre(emit, 0); if (label & MP_EMIT_BREAK_FROM_FOR) { // need to pop the iterator if we are breaking out of a for loop - emit_write_bytecode_byte(emit, MP_BC_POP_TOP); + emit_write_bytecode_raw_byte(emit, MP_BC_POP_TOP); // also pop the iter_buf for (size_t i = 0; i < MP_OBJ_ITER_BUF_NSLOTS - 1; ++i) { - emit_write_bytecode_byte(emit, MP_BC_POP_TOP); + emit_write_bytecode_raw_byte(emit, MP_BC_POP_TOP); } } - emit_write_bytecode_byte_signed_label(emit, MP_BC_JUMP, label & ~MP_EMIT_BREAK_FROM_FOR); + emit_write_bytecode_byte_signed_label(emit, 0, MP_BC_JUMP, label & ~MP_EMIT_BREAK_FROM_FOR); } else { - emit_write_bytecode_byte_signed_label(emit, MP_BC_UNWIND_JUMP, label & ~MP_EMIT_BREAK_FROM_FOR); - emit_write_bytecode_byte(emit, ((label & MP_EMIT_BREAK_FROM_FOR) ? 0x80 : 0) | except_depth); + emit_write_bytecode_byte_signed_label(emit, 0, MP_BC_UNWIND_JUMP, label & ~MP_EMIT_BREAK_FROM_FOR); + emit_write_bytecode_raw_byte(emit, ((label & MP_EMIT_BREAK_FROM_FOR) ? 
0x80 : 0) | except_depth); } } @@ -727,52 +696,45 @@ void mp_emit_bc_setup_block(emit_t *emit, mp_uint_t label, int kind) { MP_STATIC_ASSERT(MP_BC_SETUP_WITH + MP_EMIT_SETUP_BLOCK_WITH == MP_BC_SETUP_WITH); MP_STATIC_ASSERT(MP_BC_SETUP_WITH + MP_EMIT_SETUP_BLOCK_EXCEPT == MP_BC_SETUP_EXCEPT); MP_STATIC_ASSERT(MP_BC_SETUP_WITH + MP_EMIT_SETUP_BLOCK_FINALLY == MP_BC_SETUP_FINALLY); - if (kind == MP_EMIT_SETUP_BLOCK_WITH) { // The SETUP_WITH opcode pops ctx_mgr from the top of the stack // and then pushes 3 entries: __exit__, ctx_mgr, as_value. - emit_bc_pre(emit, 2); - } else { - emit_bc_pre(emit, 0); - } - emit_write_bytecode_byte_unsigned_label(emit, MP_BC_SETUP_WITH + kind, label); + int stack_adj = kind == MP_EMIT_SETUP_BLOCK_WITH ? 2 : 0; + emit_write_bytecode_byte_unsigned_label(emit, stack_adj, MP_BC_SETUP_WITH + kind, label); } void mp_emit_bc_with_cleanup(emit_t *emit, mp_uint_t label) { mp_emit_bc_load_const_tok(emit, MP_TOKEN_KW_NONE); mp_emit_bc_label_assign(emit, label); - emit_bc_pre(emit, 2); // ensure we have enough stack space to call the __exit__ method - emit_write_bytecode_byte(emit, MP_BC_WITH_CLEANUP); - emit_bc_pre(emit, -4); // cancel the 2 above, plus the 2 from mp_emit_bc_setup_block(MP_EMIT_SETUP_BLOCK_WITH) + // The +2 is to ensure we have enough stack space to call the __exit__ method + emit_write_bytecode_byte(emit, 2, MP_BC_WITH_CLEANUP); + // Cancel the +2 above, plus the +2 from mp_emit_bc_setup_block(MP_EMIT_SETUP_BLOCK_WITH) + mp_emit_bc_adjust_stack_size(emit, -4); } void mp_emit_bc_end_finally(emit_t *emit) { - emit_bc_pre(emit, -1); - emit_write_bytecode_byte(emit, MP_BC_END_FINALLY); + emit_write_bytecode_byte(emit, -1, MP_BC_END_FINALLY); } void mp_emit_bc_get_iter(emit_t *emit, bool use_stack) { - emit_bc_pre(emit, use_stack ? MP_OBJ_ITER_BUF_NSLOTS - 1 : 0); - emit_write_bytecode_byte(emit, use_stack ? MP_BC_GET_ITER_STACK : MP_BC_GET_ITER); + int stack_adj = use_stack ? MP_OBJ_ITER_BUF_NSLOTS - 1 : 0; + emit_write_bytecode_byte(emit, stack_adj, use_stack ? 
MP_BC_GET_ITER_STACK : MP_BC_GET_ITER); } void mp_emit_bc_for_iter(emit_t *emit, mp_uint_t label) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte_unsigned_label(emit, MP_BC_FOR_ITER, label); + emit_write_bytecode_byte_unsigned_label(emit, 1, MP_BC_FOR_ITER, label); } void mp_emit_bc_for_iter_end(emit_t *emit) { - emit_bc_pre(emit, -MP_OBJ_ITER_BUF_NSLOTS); + mp_emit_bc_adjust_stack_size(emit, -MP_OBJ_ITER_BUF_NSLOTS); } void mp_emit_bc_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) { (void)within_exc_handler; - emit_bc_pre(emit, 0); - emit_write_bytecode_byte_unsigned_label(emit, MP_BC_POP_EXCEPT_JUMP, label); + emit_write_bytecode_byte_unsigned_label(emit, 0, MP_BC_POP_EXCEPT_JUMP, label); } void mp_emit_bc_unary_op(emit_t *emit, mp_unary_op_t op) { - emit_bc_pre(emit, 0); - emit_write_bytecode_byte(emit, MP_BC_UNARY_OP_MULTI + op); + emit_write_bytecode_byte(emit, 0, MP_BC_UNARY_OP_MULTI + op); } void mp_emit_bc_binary_op(emit_t *emit, mp_binary_op_t op) { @@ -784,11 +746,9 @@ void mp_emit_bc_binary_op(emit_t *emit, mp_binary_op_t op) { invert = true; op = MP_BINARY_OP_IS; } - emit_bc_pre(emit, -1); - emit_write_bytecode_byte(emit, MP_BC_BINARY_OP_MULTI + op); + emit_write_bytecode_byte(emit, -1, MP_BC_BINARY_OP_MULTI + op); if (invert) { - emit_bc_pre(emit, 0); - emit_write_bytecode_byte(emit, MP_BC_UNARY_OP_MULTI + MP_UNARY_OP_NOT); + emit_write_bytecode_byte(emit, 0, MP_BC_UNARY_OP_MULTI + MP_UNARY_OP_NOT); } } @@ -798,17 +758,12 @@ void mp_emit_bc_build(emit_t *emit, mp_uint_t n_args, int kind) { MP_STATIC_ASSERT(MP_BC_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_BC_BUILD_MAP); MP_STATIC_ASSERT(MP_BC_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_BC_BUILD_SET); MP_STATIC_ASSERT(MP_BC_BUILD_TUPLE + MP_EMIT_BUILD_SLICE == MP_BC_BUILD_SLICE); - if (kind == MP_EMIT_BUILD_MAP) { - emit_bc_pre(emit, 1); - } else { - emit_bc_pre(emit, 1 - n_args); - } - emit_write_bytecode_byte_uint(emit, MP_BC_BUILD_TUPLE + kind, n_args); + int stack_adj = kind == MP_EMIT_BUILD_MAP ? 
1 : 1 - n_args; + emit_write_bytecode_byte_uint(emit, stack_adj, MP_BC_BUILD_TUPLE + kind, n_args); } void mp_emit_bc_store_map(emit_t *emit) { - emit_bc_pre(emit, -2); - emit_write_bytecode_byte(emit, MP_BC_STORE_MAP); + emit_write_bytecode_byte(emit, -2, MP_BC_STORE_MAP); } void mp_emit_bc_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_stack_index) { @@ -824,51 +779,46 @@ void mp_emit_bc_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection n = 0; t = 2; } - emit_bc_pre(emit, -1 - n); // the lower 2 bits of the opcode argument indicate the collection type - emit_write_bytecode_byte_uint(emit, MP_BC_STORE_COMP, ((collection_stack_index + n) << 2) | t); + emit_write_bytecode_byte_uint(emit, -1 - n, MP_BC_STORE_COMP, ((collection_stack_index + n) << 2) | t); } void mp_emit_bc_unpack_sequence(emit_t *emit, mp_uint_t n_args) { - emit_bc_pre(emit, -1 + n_args); - emit_write_bytecode_byte_uint(emit, MP_BC_UNPACK_SEQUENCE, n_args); + emit_write_bytecode_byte_uint(emit, -1 + n_args, MP_BC_UNPACK_SEQUENCE, n_args); } void mp_emit_bc_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) { - emit_bc_pre(emit, -1 + n_left + n_right + 1); - emit_write_bytecode_byte_uint(emit, MP_BC_UNPACK_EX, n_left | (n_right << 8)); + emit_write_bytecode_byte_uint(emit, -1 + n_left + n_right + 1, MP_BC_UNPACK_EX, n_left | (n_right << 8)); } void mp_emit_bc_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) { if (n_pos_defaults == 0 && n_kw_defaults == 0) { - emit_bc_pre(emit, 1); - emit_write_bytecode_byte_raw_code(emit, MP_BC_MAKE_FUNCTION, scope->raw_code); + emit_write_bytecode_byte_raw_code(emit, 1, MP_BC_MAKE_FUNCTION, scope->raw_code); } else { - emit_bc_pre(emit, -1); - emit_write_bytecode_byte_raw_code(emit, MP_BC_MAKE_FUNCTION_DEFARGS, scope->raw_code); + emit_write_bytecode_byte_raw_code(emit, -1, MP_BC_MAKE_FUNCTION_DEFARGS, scope->raw_code); } } void mp_emit_bc_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) { if (n_pos_defaults == 0 && n_kw_defaults == 0) { - emit_bc_pre(emit, -n_closed_over + 1); - emit_write_bytecode_byte_raw_code(emit, MP_BC_MAKE_CLOSURE, scope->raw_code); - emit_write_bytecode_byte(emit, n_closed_over); + int stack_adj = -n_closed_over + 1; + emit_write_bytecode_byte_raw_code(emit, stack_adj, MP_BC_MAKE_CLOSURE, scope->raw_code); + emit_write_bytecode_raw_byte(emit, n_closed_over); } else { assert(n_closed_over <= 255); - emit_bc_pre(emit, -2 - (mp_int_t)n_closed_over + 1); - emit_write_bytecode_byte_raw_code(emit, MP_BC_MAKE_CLOSURE_DEFARGS, scope->raw_code); - emit_write_bytecode_byte(emit, n_closed_over); + int stack_adj = -2 - (mp_int_t)n_closed_over + 1; + emit_write_bytecode_byte_raw_code(emit, stack_adj, MP_BC_MAKE_CLOSURE_DEFARGS, scope->raw_code); + emit_write_bytecode_raw_byte(emit, n_closed_over); } } -STATIC void emit_bc_call_function_method_helper(emit_t *emit, mp_int_t stack_adj, mp_uint_t bytecode_base, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) { +STATIC void emit_bc_call_function_method_helper(emit_t *emit, int stack_adj, mp_uint_t bytecode_base, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) { if (star_flags) { - emit_bc_pre(emit, stack_adj - (mp_int_t)n_positional - 2 * (mp_int_t)n_keyword - 2); - emit_write_bytecode_byte_uint(emit, bytecode_base + 1, (n_keyword << 8) | n_positional); // TODO make it 2 separate uints? 
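/*
 * [Illustrative sketch, not part of the patch] With emit_bc_pre() removed,
 * the call helpers below fold the net stack effect of a call opcode
 * directly into the stack_adj argument: each positional argument consumes
 * one slot, each keyword argument two slots (name + value), and
 * star/double-star calls consume two extra slots for the *args and **kwargs
 * objects. The hypothetical helper below just restates that arithmetic;
 * base_adj stands for whatever the caller already accounted for (callable,
 * self/method slot, pushed result).
 */
static int call_stack_adjust(int base_adj, unsigned n_positional,
                             unsigned n_keyword, int has_star_args) {
    int adj = base_adj;
    adj -= (int)n_positional;          // positional arguments
    adj -= 2 * (int)n_keyword;         // keyword arguments: name + value
    if (has_star_args) {
        adj -= 2;                      // *args and **kwargs objects
    }
    return adj;
}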
+ stack_adj -= (int)n_positional + 2 * (int)n_keyword + 2; + emit_write_bytecode_byte_uint(emit, stack_adj, bytecode_base + 1, (n_keyword << 8) | n_positional); // TODO make it 2 separate uints? } else { - emit_bc_pre(emit, stack_adj - (mp_int_t)n_positional - 2 * (mp_int_t)n_keyword); - emit_write_bytecode_byte_uint(emit, bytecode_base, (n_keyword << 8) | n_positional); // TODO make it 2 separate uints? + stack_adj -= (int)n_positional + 2 * (int)n_keyword; + emit_write_bytecode_byte_uint(emit, stack_adj, bytecode_base, (n_keyword << 8) | n_positional); // TODO make it 2 separate uints? } } @@ -881,22 +831,21 @@ void mp_emit_bc_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_ke } void mp_emit_bc_return_value(emit_t *emit) { - emit_bc_pre(emit, -1); + emit_write_bytecode_byte(emit, -1, MP_BC_RETURN_VALUE); emit->last_emit_was_return_value = true; - emit_write_bytecode_byte(emit, MP_BC_RETURN_VALUE); } void mp_emit_bc_raise_varargs(emit_t *emit, mp_uint_t n_args) { + MP_STATIC_ASSERT(MP_BC_RAISE_LAST + 1 == MP_BC_RAISE_OBJ); + MP_STATIC_ASSERT(MP_BC_RAISE_LAST + 2 == MP_BC_RAISE_FROM); assert(n_args <= 2); - emit_bc_pre(emit, -n_args); - emit_write_bytecode_byte_byte(emit, MP_BC_RAISE_VARARGS, n_args); + emit_write_bytecode_byte(emit, -n_args, MP_BC_RAISE_LAST + n_args); } void mp_emit_bc_yield(emit_t *emit, int kind) { MP_STATIC_ASSERT(MP_BC_YIELD_VALUE + 1 == MP_BC_YIELD_FROM); - emit_bc_pre(emit, -kind); + emit_write_bytecode_byte(emit, -kind, MP_BC_YIELD_VALUE + kind); emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR; - emit_write_bytecode_byte(emit, MP_BC_YIELD_VALUE + kind); } void mp_emit_bc_start_except_handler(emit_t *emit) { diff --git a/python/src/py/emitglue.c b/python/src/py/emitglue.c index c073258f0..d30a1e674 100644 --- a/python/src/py/emitglue.c +++ b/python/src/py/emitglue.c @@ -34,6 +34,7 @@ #include "py/emitglue.h" #include "py/runtime0.h" #include "py/bc.h" +#include "py/profile.h" #if MICROPY_DEBUG_VERBOSE // print debugging info #define DEBUG_PRINT (1) @@ -52,6 +53,9 @@ mp_uint_t mp_verbose_flag = 0; mp_raw_code_t *mp_emit_glue_new_raw_code(void) { mp_raw_code_t *rc = m_new0(mp_raw_code_t, 1); rc->kind = MP_CODE_RESERVED; + #if MICROPY_PY_SYS_SETTRACE + rc->line_of_definition = 0; + #endif return rc; } @@ -75,6 +79,11 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, const byte *code, rc->n_raw_code = n_raw_code; #endif + #if MICROPY_PY_SYS_SETTRACE + mp_bytecode_prelude_t *prelude = &rc->prelude; + mp_prof_extract_prelude(code, prelude); + #endif + #ifdef DEBUG_PRINT #if !MICROPY_DEBUG_PRINTERS const size_t len = 0; @@ -88,7 +97,7 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, const byte *code, #endif } -#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM +#if MICROPY_EMIT_MACHINE_CODE void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun_data, mp_uint_t fun_len, const mp_uint_t *const_table, #if MICROPY_PERSISTENT_CODE_SAVE uint16_t prelude_offset, @@ -172,6 +181,12 @@ mp_obj_t mp_make_function_from_raw_code(const mp_raw_code_t *rc, mp_obj_t def_ar if ((rc->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) { ((mp_obj_base_t*)MP_OBJ_TO_PTR(fun))->type = &mp_type_gen_wrap; } + + #if MICROPY_PY_SYS_SETTRACE + mp_obj_fun_bc_t *self_fun = (mp_obj_fun_bc_t *)MP_OBJ_TO_PTR(fun); + self_fun->rc = rc; + #endif + break; } diff --git a/python/src/py/emitglue.h b/python/src/py/emitglue.h index 058f06018..a5411dc2e 100644 --- a/python/src/py/emitglue.h +++ b/python/src/py/emitglue.h @@ -27,6 +27,7 @@ #define 
MICROPY_INCLUDED_PY_EMITGLUE_H #include "py/obj.h" +#include "py/bc.h" // These variables and functions glue the code emitters to the runtime. @@ -63,13 +64,21 @@ typedef struct _mp_raw_code_t { size_t fun_data_len; uint16_t n_obj; uint16_t n_raw_code; - #if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM + #if MICROPY_PY_SYS_SETTRACE + mp_bytecode_prelude_t prelude; + // line_of_definition is a Python source line where the raw_code was + // created e.g. MP_BC_MAKE_FUNCTION. This is different from lineno info + // stored in prelude, which provides line number for first statement of + // a function. Required to properly implement "call" trace event. + mp_uint_t line_of_definition; + #endif + #if MICROPY_EMIT_MACHINE_CODE uint16_t prelude_offset; uint16_t n_qstr; mp_qstr_link_entry_t *qstr_link; #endif #endif - #if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM + #if MICROPY_EMIT_MACHINE_CODE mp_uint_t type_sig; // for viper, compressed as 2-bit types; ret is MSB, then arg0, arg1, etc #endif } mp_raw_code_t; diff --git a/python/src/py/emitnative.c b/python/src/py/emitnative.c index f123ecbb5..07b984b78 100644 --- a/python/src/py/emitnative.c +++ b/python/src/py/emitnative.c @@ -47,7 +47,8 @@ #include #include "py/emit.h" -#include "py/bc.h" +#include "py/nativeglue.h" +#include "py/objstr.h" #if MICROPY_DEBUG_VERBOSE // print debugging info #define DEBUG_PRINT (1) @@ -57,7 +58,7 @@ #endif // wrapper around everything in this file -#if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA +#if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN // C stack layout for native functions: // 0: nlr_buf_t [optional] @@ -80,6 +81,30 @@ // locals (reversed, L0 at end) | // (L0-L2 may be in regs instead) +// Native emitter needs to know the following sizes and offsets of C structs (on the target): +#if MICROPY_DYNAMIC_COMPILER +#define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK enabled +#else +#define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t)) +#endif +#define SIZEOF_CODE_STATE (sizeof(mp_code_state_t) / sizeof(uintptr_t)) +#define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_t, state) / sizeof(uintptr_t)) +#define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t)) +#define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_t, ip) / sizeof(uintptr_t)) +#define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_t, sp) / sizeof(uintptr_t)) +#define OFFSETOF_OBJ_FUN_BC_GLOBALS (offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t)) +#define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t)) +#define OFFSETOF_OBJ_FUN_BC_CONST_TABLE (offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t)) + +// If not already defined, set parent args to same as child call registers +#ifndef REG_PARENT_RET +#define REG_PARENT_RET REG_RET +#define REG_PARENT_ARG_1 REG_ARG_1 +#define REG_PARENT_ARG_2 REG_ARG_2 +#define REG_PARENT_ARG_3 REG_ARG_3 +#define REG_PARENT_ARG_4 REG_ARG_4 +#endif + // Word index of nlr_buf_t.ret_val #define NLR_BUF_IDX_RET_VAL (1) @@ -101,9 +126,9 @@ #define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1) #define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (NLR_BUF_IDX_LOCAL_2) #define LOCAL_IDX_RET_VAL(emit) (NLR_BUF_IDX_LOCAL_3) -#define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t)) -#define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + offsetof(mp_code_state_t, ip) / 
sizeof(uintptr_t)) -#define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + offsetof(mp_code_state_t, ip) / sizeof(uintptr_t)) +#define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC) +#define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP) +#define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP) #define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num)) #define REG_GENERATOR_STATE (REG_LOCAL_3) @@ -194,6 +219,7 @@ struct _emit_t { uint16_t code_state_start; uint16_t stack_start; int stack_size; + uint16_t n_cell; uint16_t const_table_cur_obj; uint16_t const_table_num_obj; @@ -309,7 +335,11 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop emit->pass = pass; emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER; emit->stack_size = 0; + #if N_PRELUDE_AS_BYTES_OBJ + emit->const_table_cur_obj = emit->do_viper_types ? 0 : 1; // reserve first obj for prelude bytes obj + #else emit->const_table_cur_obj = 0; + #endif emit->const_table_cur_raw_code = 0; #if MICROPY_PERSISTENT_CODE_SAVE emit->qstr_link_cur = 0; @@ -364,7 +394,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop // Work out start of code state (mp_code_state_t or reduced version for viper) emit->code_state_start = 0; if (NEED_GLOBAL_EXC_HANDLER(emit)) { - emit->code_state_start = sizeof(nlr_buf_t) / sizeof(uintptr_t); + emit->code_state_start = SIZEOF_NLR_BUF; } if (emit->do_viper_types) { @@ -398,16 +428,16 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs); #if N_X86 - asm_x86_mov_arg_to_r32(emit->as, 0, REG_ARG_1); + asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1); #endif // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table - ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_ARG_1, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t)); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONST_TABLE); ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_3, 0); // Store function object (passed as first arg) to stack if needed if (NEED_FUN_OBJ(emit)) { - ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_ARG_1); + ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1); } // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_3 @@ -416,9 +446,9 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2); asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_3); #else - ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_ARG_2); - ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_ARG_3); - ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_4); + ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2); + ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3); + ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_4); #endif // Check number of args matches this function, and call mp_arg_check_num_sig if not @@ -458,31 +488,36 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) { emit->code_state_start = 0; - emit->stack_start = sizeof(mp_code_state_t) / sizeof(mp_uint_t); + emit->stack_start = SIZEOF_CODE_STATE; + #if N_PRELUDE_AS_BYTES_OBJ + // Load index of prelude bytes 
object in const_table + mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)(emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1)); + #else mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_offset); + #endif mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset); - ASM_ENTRY(emit->as, sizeof(nlr_buf_t) / sizeof(uintptr_t)); + ASM_ENTRY(emit->as, SIZEOF_NLR_BUF); // Put address of code_state into REG_GENERATOR_STATE #if N_X86 asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE); #else - ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_ARG_1); + ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1); #endif // Put throw value into LOCAL_IDX_EXC_VAL slot, for yield/yield-from #if N_X86 - asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_2); + asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2); #endif - ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_ARG_2); + ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_PARENT_ARG_2); // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit)); - ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t)); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONST_TABLE); ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, emit->scope->num_pos_args + emit->scope->num_kwonly_args); } else { // The locals and stack start after the code_state structure - emit->stack_start = emit->code_state_start + sizeof(mp_code_state_t) / sizeof(mp_uint_t); + emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE; // Allocate space on C-stack for code_state structure, which includes state ASM_ENTRY(emit->as, emit->stack_start + emit->n_state); @@ -490,26 +525,49 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop // Prepare incoming arguments for call to mp_setup_code_state #if N_X86 - asm_x86_mov_arg_to_r32(emit->as, 0, REG_ARG_1); - asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_2); - asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_3); - asm_x86_mov_arg_to_r32(emit->as, 3, REG_ARG_4); + asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1); + asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2); + asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3); + asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4); #endif // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table - ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_ARG_1, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t)); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONST_TABLE); ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_3, emit->scope->num_pos_args + emit->scope->num_kwonly_args); // Set code_state.fun_bc - ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_ARG_1); + ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1); // Set code_state.ip (offset from start of this function to prelude info) + #if N_PRELUDE_AS_BYTES_OBJ + // Prelude is a bytes object in const_table; store ip = prelude->data - fun_bc->bytecode + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_LOCAL_3, emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_LOCAL_3, offsetof(mp_obj_str_t, data) / sizeof(uintptr_t)); + 
ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_BYTECODE); + ASM_SUB_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1); + emit_native_mov_state_reg(emit, emit->code_state_start + OFFSETOF_CODE_STATE_IP, REG_LOCAL_3); + #else // TODO this encoding may change size in the final pass, need to make it fixed - emit_native_mov_state_imm_via(emit, emit->code_state_start + offsetof(mp_code_state_t, ip) / sizeof(uintptr_t), emit->prelude_offset, REG_ARG_1); + emit_native_mov_state_imm_via(emit, emit->code_state_start + OFFSETOF_CODE_STATE_IP, emit->prelude_offset, REG_PARENT_ARG_1); + #endif + + // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t) + emit_native_mov_state_imm_via(emit, emit->code_state_start + offsetof(mp_code_state_t, n_state) / sizeof(uintptr_t), emit->n_state, REG_ARG_1); // Put address of code_state into first arg ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start); + // Copy next 3 args if needed + #if REG_ARG_2 != REG_PARENT_ARG_2 + ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2); + #endif + #if REG_ARG_3 != REG_PARENT_ARG_3 + ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3); + #endif + #if REG_ARG_4 != REG_PARENT_ARG_4 + ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4); + #endif + // Call mp_setup_code_state to prepare code_state structure #if N_THUMB asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4); @@ -556,22 +614,28 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop } +static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) { + mp_asm_base_data(&emit->as->base, 1, val); +} + STATIC void emit_native_end_pass(emit_t *emit) { emit_native_global_exc_exit(emit); if (!emit->do_viper_types) { emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base); - mp_asm_base_data(&emit->as->base, 1, 0x80 | ((emit->n_state >> 7) & 0x7f)); - mp_asm_base_data(&emit->as->base, 1, emit->n_state & 0x7f); - mp_asm_base_data(&emit->as->base, 1, 0); // n_exc_stack - mp_asm_base_data(&emit->as->base, 1, emit->scope->scope_flags); - mp_asm_base_data(&emit->as->base, 1, emit->scope->num_pos_args); - mp_asm_base_data(&emit->as->base, 1, emit->scope->num_kwonly_args); - mp_asm_base_data(&emit->as->base, 1, emit->scope->num_def_pos_args); - // write code info + size_t n_state = emit->n_state; + size_t n_exc_stack = 0; // exc-stack not needed for native code + MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit); + + #if MICROPY_PERSISTENT_CODE + size_t n_info = 4; + #else + size_t n_info = 1; + #endif + MP_BC_PRELUDE_SIZE_ENCODE(n_info, emit->n_cell, emit_native_write_code_info_byte, emit); + #if MICROPY_PERSISTENT_CODE - mp_asm_base_data(&emit->as->base, 1, 5); mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name); mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name >> 8); mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file); @@ -581,14 +645,25 @@ STATIC void emit_native_end_pass(emit_t *emit) { #endif // bytecode prelude: initialise closed over variables + size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base); for (int i = 0; i < emit->scope->id_info_len; i++) { id_info_t *id = &emit->scope->id_info[i]; if (id->kind == ID_INFO_KIND_CELL) { - assert(id->local_num < 255); + assert(id->local_num <= 255); mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell } } - 
mp_asm_base_data(&emit->as->base, 1, 255); // end of list sentinel + emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start; + + #if N_PRELUDE_AS_BYTES_OBJ + // Prelude bytes object is after qstr arg names and mp_fun_table + size_t table_off = emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1; + if (emit->pass == MP_PASS_EMIT) { + void *buf = emit->as->base.code_base + emit->prelude_offset; + size_t n = emit->as->base.code_offset - emit->prelude_offset; + emit->const_table[table_off] = (uintptr_t)mp_obj_new_bytes(buf, n); + } + #endif } ASM_END_PASS(emit->as); @@ -609,8 +684,11 @@ STATIC void emit_native_end_pass(emit_t *emit) { const_table_alloc += nqstr; } emit->const_table = m_new(mp_uint_t, const_table_alloc); + #if !MICROPY_DYNAMIC_COMPILER // Store mp_fun_table pointer just after qstrs - emit->const_table[nqstr] = (mp_uint_t)(uintptr_t)mp_fun_table; + // (but in dynamic-compiler mode eliminate dependency on mp_fun_table) + emit->const_table[nqstr] = (mp_uint_t)(uintptr_t)&mp_fun_table; + #endif #if MICROPY_PERSISTENT_CODE_SAVE size_t qstr_link_alloc = emit->qstr_link_cur; @@ -1033,7 +1111,7 @@ STATIC void emit_load_reg_with_ptr(emit_t *emit, int reg, mp_uint_t ptr, size_t emit->const_table[table_off] = ptr; } emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit)); - ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t)); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONST_TABLE); ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off); } @@ -1090,7 +1168,7 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) { if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) { // Set new globals emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit)); - ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t)); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_GLOBALS); emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS); // Save old globals (or NULL if globals didn't change) @@ -1106,6 +1184,10 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) { // Wrap everything in an nlr context ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0); emit_call(emit, MP_F_NLR_PUSH); + #if N_NLR_SETJMP + ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2); + emit_call(emit, MP_F_SETJMP); + #endif ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true); } else { // Clear the unwind state @@ -1120,6 +1202,10 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) { ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_2, LOCAL_IDX_EXC_HANDLER_UNWIND(emit)); ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0); emit_call(emit, MP_F_NLR_PUSH); + #if N_NLR_SETJMP + ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2); + emit_call(emit, MP_F_SETJMP); + #endif ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_LOCAL_2); ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true); @@ -1130,6 +1216,12 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) { // Global exception handler: check for valid exception handler emit_native_label_assign(emit, global_except_label); + #if N_NLR_SETJMP + // Reload REG_FUN_TABLE, since it may be clobbered by longjmp + emit_native_mov_reg_state(emit, REG_LOCAL_1, LOCAL_IDX_FUN_OBJ(emit)); + ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t)); + ASM_LOAD_REG_REG_OFFSET(emit->as, 
REG_FUN_TABLE, REG_LOCAL_1, emit->scope->num_pos_args + emit->scope->num_kwonly_args); + #endif ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit)); ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false); } @@ -1143,10 +1235,10 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) { if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) { // Store return value in state[0] ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit)); - ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, offsetof(mp_code_state_t, state) / sizeof(uintptr_t)); + ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE); // Load return kind - ASM_MOV_REG_IMM(emit->as, REG_RET, MP_VM_RETURN_EXCEPTION); + ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION); ASM_EXIT(emit->as); } else { @@ -1201,7 +1293,7 @@ STATIC void emit_native_global_exc_exit(emit_t *emit) { } // Load return value - ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_RET_VAL(emit)); + ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit)); } ASM_EXIT(emit->as); @@ -2312,7 +2404,7 @@ STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) { ASM_ARM_CC_NE, }; asm_arm_setcc_reg(emit->as, REG_RET, ccs[op - MP_BINARY_OP_LESS]); - #elif N_XTENSA + #elif N_XTENSA || N_XTENSAWIN static uint8_t ccs[6] = { ASM_XTENSA_CC_LT, 0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args @@ -2572,7 +2664,7 @@ STATIC void emit_native_return_value(emit_t *emit) { if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) { // Save pointer to current stack position for caller to access return value emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1); - emit_native_mov_state_reg(emit, offsetof(mp_code_state_t, sp) / sizeof(uintptr_t), REG_TEMP0); + emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0); // Put return type in return value slot ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL); @@ -2589,13 +2681,13 @@ STATIC void emit_native_return_value(emit_t *emit) { if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) { emit_pre_pop_discard(emit); if (return_vtype == VTYPE_PYOBJ) { - emit_native_mov_reg_const(emit, REG_RET, MP_F_CONST_NONE_OBJ); + emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ); } else { ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0); } } else { vtype_kind_t vtype; - emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_RET : REG_ARG_1); + emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? 
REG_PARENT_RET : REG_ARG_1); if (vtype != return_vtype) { EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "return expected '%q' but got '%q'", @@ -2604,15 +2696,18 @@ STATIC void emit_native_return_value(emit_t *emit) { } if (return_vtype != VTYPE_PYOBJ) { emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2); + #if REG_RET != REG_PARENT_RET + ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET); + #endif } } else { vtype_kind_t vtype; - emit_pre_pop_reg(emit, &vtype, REG_RET); + emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET); assert(vtype == VTYPE_PYOBJ); } if (NEED_GLOBAL_EXC_HANDLER(emit)) { // Save return value for the global exception handler to use - ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_RET); + ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET); } emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size); emit->last_emit_was_return_value = true; @@ -2655,7 +2750,7 @@ STATIC void emit_native_yield(emit_t *emit, int kind) { // Save pointer to current stack position for caller to access yielded value emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1); - emit_native_mov_state_reg(emit, offsetof(mp_code_state_t, sp) / sizeof(uintptr_t), REG_TEMP0); + emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0); // Put return type in return value slot ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD); diff --git a/python/src/py/emitnx86.c b/python/src/py/emitnx86.c index 7c96c3b82..f0553f068 100644 --- a/python/src/py/emitnx86.c +++ b/python/src/py/emitnx86.c @@ -1,7 +1,7 @@ // x86 specific stuff #include "py/mpconfig.h" -#include "py/runtime0.h" +#include "py/nativeglue.h" #if MICROPY_EMIT_X86 @@ -34,13 +34,11 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = { [MP_F_BINARY_OP] = 3, [MP_F_BUILD_TUPLE] = 2, [MP_F_BUILD_LIST] = 2, - [MP_F_LIST_APPEND] = 2, [MP_F_BUILD_MAP] = 1, - [MP_F_STORE_MAP] = 3, - #if MICROPY_PY_BUILTINS_SET [MP_F_BUILD_SET] = 2, [MP_F_STORE_SET] = 2, - #endif + [MP_F_LIST_APPEND] = 2, + [MP_F_STORE_MAP] = 3, [MP_F_MAKE_FUNCTION_FROM_RAW_CODE] = 3, [MP_F_NATIVE_CALL_FUNCTION_N_KW] = 3, [MP_F_CALL_METHOD_N_KW] = 3, @@ -53,20 +51,18 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = { [MP_F_IMPORT_NAME] = 3, [MP_F_IMPORT_FROM] = 2, [MP_F_IMPORT_ALL] = 1, - #if MICROPY_PY_BUILTINS_SLICE [MP_F_NEW_SLICE] = 3, - #endif [MP_F_UNPACK_SEQUENCE] = 3, [MP_F_UNPACK_EX] = 3, [MP_F_DELETE_NAME] = 1, [MP_F_DELETE_GLOBAL] = 1, - [MP_F_NEW_CELL] = 1, [MP_F_MAKE_CLOSURE_FROM_RAW_CODE] = 3, [MP_F_ARG_CHECK_NUM_SIG] = 3, [MP_F_SETUP_CODE_STATE] = 4, [MP_F_SMALL_INT_FLOOR_DIVIDE] = 2, [MP_F_SMALL_INT_MODULO] = 2, [MP_F_NATIVE_YIELD_FROM] = 3, + [MP_F_SETJMP] = 1, }; #define N_X86 (1) diff --git a/python/src/py/emitnxtensawin.c b/python/src/py/emitnxtensawin.c new file mode 100644 index 000000000..38d5db13e --- /dev/null +++ b/python/src/py/emitnxtensawin.c @@ -0,0 +1,23 @@ +// Xtensa-Windowed specific stuff + +#include "py/mpconfig.h" + +#if MICROPY_EMIT_XTENSAWIN + +// this is defined so that the assembler exports generic assembler API macros +#define GENERIC_ASM_API (1) +#define GENERIC_ASM_API_WIN (1) +#include "py/asmxtensa.h" + +// Word indices of REG_LOCAL_x in nlr_buf_t +#define NLR_BUF_IDX_LOCAL_1 (2 + 4) // a4 +#define NLR_BUF_IDX_LOCAL_2 (2 + 5) // a5 +#define NLR_BUF_IDX_LOCAL_3 (2 + 6) // a6 + +#define N_NLR_SETJMP (1) +#define N_PRELUDE_AS_BYTES_OBJ (1) +#define N_XTENSAWIN (1) +#define EXPORT_FUN(name) emit_native_xtensawin_##name +#include "py/emitnative.c" + +#endif diff --git 
a/python/src/py/grammar.h b/python/src/py/grammar.h index 5a5b682ac..ca9b5b379 100644 --- a/python/src/py/grammar.h +++ b/python/src/py/grammar.h @@ -55,7 +55,7 @@ DEF_RULE_NC(eval_input_2, and(1), tok(NEWLINE)) // varargslist: vfpdef ['=' test] (',' vfpdef ['=' test])* [',' ['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]] | '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef // vfpdef: NAME -DEF_RULE_NC(decorator, and(4), tok(DEL_AT), rule(dotted_name), opt_rule(trailer_paren), tok(NEWLINE)) +DEF_RULE_NC(decorator, and(4), tok(OP_AT), rule(dotted_name), opt_rule(trailer_paren), tok(NEWLINE)) DEF_RULE_NC(decorators, one_or_more, rule(decorator)) DEF_RULE(decorated, c(decorated), and_ident(2), rule(decorators), rule(decorated_body)) #if MICROPY_PY_ASYNC_AWAIT @@ -96,7 +96,7 @@ DEF_RULE(simple_stmt_2, c(generic_all_nodes), list_with_end, rule(small_stmt), t // small_stmt: expr_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | nonlocal_stmt | assert_stmt // expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) | ('=' (yield_expr|testlist_star_expr))*) // testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] -// augassign: '+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' | '<<=' | '>>=' | '**=' | '//=' +// augassign: '+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | '<<=' | '>>=' | '**=' | '//=' // # For normal assignments, additional restrictions enforced by the interpreter DEF_RULE_NC(small_stmt, or(8), rule(del_stmt), rule(pass_stmt), rule(flow_stmt), rule(import_stmt), rule(global_stmt), rule(nonlocal_stmt), rule(assert_stmt), rule(expr_stmt)) @@ -108,7 +108,7 @@ DEF_RULE_NC(expr_stmt_assign, and_ident(2), tok(DEL_EQUAL), rule(expr_stmt_6)) DEF_RULE_NC(expr_stmt_6, or(2), rule(yield_expr), rule(testlist_star_expr)) DEF_RULE(testlist_star_expr, c(generic_tuple), list_with_end, rule(testlist_star_expr_2), tok(DEL_COMMA)) DEF_RULE_NC(testlist_star_expr_2, or(2), rule(star_expr), rule(test)) -DEF_RULE_NC(augassign, or(12), tok(DEL_PLUS_EQUAL), tok(DEL_MINUS_EQUAL), tok(DEL_STAR_EQUAL), tok(DEL_SLASH_EQUAL), tok(DEL_PERCENT_EQUAL), tok(DEL_AMPERSAND_EQUAL), tok(DEL_PIPE_EQUAL), tok(DEL_CARET_EQUAL), tok(DEL_DBL_LESS_EQUAL), tok(DEL_DBL_MORE_EQUAL), tok(DEL_DBL_STAR_EQUAL), tok(DEL_DBL_SLASH_EQUAL)) +DEF_RULE_NC(augassign, or(13), tok(DEL_PLUS_EQUAL), tok(DEL_MINUS_EQUAL), tok(DEL_STAR_EQUAL), tok(DEL_AT_EQUAL), tok(DEL_SLASH_EQUAL), tok(DEL_PERCENT_EQUAL), tok(DEL_AMPERSAND_EQUAL), tok(DEL_PIPE_EQUAL), tok(DEL_CARET_EQUAL), tok(DEL_DBL_LESS_EQUAL), tok(DEL_DBL_MORE_EQUAL), tok(DEL_DBL_STAR_EQUAL), tok(DEL_DBL_SLASH_EQUAL)) // del_stmt: 'del' exprlist // pass_stmt: 'pass' @@ -226,7 +226,7 @@ DEF_RULE(lambdef_nocond, c(lambdef), and_blank(4), tok(KW_LAMBDA), opt_rule(vara // and_expr: shift_expr ('&' shift_expr)* // shift_expr: arith_expr (('<<'|'>>') arith_expr)* // arith_expr: term (('+'|'-') term)* -// term: factor (('*'|'/'|'%'|'//') factor)* +// term: factor (('*'|'@'|'/'|'%'|'//') factor)* // factor: ('+'|'-'|'~') factor | power // power: atom_expr ['**' factor] // atom_expr: 'await' atom trailer* | atom trailer* @@ -249,7 +249,7 @@ DEF_RULE_NC(shift_op, or(2), tok(OP_DBL_LESS), tok(OP_DBL_MORE)) DEF_RULE(arith_expr, c(term), list, rule(term), rule(arith_op)) DEF_RULE_NC(arith_op, or(2), tok(OP_PLUS), tok(OP_MINUS)) DEF_RULE(term, c(term), list, rule(factor), rule(term_op)) -DEF_RULE_NC(term_op, or(4), tok(OP_STAR), tok(OP_SLASH), tok(OP_PERCENT), tok(OP_DBL_SLASH)) +DEF_RULE_NC(term_op, 
or(5), tok(OP_STAR), tok(OP_AT), tok(OP_SLASH), tok(OP_PERCENT), tok(OP_DBL_SLASH)) DEF_RULE_NC(factor, or(2), rule(factor_2), rule(power)) DEF_RULE(factor_2, c(factor_2), and_ident(2), rule(factor_op), rule(factor)) DEF_RULE_NC(factor_op, or(3), tok(OP_PLUS), tok(OP_MINUS), tok(OP_TILDE)) diff --git a/python/src/py/lexer.c b/python/src/py/lexer.c index e161700b1..5f8adda91 100644 --- a/python/src/py/lexer.c +++ b/python/src/py/lexer.c @@ -174,7 +174,7 @@ STATIC void indent_pop(mp_lexer_t *lex) { // this means if the start of two ops are the same then they are equal til the last char STATIC const char *const tok_enc = - "()[]{},:;@~" // singles + "()[]{},:;~" // singles " >= >> >>= "*e=c*e=" // * *= ** **= @@ -185,6 +185,7 @@ STATIC const char *const tok_enc = "/e=c/e=" // / /= // //= "%e=" // % %= "^e=" // ^ ^= + "@e=" // @ @= "=e=" // = == "!."; // start of special cases: != . ... @@ -193,7 +194,7 @@ STATIC const uint8_t tok_enc_kind[] = { MP_TOKEN_DEL_PAREN_OPEN, MP_TOKEN_DEL_PAREN_CLOSE, MP_TOKEN_DEL_BRACKET_OPEN, MP_TOKEN_DEL_BRACKET_CLOSE, MP_TOKEN_DEL_BRACE_OPEN, MP_TOKEN_DEL_BRACE_CLOSE, - MP_TOKEN_DEL_COMMA, MP_TOKEN_DEL_COLON, MP_TOKEN_DEL_SEMICOLON, MP_TOKEN_DEL_AT, MP_TOKEN_OP_TILDE, + MP_TOKEN_DEL_COMMA, MP_TOKEN_DEL_COLON, MP_TOKEN_DEL_SEMICOLON, MP_TOKEN_OP_TILDE, MP_TOKEN_OP_LESS, MP_TOKEN_OP_LESS_EQUAL, MP_TOKEN_OP_DBL_LESS, MP_TOKEN_DEL_DBL_LESS_EQUAL, MP_TOKEN_OP_MORE, MP_TOKEN_OP_MORE_EQUAL, MP_TOKEN_OP_DBL_MORE, MP_TOKEN_DEL_DBL_MORE_EQUAL, @@ -205,6 +206,7 @@ STATIC const uint8_t tok_enc_kind[] = { MP_TOKEN_OP_SLASH, MP_TOKEN_DEL_SLASH_EQUAL, MP_TOKEN_OP_DBL_SLASH, MP_TOKEN_DEL_DBL_SLASH_EQUAL, MP_TOKEN_OP_PERCENT, MP_TOKEN_DEL_PERCENT_EQUAL, MP_TOKEN_OP_CARET, MP_TOKEN_DEL_CARET_EQUAL, + MP_TOKEN_OP_AT, MP_TOKEN_DEL_AT_EQUAL, MP_TOKEN_DEL_EQUAL, MP_TOKEN_OP_DBL_EQUAL, }; diff --git a/python/src/py/lexer.h b/python/src/py/lexer.h index a29709107..b9f97013a 100644 --- a/python/src/py/lexer.h +++ b/python/src/py/lexer.h @@ -96,25 +96,45 @@ typedef enum _mp_token_kind_t { MP_TOKEN_KW_WITH, MP_TOKEN_KW_YIELD, + MP_TOKEN_OP_TILDE, + + // Order of these 6 matches corresponding mp_binary_op_t operator + MP_TOKEN_OP_LESS, + MP_TOKEN_OP_MORE, + MP_TOKEN_OP_DBL_EQUAL, + MP_TOKEN_OP_LESS_EQUAL, + MP_TOKEN_OP_MORE_EQUAL, + MP_TOKEN_OP_NOT_EQUAL, + + // Order of these 13 matches corresponding mp_binary_op_t operator + MP_TOKEN_OP_PIPE, + MP_TOKEN_OP_CARET, + MP_TOKEN_OP_AMPERSAND, + MP_TOKEN_OP_DBL_LESS, + MP_TOKEN_OP_DBL_MORE, MP_TOKEN_OP_PLUS, MP_TOKEN_OP_MINUS, MP_TOKEN_OP_STAR, - MP_TOKEN_OP_DBL_STAR, - MP_TOKEN_OP_SLASH, + MP_TOKEN_OP_AT, MP_TOKEN_OP_DBL_SLASH, + MP_TOKEN_OP_SLASH, MP_TOKEN_OP_PERCENT, - MP_TOKEN_OP_LESS, - MP_TOKEN_OP_DBL_LESS, - MP_TOKEN_OP_MORE, - MP_TOKEN_OP_DBL_MORE, - MP_TOKEN_OP_AMPERSAND, - MP_TOKEN_OP_PIPE, - MP_TOKEN_OP_CARET, - MP_TOKEN_OP_TILDE, - MP_TOKEN_OP_LESS_EQUAL, - MP_TOKEN_OP_MORE_EQUAL, - MP_TOKEN_OP_DBL_EQUAL, - MP_TOKEN_OP_NOT_EQUAL, + MP_TOKEN_OP_DBL_STAR, + + // Order of these 13 matches corresponding mp_binary_op_t operator + MP_TOKEN_DEL_PIPE_EQUAL, + MP_TOKEN_DEL_CARET_EQUAL, + MP_TOKEN_DEL_AMPERSAND_EQUAL, + MP_TOKEN_DEL_DBL_LESS_EQUAL, + MP_TOKEN_DEL_DBL_MORE_EQUAL, + MP_TOKEN_DEL_PLUS_EQUAL, + MP_TOKEN_DEL_MINUS_EQUAL, + MP_TOKEN_DEL_STAR_EQUAL, + MP_TOKEN_DEL_AT_EQUAL, + MP_TOKEN_DEL_DBL_SLASH_EQUAL, + MP_TOKEN_DEL_SLASH_EQUAL, + MP_TOKEN_DEL_PERCENT_EQUAL, + MP_TOKEN_DEL_DBL_STAR_EQUAL, MP_TOKEN_DEL_PAREN_OPEN, MP_TOKEN_DEL_PAREN_CLOSE, @@ -126,20 +146,7 @@ typedef enum _mp_token_kind_t { MP_TOKEN_DEL_COLON, 
MP_TOKEN_DEL_PERIOD, MP_TOKEN_DEL_SEMICOLON, - MP_TOKEN_DEL_AT, MP_TOKEN_DEL_EQUAL, - MP_TOKEN_DEL_PLUS_EQUAL, - MP_TOKEN_DEL_MINUS_EQUAL, - MP_TOKEN_DEL_STAR_EQUAL, - MP_TOKEN_DEL_SLASH_EQUAL, - MP_TOKEN_DEL_DBL_SLASH_EQUAL, - MP_TOKEN_DEL_PERCENT_EQUAL, - MP_TOKEN_DEL_AMPERSAND_EQUAL, - MP_TOKEN_DEL_PIPE_EQUAL, - MP_TOKEN_DEL_CARET_EQUAL, - MP_TOKEN_DEL_DBL_MORE_EQUAL, - MP_TOKEN_DEL_DBL_LESS_EQUAL, - MP_TOKEN_DEL_DBL_STAR_EQUAL, MP_TOKEN_DEL_MINUS_MORE, } mp_token_kind_t; @@ -177,8 +184,6 @@ void mp_lexer_to_next(mp_lexer_t *lex); // platform specific import function; must be implemented for a specific port // TODO tidy up, rename, or put elsewhere -//mp_lexer_t *mp_import_open_file(qstr mod_name); - typedef enum { MP_IMPORT_STAT_NO_EXIST, MP_IMPORT_STAT_DIR, diff --git a/python/src/py/makeqstrdata.py b/python/src/py/makeqstrdata.py index 060ebb7fd..7799be0a9 100644 --- a/python/src/py/makeqstrdata.py +++ b/python/src/py/makeqstrdata.py @@ -279,9 +279,11 @@ def parse_input_headers(infiles): # get the qstr value qstr = match.group(1) - # special case to specify control characters + # special cases to specify control characters if qstr == '\\n': qstr = '\n' + elif qstr == '\\r\\n': + qstr = '\r\n' # work out the corresponding qstr name ident = qstr_escape(qstr) @@ -337,7 +339,7 @@ def print_qstr_data(qcfgs, qstrs): print('') # add NULL qstr with no hash or data - print('QDEF(MP_QSTR_NULL, (const byte*)"%s%s" "")' % ('\\x00' * cfg_bytes_hash, '\\x00' * cfg_bytes_len)) + print('QDEF(MP_QSTRnull, (const byte*)"%s%s" "")' % ('\\x00' * cfg_bytes_hash, '\\x00' * cfg_bytes_len)) # go through each qstr and print it out for order, ident, qstr in sorted(qstrs.values(), key=lambda x: x[0]): diff --git a/python/src/py/modarray.c b/python/src/py/modarray.c index de84fc858..b459a8375 100644 --- a/python/src/py/modarray.c +++ b/python/src/py/modarray.c @@ -29,17 +29,17 @@ #if MICROPY_PY_ARRAY STATIC const mp_rom_map_elem_t mp_module_array_globals_table[] = { - { MP_ROM_QSTR(MP_QSTR___name__), MP_ROM_QSTR(MP_QSTR_array) }, + { MP_ROM_QSTR(MP_QSTR___name__), MP_ROM_QSTR(MP_QSTR_uarray) }, { MP_ROM_QSTR(MP_QSTR_array), MP_ROM_PTR(&mp_type_array) }, }; STATIC MP_DEFINE_CONST_DICT(mp_module_array_globals, mp_module_array_globals_table); -const mp_obj_module_t mp_module_array = { +const mp_obj_module_t mp_module_uarray = { .base = { &mp_type_module }, .globals = (mp_obj_dict_t*)&mp_module_array_globals, }; -MP_REGISTER_MODULE(MP_QSTR_array, mp_module_array, MICROPY_PY_ARRAY); +MP_REGISTER_MODULE(MP_QSTR_uarray, mp_module_uarray, MICROPY_PY_ARRAY); #endif diff --git a/python/src/py/modio.c b/python/src/py/modio.c index 0f4a4326c..94ef5d42e 100644 --- a/python/src/py/modio.c +++ b/python/src/py/modio.c @@ -208,15 +208,8 @@ STATIC mp_obj_t resource_stream(mp_obj_t package_in, mp_obj_t path_in) { // package parameter being None, the path_in is interpreted as a // raw path. 
if (package_in != mp_const_none) { - mp_obj_t args[5]; - args[0] = package_in; - args[1] = mp_const_none; // TODO should be globals - args[2] = mp_const_none; // TODO should be locals - args[3] = mp_const_true; // Pass sentinel "non empty" value to force returning of leaf module - args[4] = MP_OBJ_NEW_SMALL_INT(0); - - // TODO lookup __import__ and call that instead of going straight to builtin implementation - mp_obj_t pkg = mp_builtin___import__(5, args); + // Pass "True" as sentinel value in fromlist to force returning of leaf module + mp_obj_t pkg = mp_import_name(mp_obj_str_get_qstr(package_in), mp_const_true, MP_OBJ_NEW_SMALL_INT(0)); mp_obj_t dest[2]; mp_load_method_maybe(pkg, MP_QSTR___path__, dest); diff --git a/python/src/py/modmath.c b/python/src/py/modmath.c index d106f240c..35bb44bea 100644 --- a/python/src/py/modmath.c +++ b/python/src/py/modmath.c @@ -171,6 +171,42 @@ MATH_FUN_1(lgamma, lgamma) #endif //TODO: fsum +#if MICROPY_PY_MATH_ISCLOSE +STATIC mp_obj_t mp_math_isclose(size_t n_args, const mp_obj_t *pos_args, mp_map_t *kw_args) { + enum { ARG_a, ARG_b, ARG_rel_tol, ARG_abs_tol }; + static const mp_arg_t allowed_args[] = { + {MP_QSTR_, MP_ARG_REQUIRED | MP_ARG_OBJ}, + {MP_QSTR_, MP_ARG_REQUIRED | MP_ARG_OBJ}, + {MP_QSTR_rel_tol, MP_ARG_KW_ONLY | MP_ARG_OBJ, {.u_obj = MP_OBJ_NULL}}, + {MP_QSTR_abs_tol, MP_ARG_KW_ONLY | MP_ARG_OBJ, {.u_obj = MP_OBJ_NEW_SMALL_INT(0)}}, + }; + mp_arg_val_t args[MP_ARRAY_SIZE(allowed_args)]; + mp_arg_parse_all(n_args, pos_args, kw_args, MP_ARRAY_SIZE(allowed_args), allowed_args, args); + const mp_float_t a = mp_obj_get_float(args[ARG_a].u_obj); + const mp_float_t b = mp_obj_get_float(args[ARG_b].u_obj); + const mp_float_t rel_tol = args[ARG_rel_tol].u_obj == MP_OBJ_NULL + ? (mp_float_t)1e-9 : mp_obj_get_float(args[ARG_rel_tol].u_obj); + const mp_float_t abs_tol = mp_obj_get_float(args[ARG_abs_tol].u_obj); + if (rel_tol < (mp_float_t)0.0 || abs_tol < (mp_float_t)0.0) { + math_error(); + } + if (a == b) { + return mp_const_true; + } + const mp_float_t difference = MICROPY_FLOAT_C_FUN(fabs)(a - b); + if (isinf(difference)) { // Either a or b is inf + return mp_const_false; + } + if ((difference <= abs_tol) || + (difference <= MICROPY_FLOAT_C_FUN(fabs)(rel_tol * a)) || + (difference <= MICROPY_FLOAT_C_FUN(fabs)(rel_tol * b))) { + return mp_const_true; + } + return mp_const_false; +} +MP_DEFINE_CONST_FUN_OBJ_KW(mp_math_isclose_obj, 2, mp_math_isclose); +#endif + // Function that takes a variable number of arguments // log(x[, base]) @@ -335,6 +371,9 @@ STATIC const mp_rom_map_elem_t mp_module_math_globals_table[] = { { MP_ROM_QSTR(MP_QSTR_isfinite), MP_ROM_PTR(&mp_math_isfinite_obj) }, { MP_ROM_QSTR(MP_QSTR_isinf), MP_ROM_PTR(&mp_math_isinf_obj) }, { MP_ROM_QSTR(MP_QSTR_isnan), MP_ROM_PTR(&mp_math_isnan_obj) }, + #if MICROPY_PY_MATH_ISCLOSE + { MP_ROM_QSTR(MP_QSTR_isclose), MP_ROM_PTR(&mp_math_isclose_obj) }, + #endif { MP_ROM_QSTR(MP_QSTR_trunc), MP_ROM_PTR(&mp_math_trunc_obj) }, { MP_ROM_QSTR(MP_QSTR_radians), MP_ROM_PTR(&mp_math_radians_obj) }, { MP_ROM_QSTR(MP_QSTR_degrees), MP_ROM_PTR(&mp_math_degrees_obj) }, diff --git a/python/src/py/modmicropython.c b/python/src/py/modmicropython.c index 864d1a5c5..8d36697f1 100644 --- a/python/src/py/modmicropython.c +++ b/python/src/py/modmicropython.c @@ -150,7 +150,7 @@ STATIC MP_DEFINE_CONST_FUN_OBJ_1(mp_micropython_kbd_intr_obj, mp_micropython_kbd #if MICROPY_ENABLE_SCHEDULER STATIC mp_obj_t mp_micropython_schedule(mp_obj_t function, mp_obj_t arg) { if (!mp_sched_schedule(function, arg)) { - 
mp_raise_msg(&mp_type_RuntimeError, "schedule stack full"); + mp_raise_msg(&mp_type_RuntimeError, "schedule queue full"); } return mp_const_none; } diff --git a/python/src/py/modstruct.c b/python/src/py/modstruct.c index 8617a8e0d..36af4260e 100644 --- a/python/src/py/modstruct.c +++ b/python/src/py/modstruct.c @@ -97,7 +97,7 @@ STATIC size_t calc_size_items(const char *fmt, size_t *total_sz) { size += cnt; } else { total_cnt += cnt; - mp_uint_t align; + size_t align; size_t sz = mp_binary_get_size(fmt_type, *fmt, &align); while (cnt--) { // Apply alignment @@ -146,6 +146,7 @@ STATIC mp_obj_t struct_unpack_from(size_t n_args, const mp_obj_t *args) { } p += offset; } + byte *p_base = p; // Check that the input buffer is big enough to unpack all the values if (p + total_sz > end_p) { @@ -164,7 +165,7 @@ STATIC mp_obj_t struct_unpack_from(size_t n_args, const mp_obj_t *args) { res->items[i++] = item; } else { while (cnt--) { - item = mp_binary_get_val(fmt_type, *fmt, &p); + item = mp_binary_get_val(fmt_type, *fmt, p_base, &p); res->items[i++] = item; } } @@ -179,6 +180,7 @@ STATIC void struct_pack_into_internal(mp_obj_t fmt_in, byte *p, size_t n_args, c const char *fmt = mp_obj_str_get_str(fmt_in); char fmt_type = get_fmt_type(&fmt); + byte *p_base = p; size_t i; for (i = 0; i < n_args;) { mp_uint_t cnt = 1; @@ -203,7 +205,7 @@ STATIC void struct_pack_into_internal(mp_obj_t fmt_in, byte *p, size_t n_args, c } else { // If we run out of args then we just finish; CPython would raise struct.error while (cnt-- && i < n_args) { - mp_binary_set_val(fmt_type, *fmt, args[i++], &p); + mp_binary_set_val(fmt_type, *fmt, args[i++], p_base, &p); } } fmt++; diff --git a/python/src/py/modsys.c b/python/src/py/modsys.c index 343451732..29fac7c31 100644 --- a/python/src/py/modsys.c +++ b/python/src/py/modsys.c @@ -34,6 +34,12 @@ #include "py/stream.h" #include "py/smallint.h" #include "py/runtime.h" +#include "py/persistentcode.h" + +#if MICROPY_PY_SYS_SETTRACE +#include "py/objmodule.h" +#include "py/profile.h" +#endif #if MICROPY_PY_SYS @@ -61,22 +67,36 @@ STATIC const mp_obj_tuple_t mp_sys_implementation_version_info_obj = { 3, { I(MICROPY_VERSION_MAJOR), I(MICROPY_VERSION_MINOR), I(MICROPY_VERSION_MICRO) } }; +#if MICROPY_PERSISTENT_CODE_LOAD +#define SYS_IMPLEMENTATION_ELEMS \ + MP_ROM_QSTR(MP_QSTR_micropython), \ + MP_ROM_PTR(&mp_sys_implementation_version_info_obj), \ + MP_ROM_INT(MPY_FILE_HEADER_INT) +#else +#define SYS_IMPLEMENTATION_ELEMS \ + MP_ROM_QSTR(MP_QSTR_micropython), \ + MP_ROM_PTR(&mp_sys_implementation_version_info_obj) +#endif #if MICROPY_PY_ATTRTUPLE -STATIC const qstr impl_fields[] = { MP_QSTR_name, MP_QSTR_version }; +STATIC const qstr impl_fields[] = { + MP_QSTR_name, + MP_QSTR_version, + #if MICROPY_PERSISTENT_CODE_LOAD + MP_QSTR_mpy, + #endif +}; STATIC MP_DEFINE_ATTRTUPLE( mp_sys_implementation_obj, impl_fields, - 2, - MP_ROM_QSTR(MP_QSTR_micropython), - MP_ROM_PTR(&mp_sys_implementation_version_info_obj) + 2 + MICROPY_PERSISTENT_CODE_LOAD, + SYS_IMPLEMENTATION_ELEMS ); #else STATIC const mp_rom_obj_tuple_t mp_sys_implementation_obj = { {&mp_type_tuple}, - 2, + 2 + MICROPY_PERSISTENT_CODE_LOAD, { - MP_ROM_QSTR(MP_QSTR_micropython), - MP_ROM_PTR(&mp_sys_implementation_version_info_obj), + SYS_IMPLEMENTATION_ELEMS } }; #endif @@ -146,6 +166,24 @@ STATIC mp_obj_t mp_sys_getsizeof(mp_obj_t obj) { STATIC MP_DEFINE_CONST_FUN_OBJ_1(mp_sys_getsizeof_obj, mp_sys_getsizeof); #endif +#if MICROPY_PY_SYS_ATEXIT +// atexit(callback): Callback is called when sys.exit is called. 
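+// It returns the previously registered callback, so a caller can chain or restore it.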
+STATIC mp_obj_t mp_sys_atexit(mp_obj_t obj) { + mp_obj_t old = MP_STATE_VM(sys_exitfunc); + MP_STATE_VM(sys_exitfunc) = obj; + return old; +} +STATIC MP_DEFINE_CONST_FUN_OBJ_1(mp_sys_atexit_obj, mp_sys_atexit); +#endif + +#if MICROPY_PY_SYS_SETTRACE +// settrace(tracefunc): Set the system’s trace function. +STATIC mp_obj_t mp_sys_settrace(mp_obj_t obj) { + return mp_prof_settrace(obj); +} +MP_DEFINE_CONST_FUN_OBJ_1(mp_sys_settrace_obj, mp_sys_settrace); +#endif // MICROPY_PY_SYS_SETTRACE + STATIC const mp_rom_map_elem_t mp_module_sys_globals_table[] = { { MP_ROM_QSTR(MP_QSTR___name__), MP_ROM_QSTR(MP_QSTR_sys) }, @@ -180,6 +218,10 @@ STATIC const mp_rom_map_elem_t mp_module_sys_globals_table[] = { { MP_ROM_QSTR(MP_QSTR_exit), MP_ROM_PTR(&mp_sys_exit_obj) }, #endif + #if MICROPY_PY_SYS_SETTRACE + { MP_ROM_QSTR(MP_QSTR_settrace), MP_ROM_PTR(&mp_sys_settrace_obj) }, + #endif + #if MICROPY_PY_SYS_STDFILES { MP_ROM_QSTR(MP_QSTR_stdin), MP_ROM_PTR(&mp_sys_stdin_obj) }, { MP_ROM_QSTR(MP_QSTR_stdout), MP_ROM_PTR(&mp_sys_stdout_obj) }, @@ -201,6 +243,9 @@ STATIC const mp_rom_map_elem_t mp_module_sys_globals_table[] = { */ { MP_ROM_QSTR(MP_QSTR_print_exception), MP_ROM_PTR(&mp_sys_print_exception_obj) }, + #if MICROPY_PY_SYS_ATEXIT + { MP_ROM_QSTR(MP_QSTR_atexit), MP_ROM_PTR(&mp_sys_atexit_obj) }, + #endif }; STATIC MP_DEFINE_CONST_DICT(mp_module_sys_globals, mp_module_sys_globals_table); diff --git a/python/src/py/moduerrno.c b/python/src/py/moduerrno.c index de66c941b..0e0a3f008 100644 --- a/python/src/py/moduerrno.c +++ b/python/src/py/moduerrno.c @@ -104,7 +104,7 @@ qstr mp_errno_to_str(mp_obj_t errno_val) { // We have the errorcode dict so can do a lookup using the hash map mp_map_elem_t *elem = mp_map_lookup((mp_map_t*)&errorcode_dict.map, errno_val, MP_MAP_LOOKUP); if (elem == NULL) { - return MP_QSTR_NULL; + return MP_QSTRnull; } else { return MP_OBJ_QSTR_VALUE(elem->value); } @@ -115,7 +115,7 @@ qstr mp_errno_to_str(mp_obj_t errno_val) { return MP_OBJ_QSTR_VALUE(mp_module_uerrno_globals_table[i].key); } } - return MP_QSTR_NULL; + return MP_QSTRnull; #endif } diff --git a/python/src/py/mpconfig.h b/python/src/py/mpconfig.h index 219f8de44..1e786f753 100644 --- a/python/src/py/mpconfig.h +++ b/python/src/py/mpconfig.h @@ -28,7 +28,7 @@ // Current version of MicroPython #define MICROPY_VERSION_MAJOR 1 -#define MICROPY_VERSION_MINOR 11 +#define MICROPY_VERSION_MINOR 12 #define MICROPY_VERSION_MICRO 0 // Combined version as a 32-bit number for convenience @@ -323,12 +323,23 @@ #define MICROPY_EMIT_INLINE_XTENSA (0) #endif +// Whether to emit Xtensa-Windowed native code +#ifndef MICROPY_EMIT_XTENSAWIN +#define MICROPY_EMIT_XTENSAWIN (0) +#endif + // Convenience definition for whether any native emitter is enabled -#define MICROPY_EMIT_NATIVE (MICROPY_EMIT_X64 || MICROPY_EMIT_X86 || MICROPY_EMIT_THUMB || MICROPY_EMIT_ARM || MICROPY_EMIT_XTENSA) +#define MICROPY_EMIT_NATIVE (MICROPY_EMIT_X64 || MICROPY_EMIT_X86 || MICROPY_EMIT_THUMB || MICROPY_EMIT_ARM || MICROPY_EMIT_XTENSA || MICROPY_EMIT_XTENSAWIN) + +// Select prelude-as-bytes-object for certain emitters +#define MICROPY_EMIT_NATIVE_PRELUDE_AS_BYTES_OBJ (MICROPY_EMIT_XTENSAWIN) // Convenience definition for whether any inline assembler emitter is enabled #define MICROPY_EMIT_INLINE_ASM (MICROPY_EMIT_INLINE_THUMB || MICROPY_EMIT_INLINE_XTENSA) +// Convenience definition for whether any native or inline assembler emitter is enabled +#define MICROPY_EMIT_MACHINE_CODE (MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM) + 
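+// Example (hypothetical port configuration): a port that sets
+//     #define MICROPY_EMIT_XTENSAWIN (1)
+// in its mpconfigport.h thereby enables MICROPY_EMIT_NATIVE and
+// MICROPY_EMIT_MACHINE_CODE through the convenience definitions above, and can
+// guard emitter-only code with #if MICROPY_EMIT_MACHINE_CODE.
+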
/*****************************************************************************/ /* Compiler configuration */ @@ -338,6 +349,7 @@ #endif // Whether the compiler is dynamically configurable (ie at runtime) +// This will disable the ability to execute native/viper code #ifndef MICROPY_DYNAMIC_COMPILER #define MICROPY_DYNAMIC_COMPILER (0) #endif @@ -1075,6 +1087,11 @@ typedef double mp_float_t; #define MICROPY_PY_MATH_FACTORIAL (0) #endif +// Whether to provide math.isclose function +#ifndef MICROPY_PY_MATH_ISCLOSE +#define MICROPY_PY_MATH_ISCLOSE (0) +#endif + // Whether to provide "cmath" module #ifndef MICROPY_PY_CMATH #define MICROPY_PY_CMATH (0) @@ -1157,6 +1174,16 @@ typedef double mp_float_t; #define MICROPY_PY_SYS_EXIT (1) #endif +// Whether to provide "sys.atexit" function (MicroPython extension) +#ifndef MICROPY_PY_SYS_ATEXIT +#define MICROPY_PY_SYS_ATEXIT (0) +#endif + +// Whether to provide "sys.settrace" function +#ifndef MICROPY_PY_SYS_SETTRACE +#define MICROPY_PY_SYS_SETTRACE (0) +#endif + // Whether to provide "sys.getsizeof" function #ifndef MICROPY_PY_SYS_GETSIZEOF #define MICROPY_PY_SYS_GETSIZEOF (0) @@ -1245,6 +1272,10 @@ typedef double mp_float_t; #define MICROPY_PY_URE (0) #endif +#ifndef MICROPY_PY_URE_DEBUG +#define MICROPY_PY_URE_DEBUG (0) +#endif + #ifndef MICROPY_PY_URE_MATCH_GROUPS #define MICROPY_PY_URE_MATCH_GROUPS (0) #endif @@ -1361,11 +1392,6 @@ typedef double mp_float_t; #define MICROPY_PORT_BUILTIN_MODULES #endif -// Any module weak links - see objmodule.c:mp_builtin_module_weak_links_table. -#ifndef MICROPY_PORT_BUILTIN_MODULE_WEAK_LINKS -#define MICROPY_PORT_BUILTIN_MODULE_WEAK_LINKS -#endif - // Additional constant definitions for the compiler - see compile.c:mp_constants_table. #ifndef MICROPY_PORT_CONSTANTS #define MICROPY_PORT_CONSTANTS @@ -1513,6 +1539,15 @@ typedef double mp_float_t; #define MP_UNLIKELY(x) __builtin_expect((x), 0) #endif +// To annotate that code is unreachable +#ifndef MP_UNREACHABLE +#if defined(__GNUC__) +#define MP_UNREACHABLE __builtin_unreachable(); +#else +#define MP_UNREACHABLE for (;;); +#endif +#endif + #ifndef MP_HTOBE16 #if MP_ENDIANNESS_LITTLE # define MP_HTOBE16(x) ((uint16_t)( (((x) & 0xff) << 8) | (((x) >> 8) & 0xff) )) @@ -1544,4 +1579,14 @@ typedef double mp_float_t; # define MP_WARN_CAT(x) (NULL) #endif +// Feature dependency check. 
+#if MICROPY_PY_SYS_SETTRACE +#if !MICROPY_PERSISTENT_CODE_SAVE +#error "MICROPY_PY_SYS_SETTRACE requires MICROPY_PERSISTENT_CODE_SAVE to be enabled" +#endif +#if MICROPY_COMP_CONST +#error "MICROPY_PY_SYS_SETTRACE requires MICROPY_COMP_CONST to be disabled" +#endif +#endif + #endif // MICROPY_INCLUDED_PY_MPCONFIG_H diff --git a/python/src/py/mphal.h b/python/src/py/mphal.h index 92de01d08..66d80705a 100644 --- a/python/src/py/mphal.h +++ b/python/src/py/mphal.h @@ -34,6 +34,10 @@ #include #endif +#ifndef mp_hal_stdio_poll +uintptr_t mp_hal_stdio_poll(uintptr_t poll_flags); +#endif + #ifndef mp_hal_stdin_rx_chr int mp_hal_stdin_rx_chr(void); #endif diff --git a/python/src/py/mpstate.h b/python/src/py/mpstate.h index a9c2b32d6..ab7d8c51b 100644 --- a/python/src/py/mpstate.h +++ b/python/src/py/mpstate.h @@ -47,6 +47,7 @@ typedef struct mp_dynamic_compiler_t { bool opt_cache_map_lookup_in_bytecode; bool py_builtins_str_unicode; uint8_t native_arch; + uint8_t nlr_buf_num_regs; } mp_dynamic_compiler_t; extern mp_dynamic_compiler_t mp_dynamic_compiler; #endif @@ -141,7 +142,7 @@ typedef struct _mp_state_vm_t { volatile mp_obj_t mp_pending_exception; #if MICROPY_ENABLE_SCHEDULER - mp_sched_item_t sched_stack[MICROPY_SCHEDULER_DEPTH]; + mp_sched_item_t sched_queue[MICROPY_SCHEDULER_DEPTH]; #endif // current exception being handled, for sys.exc_info() @@ -149,6 +150,11 @@ typedef struct _mp_state_vm_t { mp_obj_base_t *cur_exception; #endif + #if MICROPY_PY_SYS_ATEXIT + // exposed through sys.atexit function + mp_obj_t sys_exitfunc; + #endif + // dictionary for the __main__ module mp_obj_dict_t dict_main; @@ -183,6 +189,10 @@ typedef struct _mp_state_vm_t { struct _mp_vfs_mount_t *vfs_mount_table; #endif + #if MICROPY_PY_BLUETOOTH + mp_obj_t bluetooth; + #endif + // // END ROOT POINTER SECTION //////////////////////////////////////////////////////////// @@ -200,6 +210,9 @@ typedef struct _mp_state_vm_t { #if MICROPY_ENABLE_COMPILER mp_uint_t mp_optimise_value; + #if MICROPY_EMIT_NATIVE + uint8_t default_emit_opt; // one of MP_EMIT_OPT_xxx + #endif #endif // size of the emergency exception buf, if it's dynamically allocated @@ -245,6 +258,12 @@ typedef struct _mp_state_thread_t { mp_obj_dict_t *dict_globals; nlr_buf_t *nlr_top; + + #if MICROPY_PY_SYS_SETTRACE + mp_obj_t prof_trace_callback; + bool prof_callback_is_executing; + struct _mp_code_state_t *current_code_state; + #endif } mp_state_thread_t; // This structure combines the above 3 structures. diff --git a/python/src/py/nativeglue.c b/python/src/py/nativeglue.c index c810f31e6..1a7f92f94 100644 --- a/python/src/py/nativeglue.c +++ b/python/src/py/nativeglue.c @@ -24,14 +24,15 @@ * THE SOFTWARE. 
*/ +#include #include #include #include #include "py/runtime.h" #include "py/smallint.h" -#include "py/emitglue.h" -#include "py/bc.h" +#include "py/nativeglue.h" +#include "py/gc.h" #if MICROPY_DEBUG_VERBOSE // print debugging info #define DEBUG_printf DEBUG_printf @@ -65,7 +66,7 @@ mp_uint_t mp_native_from_obj(mp_obj_t obj, mp_uint_t type) { case MP_NATIVE_TYPE_UINT: return mp_obj_get_int_truncated(obj); default: { // cast obj to a pointer mp_buffer_info_t bufinfo; - if (mp_get_buffer(obj, &bufinfo, MP_BUFFER_RW)) { + if (mp_get_buffer(obj, &bufinfo, MP_BUFFER_READ)) { return (mp_uint_t)bufinfo.buf; } else { // assume obj is an integer that represents an address @@ -77,7 +78,7 @@ mp_uint_t mp_native_from_obj(mp_obj_t obj, mp_uint_t type) { #endif -#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_ASM +#if MICROPY_EMIT_MACHINE_CODE // convert a native value to a MicroPython object based on type mp_obj_t mp_native_to_obj(mp_uint_t val, mp_uint_t type) { @@ -95,9 +96,32 @@ mp_obj_t mp_native_to_obj(mp_uint_t val, mp_uint_t type) { #endif -#if MICROPY_EMIT_NATIVE +#if MICROPY_EMIT_NATIVE && !MICROPY_DYNAMIC_COMPILER -mp_obj_dict_t *mp_native_swap_globals(mp_obj_dict_t *new_globals) { +#if !MICROPY_PY_BUILTINS_SET +mp_obj_t mp_obj_new_set(size_t n_args, mp_obj_t *items) { + (void)n_args; + (void)items; + mp_raise_msg(&mp_type_RuntimeError, "set unsupported"); +} + +void mp_obj_set_store(mp_obj_t self_in, mp_obj_t item) { + (void)self_in; + (void)item; + mp_raise_msg(&mp_type_RuntimeError, "set unsupported"); +} +#endif + +#if !MICROPY_PY_BUILTINS_SLICE +mp_obj_t mp_obj_new_slice(mp_obj_t ostart, mp_obj_t ostop, mp_obj_t ostep) { + (void)ostart; + (void)ostop; + (void)ostep; + mp_raise_msg(&mp_type_RuntimeError, "slice unsupported"); +} +#endif + +STATIC mp_obj_dict_t *mp_native_swap_globals(mp_obj_dict_t *new_globals) { if (new_globals == NULL) { // Globals were the originally the same so don't restore them return NULL; @@ -113,13 +137,13 @@ mp_obj_dict_t *mp_native_swap_globals(mp_obj_dict_t *new_globals) { // wrapper that accepts n_args and n_kw in one argument // (native emitter can only pass at most 3 arguments to a function) -mp_obj_t mp_native_call_function_n_kw(mp_obj_t fun_in, size_t n_args_kw, const mp_obj_t *args) { +STATIC mp_obj_t mp_native_call_function_n_kw(mp_obj_t fun_in, size_t n_args_kw, const mp_obj_t *args) { return mp_call_function_n_kw(fun_in, n_args_kw & 0xff, (n_args_kw >> 8) & 0xff, args); } // wrapper that makes raise obj and raises it // END_FINALLY opcode requires that we don't raise if o==None -void mp_native_raise(mp_obj_t o) { +STATIC void mp_native_raise(mp_obj_t o) { if (o != MP_OBJ_NULL && o != mp_const_none) { nlr_raise(mp_make_raise_obj(o)); } @@ -187,8 +211,50 @@ STATIC bool mp_native_yield_from(mp_obj_t gen, mp_obj_t send_value, mp_obj_t *re return false; } +#if MICROPY_PY_BUILTINS_FLOAT + +STATIC mp_obj_t mp_obj_new_float_from_f(float f) { + return mp_obj_new_float((mp_float_t)f); +} + +STATIC mp_obj_t mp_obj_new_float_from_d(double d) { + return mp_obj_new_float((mp_float_t)d); +} + +STATIC float mp_obj_get_float_to_f(mp_obj_t o) { + return (float)mp_obj_get_float(o); +} + +STATIC double mp_obj_get_float_to_d(mp_obj_t o) { + return (double)mp_obj_get_float(o); +} + +#else + +STATIC mp_obj_t mp_obj_new_float_from_f(float f) { + (void)f; + mp_raise_msg(&mp_type_RuntimeError, "float unsupported"); +} + +STATIC mp_obj_t mp_obj_new_float_from_d(double d) { + (void)d; + mp_raise_msg(&mp_type_RuntimeError, "float unsupported"); +} + +STATIC float 
mp_obj_get_float_to_f(mp_obj_t o) { + (void)o; + mp_raise_msg(&mp_type_RuntimeError, "float unsupported"); +} + +STATIC double mp_obj_get_float_to_d(mp_obj_t o) { + (void)o; + mp_raise_msg(&mp_type_RuntimeError, "float unsupported"); +} + +#endif + // these must correspond to the respective enum in runtime0.h -const void *const mp_fun_table[MP_F_NUMBER_OF] = { +const mp_fun_table_t mp_fun_table = { &mp_const_none_obj, &mp_const_false_obj, &mp_const_true_obj, @@ -210,45 +276,74 @@ const void *const mp_fun_table[MP_F_NUMBER_OF] = { mp_binary_op, mp_obj_new_tuple, mp_obj_new_list, - mp_obj_list_append, mp_obj_new_dict, - mp_obj_dict_store, -#if MICROPY_PY_BUILTINS_SET - mp_obj_set_store, mp_obj_new_set, -#endif + mp_obj_set_store, + mp_obj_list_append, + mp_obj_dict_store, mp_make_function_from_raw_code, mp_native_call_function_n_kw, mp_call_method_n_kw, mp_call_method_n_kw_var, mp_native_getiter, mp_native_iternext, + #if MICROPY_NLR_SETJMP + nlr_push_tail, + #else nlr_push, + #endif nlr_pop, mp_native_raise, mp_import_name, mp_import_from, mp_import_all, -#if MICROPY_PY_BUILTINS_SLICE mp_obj_new_slice, -#endif mp_unpack_sequence, mp_unpack_ex, mp_delete_name, mp_delete_global, - mp_obj_new_cell, mp_make_closure_from_raw_code, mp_arg_check_num_sig, mp_setup_code_state, mp_small_int_floor_divide, mp_small_int_modulo, mp_native_yield_from, + #if MICROPY_NLR_SETJMP + setjmp, + #else + NULL, + #endif + // Additional entries for dynamic runtime, starts at index 50 + memset, + memmove, + gc_realloc, + mp_printf, + mp_vprintf, + mp_raise_msg, + mp_obj_get_type, + mp_obj_new_str, + mp_obj_new_bytes, + mp_obj_new_bytearray_by_ref, + mp_obj_new_float_from_f, + mp_obj_new_float_from_d, + mp_obj_get_float_to_f, + mp_obj_get_float_to_d, + mp_get_buffer_raise, + mp_get_stream_raise, + &mp_plat_print, + &mp_type_type, + &mp_type_str, + &mp_type_list, + &mp_type_dict, + &mp_type_fun_builtin_0, + &mp_type_fun_builtin_1, + &mp_type_fun_builtin_2, + &mp_type_fun_builtin_3, + &mp_type_fun_builtin_var, + &mp_stream_read_obj, + &mp_stream_readinto_obj, + &mp_stream_unbuffered_readline_obj, + &mp_stream_write_obj, }; -/* -void mp_f_vector(mp_fun_kind_t fun_kind) { - (mp_f_table[fun_kind])(); -} -*/ - #endif // MICROPY_EMIT_NATIVE diff --git a/python/src/py/nativeglue.h b/python/src/py/nativeglue.h new file mode 100644 index 000000000..021e7a8ec --- /dev/null +++ b/python/src/py/nativeglue.h @@ -0,0 +1,177 @@ +/* + * This file is part of the MicroPython project, http://micropython.org/ + * + * The MIT License (MIT) + * + * Copyright (c) 2013-2019 Damien P. George + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ +#ifndef MICROPY_INCLUDED_PY_NATIVEGLUE_H +#define MICROPY_INCLUDED_PY_NATIVEGLUE_H + +#include +#include "py/obj.h" +#include "py/persistentcode.h" +#include "py/stream.h" + +typedef enum { + MP_F_CONST_NONE_OBJ = 0, + MP_F_CONST_FALSE_OBJ, + MP_F_CONST_TRUE_OBJ, + MP_F_CONVERT_OBJ_TO_NATIVE, + MP_F_CONVERT_NATIVE_TO_OBJ, + MP_F_NATIVE_SWAP_GLOBALS, + MP_F_LOAD_NAME, + MP_F_LOAD_GLOBAL, + MP_F_LOAD_BUILD_CLASS, + MP_F_LOAD_ATTR, + MP_F_LOAD_METHOD, + MP_F_LOAD_SUPER_METHOD, + MP_F_STORE_NAME, + MP_F_STORE_GLOBAL, + MP_F_STORE_ATTR, + MP_F_OBJ_SUBSCR, + MP_F_OBJ_IS_TRUE, + MP_F_UNARY_OP, + MP_F_BINARY_OP, + MP_F_BUILD_TUPLE, + MP_F_BUILD_LIST, + MP_F_BUILD_MAP, + MP_F_BUILD_SET, + MP_F_STORE_SET, + MP_F_LIST_APPEND, + MP_F_STORE_MAP, + MP_F_MAKE_FUNCTION_FROM_RAW_CODE, + MP_F_NATIVE_CALL_FUNCTION_N_KW, + MP_F_CALL_METHOD_N_KW, + MP_F_CALL_METHOD_N_KW_VAR, + MP_F_NATIVE_GETITER, + MP_F_NATIVE_ITERNEXT, + MP_F_NLR_PUSH, + MP_F_NLR_POP, + MP_F_NATIVE_RAISE, + MP_F_IMPORT_NAME, + MP_F_IMPORT_FROM, + MP_F_IMPORT_ALL, + MP_F_NEW_SLICE, + MP_F_UNPACK_SEQUENCE, + MP_F_UNPACK_EX, + MP_F_DELETE_NAME, + MP_F_DELETE_GLOBAL, + MP_F_MAKE_CLOSURE_FROM_RAW_CODE, + MP_F_ARG_CHECK_NUM_SIG, + MP_F_SETUP_CODE_STATE, + MP_F_SMALL_INT_FLOOR_DIVIDE, + MP_F_SMALL_INT_MODULO, + MP_F_NATIVE_YIELD_FROM, + MP_F_SETJMP, + MP_F_NUMBER_OF, +} mp_fun_kind_t; + +typedef struct _mp_fun_table_t { + mp_const_obj_t const_none; + mp_const_obj_t const_false; + mp_const_obj_t const_true; + mp_uint_t (*native_from_obj)(mp_obj_t obj, mp_uint_t type); + mp_obj_t (*native_to_obj)(mp_uint_t val, mp_uint_t type); + mp_obj_dict_t *(*swap_globals)(mp_obj_dict_t *new_globals); + mp_obj_t (*load_name)(qstr qst); + mp_obj_t (*load_global)(qstr qst); + mp_obj_t (*load_build_class)(void); + mp_obj_t (*load_attr)(mp_obj_t base, qstr attr); + void (*load_method)(mp_obj_t base, qstr attr, mp_obj_t *dest); + void (*load_super_method)(qstr attr, mp_obj_t *dest); + void (*store_name)(qstr qst, mp_obj_t obj); + void (*store_global)(qstr qst, mp_obj_t obj); + void (*store_attr)(mp_obj_t base, qstr attr, mp_obj_t val); + mp_obj_t (*obj_subscr)(mp_obj_t base, mp_obj_t index, mp_obj_t val); + bool (*obj_is_true)(mp_obj_t arg); + mp_obj_t (*unary_op)(mp_unary_op_t op, mp_obj_t arg); + mp_obj_t (*binary_op)(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t rhs); + mp_obj_t (*new_tuple)(size_t n, const mp_obj_t *items); + mp_obj_t (*new_list)(size_t n, mp_obj_t *items); + mp_obj_t (*new_dict)(size_t n_args); + mp_obj_t (*new_set)(size_t n_args, mp_obj_t *items); + void (*set_store)(mp_obj_t self_in, mp_obj_t item); + mp_obj_t (*list_append)(mp_obj_t self_in, mp_obj_t arg); + mp_obj_t (*dict_store)(mp_obj_t self_in, mp_obj_t key, mp_obj_t value); + mp_obj_t (*make_function_from_raw_code)(const mp_raw_code_t *rc, mp_obj_t def_args, mp_obj_t def_kw_args); + mp_obj_t (*call_function_n_kw)(mp_obj_t fun_in, size_t n_args_kw, const mp_obj_t *args); + mp_obj_t (*call_method_n_kw)(size_t n_args, size_t n_kw, const mp_obj_t *args); + mp_obj_t (*call_method_n_kw_var)(bool have_self, size_t n_args_n_kw, const mp_obj_t *args); + mp_obj_t (*getiter)(mp_obj_t obj, mp_obj_iter_buf_t *iter); + mp_obj_t (*iternext)(mp_obj_iter_buf_t *iter); + unsigned int (*nlr_push)(nlr_buf_t *); + void (*nlr_pop)(void); 
+ void (*raise)(mp_obj_t o); + mp_obj_t (*import_name)(qstr name, mp_obj_t fromlist, mp_obj_t level); + mp_obj_t (*import_from)(mp_obj_t module, qstr name); + void (*import_all)(mp_obj_t module); + mp_obj_t (*new_slice)(mp_obj_t start, mp_obj_t stop, mp_obj_t step); + void (*unpack_sequence)(mp_obj_t seq, size_t num, mp_obj_t *items); + void (*unpack_ex)(mp_obj_t seq, size_t num, mp_obj_t *items); + void (*delete_name)(qstr qst); + void (*delete_global)(qstr qst); + mp_obj_t (*make_closure_from_raw_code)(const mp_raw_code_t *rc, mp_uint_t n_closed_over, const mp_obj_t *args); + void (*arg_check_num_sig)(size_t n_args, size_t n_kw, uint32_t sig); + void (*setup_code_state)(mp_code_state_t *code_state, size_t n_args, size_t n_kw, const mp_obj_t *args); + mp_int_t (*small_int_floor_divide)(mp_int_t num, mp_int_t denom); + mp_int_t (*small_int_modulo)(mp_int_t dividend, mp_int_t divisor); + bool (*yield_from)(mp_obj_t gen, mp_obj_t send_value, mp_obj_t *ret_value); + void *setjmp; + // Additional entries for dynamic runtime, starts at index 50 + void *(*memset_)(void *s, int c, size_t n); + void *(*memmove_)(void *dest, const void *src, size_t n); + void *(*realloc_)(void *ptr, size_t n_bytes, bool allow_move); + int (*printf_)(const mp_print_t *print, const char *fmt, ...); + int (*vprintf_)(const mp_print_t *print, const char *fmt, va_list args); + #if defined(__GNUC__) + NORETURN // Only certain compilers support no-return attributes in function pointer declarations + #endif + void (*raise_msg)(const mp_obj_type_t *exc_type, const char *msg); + mp_obj_type_t *(*obj_get_type)(mp_const_obj_t o_in); + mp_obj_t (*obj_new_str)(const char* data, size_t len); + mp_obj_t (*obj_new_bytes)(const byte* data, size_t len); + mp_obj_t (*obj_new_bytearray_by_ref)(size_t n, void *items); + mp_obj_t (*obj_new_float_from_f)(float f); + mp_obj_t (*obj_new_float_from_d)(double d); + float (*obj_get_float_to_f)(mp_obj_t o); + double (*obj_get_float_to_d)(mp_obj_t o); + void (*get_buffer_raise)(mp_obj_t obj, mp_buffer_info_t *bufinfo, mp_uint_t flags); + const mp_stream_p_t *(*get_stream_raise)(mp_obj_t self_in, int flags); + const mp_print_t *plat_print; + const mp_obj_type_t *type_type; + const mp_obj_type_t *type_str; + const mp_obj_type_t *type_list; + const mp_obj_type_t *type_dict; + const mp_obj_type_t *type_fun_builtin_0; + const mp_obj_type_t *type_fun_builtin_1; + const mp_obj_type_t *type_fun_builtin_2; + const mp_obj_type_t *type_fun_builtin_3; + const mp_obj_type_t *type_fun_builtin_var; + const mp_obj_fun_builtin_var_t *stream_read_obj; + const mp_obj_fun_builtin_var_t *stream_readinto_obj; + const mp_obj_fun_builtin_var_t *stream_unbuffered_readline_obj; + const mp_obj_fun_builtin_var_t *stream_write_obj; +} mp_fun_table_t; + +extern const mp_fun_table_t mp_fun_table; + +#endif // MICROPY_INCLUDED_PY_NATIVEGLUE_H diff --git a/python/src/py/nlr.h b/python/src/py/nlr.h index 90595a12d..3be3eb58c 100644 --- a/python/src/py/nlr.h +++ b/python/src/py/nlr.h @@ -34,6 +34,14 @@ #include "py/mpconfig.h" +#define MICROPY_NLR_NUM_REGS_X86 (6) +#define MICROPY_NLR_NUM_REGS_X64 (8) +#define MICROPY_NLR_NUM_REGS_X64_WIN (10) +#define MICROPY_NLR_NUM_REGS_ARM_THUMB (10) +#define MICROPY_NLR_NUM_REGS_ARM_THUMB_FP (10 + 6) +#define MICROPY_NLR_NUM_REGS_XTENSA (10) +#define MICROPY_NLR_NUM_REGS_XTENSAWIN (17) + // If MICROPY_NLR_SETJMP is not enabled then auto-detect the machine arch #if !MICROPY_NLR_SETJMP // A lot of nlr-related things need different treatment on Windows @@ -44,20 +52,31 @@ #endif #if 
defined(__i386__) #define MICROPY_NLR_X86 (1) - #define MICROPY_NLR_NUM_REGS (6) + #define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_X86) #elif defined(__x86_64__) #define MICROPY_NLR_X64 (1) #if MICROPY_NLR_OS_WINDOWS - #define MICROPY_NLR_NUM_REGS (10) + #define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_X64_WIN) #else - #define MICROPY_NLR_NUM_REGS (8) + #define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_X64) #endif #elif defined(__thumb2__) || defined(__thumb__) || defined(__arm__) #define MICROPY_NLR_THUMB (1) - #define MICROPY_NLR_NUM_REGS (10) + #if defined(__SOFTFP__) + #define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_ARM_THUMB) + #else + // With hardware FP registers s16-s31 are callee save so in principle + // should be saved and restored by the NLR code. gcc only uses s16-s21 + // so only save/restore those as an optimisation. + #define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_ARM_THUMB_FP) + #endif #elif defined(__xtensa__) #define MICROPY_NLR_XTENSA (1) - #define MICROPY_NLR_NUM_REGS (10) + #define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_XTENSA) +#elif defined(__powerpc__) + #define MICROPY_NLR_POWERPC (1) + // this could be less but using 128 for safety + #define MICROPY_NLR_NUM_REGS (128) #else #define MICROPY_NLR_SETJMP (1) //#warning "No native NLR support for this arch, using setjmp implementation" diff --git a/python/src/py/nlrpowerpc.c b/python/src/py/nlrpowerpc.c new file mode 100644 index 000000000..b40382381 --- /dev/null +++ b/python/src/py/nlrpowerpc.c @@ -0,0 +1,121 @@ +/* + * This file is part of the MicroPython project, http://micropython.org/ + * + * The MIT License (MIT) + * + * Copyright (c) 2019, Michael Neuling, IBM Corporation. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ + +#include "py/mpstate.h" + +#if MICROPY_NLR_POWERPC + +#undef nlr_push + +// Saving all ABI non-vol registers here + +unsigned int nlr_push(nlr_buf_t *nlr) { + + __asm__ volatile( + "li 4, 0x4eed ; " // Store canary + "std 4, 0x00(%0) ;" + "std 0, 0x08(%0) ;" + "std 1, 0x10(%0) ;" + "std 2, 0x18(%0) ;" + "std 14, 0x20(%0) ;" + "std 15, 0x28(%0) ;" + "std 16, 0x30(%0) ;" + "std 17, 0x38(%0) ;" + "std 18, 0x40(%0) ;" + "std 19, 0x48(%0) ;" + "std 20, 0x50(%0) ;" + "std 21, 0x58(%0) ;" + "std 22, 0x60(%0) ;" + "std 23, 0x68(%0) ;" + "std 24, 0x70(%0) ;" + "std 25, 0x78(%0) ;" + "std 26, 0x80(%0) ;" + "std 27, 0x88(%0) ;" + "std 28, 0x90(%0) ;" + "std 29, 0x98(%0) ;" + "std 30, 0xA0(%0) ;" + "std 31, 0xA8(%0) ;" + + "mfcr 4 ; " + "std 4, 0xB0(%0) ;" + "mflr 4 ;" + "std 4, 0xB8(%0) ;" + "li 4, nlr_push_tail@l ;" + "oris 4, 4, nlr_push_tail@h ;" + "mtctr 4 ;" + "mr 3, %1 ; " + "bctr ;" + : + : "r"(&nlr->regs), "r"(nlr) + : + ); + + return 0; +} + +NORETURN void nlr_jump(void *val) { + MP_NLR_JUMP_HEAD(val, top) + + __asm__ volatile( + "ld 3, 0x0(%0) ;" + "cmpdi 3, 0x4eed ; " // Check canary + "bne . ; " + "ld 0, 0x08(%0) ;" + "ld 1, 0x10(%0) ;" + "ld 2, 0x18(%0) ;" + "ld 14, 0x20(%0) ;" + "ld 15, 0x28(%0) ;" + "ld 16, 0x30(%0) ;" + "ld 17, 0x38(%0) ;" + "ld 18, 0x40(%0) ;" + "ld 19, 0x48(%0) ;" + "ld 20, 0x50(%0) ;" + "ld 21, 0x58(%0) ;" + "ld 22, 0x60(%0) ;" + "ld 23, 0x68(%0) ;" + "ld 24, 0x70(%0) ;" + "ld 25, 0x78(%0) ;" + "ld 26, 0x80(%0) ;" + "ld 27, 0x88(%0) ;" + "ld 28, 0x90(%0) ;" + "ld 29, 0x98(%0) ;" + "ld 30, 0xA0(%0) ;" + "ld 31, 0xA8(%0) ;" + "ld 3, 0xB0(%0) ;" + "mtcr 3 ;" + "ld 3, 0xB8(%0) ;" + "mtlr 3 ; " + "li 3, 1;" + "blr ;" + : + : "r"(&top->regs) + : + ); + + MP_UNREACHABLE; +} + +#endif // MICROPY_NLR_POWERPC diff --git a/python/src/py/nlrthumb.c b/python/src/py/nlrthumb.c index 99061e62c..bc3038827 100644 --- a/python/src/py/nlrthumb.c +++ b/python/src/py/nlrthumb.c @@ -44,7 +44,7 @@ __attribute__((naked)) unsigned int nlr_push(nlr_buf_t *nlr) { "str r6, [r0, #20] \n" // store r6 into nlr_buf "str r7, [r0, #24] \n" // store r7 into nlr_buf -#if defined(__ARM_ARCH_6M__) +#if !defined(__thumb2__) "mov r1, r8 \n" "str r1, [r0, #28] \n" // store r8 into nlr_buf "mov r1, r9 \n" @@ -63,10 +63,15 @@ __attribute__((naked)) unsigned int nlr_push(nlr_buf_t *nlr) { "str r10, [r0, #36] \n" // store r10 into nlr_buf "str r11, [r0, #40] \n" // store r11 into nlr_buf "str r13, [r0, #44] \n" // store r13=sp into nlr_buf + #if MICROPY_NLR_NUM_REGS == 16 + "vstr d8, [r0, #48] \n" // store s16-s17 into nlr_buf + "vstr d9, [r0, #56] \n" // store s18-s19 into nlr_buf + "vstr d10, [r0, #64] \n" // store s20-s21 into nlr_buf + #endif "str lr, [r0, #8] \n" // store lr into nlr_buf #endif -#if defined(__ARM_ARCH_6M__) +#if !defined(__thumb2__) "ldr r1, nlr_push_tail_var \n" "bx r1 \n" // do the rest in C ".align 2 \n" @@ -97,7 +102,7 @@ NORETURN void nlr_jump(void *val) { "ldr r6, [r0, #20] \n" // load r6 from nlr_buf "ldr r7, [r0, #24] \n" // load r7 from nlr_buf -#if defined(__ARM_ARCH_6M__) +#if !defined(__thumb2__) "ldr r1, [r0, #28] \n" // load r8 from nlr_buf "mov r8, r1 \n" "ldr r1, [r0, #32] \n" // load r9 from nlr_buf @@ -116,6 +121,11 @@ NORETURN void nlr_jump(void *val) { "ldr r10, [r0, #36] \n" // load r10 from nlr_buf "ldr r11, [r0, #40] \n" // load r11 from nlr_buf "ldr r13, [r0, #44] \n" // load r13=sp from nlr_buf + #if MICROPY_NLR_NUM_REGS == 16 + "vldr d8, [r0, #48] \n" // load s16-s17 from nlr_buf + "vldr d9, [r0, #56] \n" // load s18-s19 from nlr_buf + "vldr d10, [r0, 
#64] \n" // load s20-s21 from nlr_buf + #endif "ldr lr, [r0, #8] \n" // load lr from nlr_buf #endif "movs r0, #1 \n" // return 1, non-local return @@ -125,11 +135,7 @@ NORETURN void nlr_jump(void *val) { : // clobbered registers ); - #if defined(__GNUC__) - __builtin_unreachable(); - #else - for (;;); // needed to silence compiler warning - #endif + MP_UNREACHABLE } #endif // MICROPY_NLR_THUMB diff --git a/python/src/py/nlrx64.c b/python/src/py/nlrx64.c index a3a1cf341..95496b380 100644 --- a/python/src/py/nlrx64.c +++ b/python/src/py/nlrx64.c @@ -108,7 +108,7 @@ NORETURN void nlr_jump(void *val) { : // clobbered registers ); - for (;;); // needed to silence compiler warning + MP_UNREACHABLE } #endif // MICROPY_NLR_X64 diff --git a/python/src/py/nlrx86.c b/python/src/py/nlrx86.c index 59b97d8ee..6195db63c 100644 --- a/python/src/py/nlrx86.c +++ b/python/src/py/nlrx86.c @@ -100,7 +100,7 @@ NORETURN void nlr_jump(void *val) { : // clobbered registers ); - for (;;); // needed to silence compiler warning + MP_UNREACHABLE } #endif // MICROPY_NLR_X86 diff --git a/python/src/py/nlrxtensa.c b/python/src/py/nlrxtensa.c index cd3dee364..895b2029e 100644 --- a/python/src/py/nlrxtensa.c +++ b/python/src/py/nlrxtensa.c @@ -77,7 +77,7 @@ NORETURN void nlr_jump(void *val) { : // clobbered registers ); - for (;;); // needed to silence compiler warning + MP_UNREACHABLE } #endif // MICROPY_NLR_XTENSA diff --git a/python/src/py/obj.c b/python/src/py/obj.c index 122f0ea62..4588d896a 100644 --- a/python/src/py/obj.c +++ b/python/src/py/obj.c @@ -93,7 +93,7 @@ void mp_obj_print_exception(const mp_print_t *print, mp_obj_t exc) { #endif // the block name can be NULL if it's unknown qstr block = values[i + 2]; - if (block == MP_QSTR_NULL) { + if (block == MP_QSTRnull) { mp_print_str(print, "\n"); } else { mp_printf(print, ", in %q\n", block); @@ -113,7 +113,7 @@ bool mp_obj_is_true(mp_obj_t arg) { } else if (arg == mp_const_none) { return 0; } else if (mp_obj_is_small_int(arg)) { - if (MP_OBJ_SMALL_INT_VALUE(arg) == 0) { + if (arg == MP_OBJ_NEW_SMALL_INT(0)) { return 0; } else { return 1; diff --git a/python/src/py/obj.h b/python/src/py/obj.h index a22d5f8b8..5b54892ce 100644 --- a/python/src/py/obj.h +++ b/python/src/py/obj.h @@ -691,7 +691,6 @@ mp_float_t mp_obj_get_float(mp_obj_t self_in); bool mp_obj_get_float_maybe(mp_obj_t arg, mp_float_t *value); void mp_obj_get_complex(mp_obj_t self_in, mp_float_t *real, mp_float_t *imag); #endif -//qstr mp_obj_get_qstr(mp_obj_t arg); void mp_obj_get_array(mp_obj_t o, size_t *len, mp_obj_t **items); // *items may point inside a GC block void mp_obj_get_array_fixed_n(mp_obj_t o, size_t len, mp_obj_t **items); // *items may point inside a GC block size_t mp_get_index(const mp_obj_type_t *type, size_t len, mp_obj_t index, bool is_slice); diff --git a/python/src/py/objarray.c b/python/src/py/objarray.c index 4e58d8e5d..c19617d4e 100644 --- a/python/src/py/objarray.c +++ b/python/src/py/objarray.c @@ -445,7 +445,7 @@ STATIC mp_obj_t array_subscr(mp_obj_t self_in, mp_obj_t index_in, mp_obj_t value if (len_adj > o->free) { // TODO: alloc policy; at the moment we go conservative o->items = m_renew(byte, o->items, (o->len + o->free) * item_sz, (o->len + len_adj) * item_sz); - o->free = 0; + o->free = len_adj; dest_items = o->items; } mp_seq_replace_slice_grow_inplace(dest_items, o->len, @@ -458,6 +458,7 @@ STATIC mp_obj_t array_subscr(mp_obj_t self_in, mp_obj_t index_in, mp_obj_t value mp_seq_clear(dest_items, o->len + len_adj, o->len, item_sz); // TODO: alloc policy after 
shrinking } + o->free -= len_adj; o->len += len_adj; return mp_const_none; #else diff --git a/python/src/py/objdict.c b/python/src/py/objdict.c index 0a223f731..7a43a8548 100644 --- a/python/src/py/objdict.c +++ b/python/src/py/objdict.c @@ -31,6 +31,7 @@ #include "py/runtime.h" #include "py/builtin.h" #include "py/objtype.h" +#include "py/objstr.h" #define mp_obj_is_dict_type(o) (mp_obj_is_obj(o) && ((mp_obj_base_t*)MP_OBJ_TO_PTR(o))->type->make_new == dict_make_new) @@ -59,7 +60,7 @@ STATIC void dict_print(const mp_print_t *print, mp_obj_t self_in, mp_print_kind_ if (!(MICROPY_PY_UJSON && kind == PRINT_JSON)) { kind = PRINT_REPR; } - if (MICROPY_PY_COLLECTIONS_ORDEREDDICT && self->base.type != &mp_type_dict) { + if (MICROPY_PY_COLLECTIONS_ORDEREDDICT && self->base.type != &mp_type_dict && kind != PRINT_JSON) { mp_printf(print, "%q(", self->base.type->name); } mp_print_str(print, "{"); @@ -70,12 +71,19 @@ STATIC void dict_print(const mp_print_t *print, mp_obj_t self_in, mp_print_kind_ mp_print_str(print, ", "); } first = false; + bool add_quote = MICROPY_PY_UJSON && kind == PRINT_JSON && !mp_obj_is_str_or_bytes(next->key); + if (add_quote) { + mp_print_str(print, "\""); + } mp_obj_print_helper(print, next->key, kind); + if (add_quote) { + mp_print_str(print, "\""); + } mp_print_str(print, ": "); mp_obj_print_helper(print, next->value, kind); } mp_print_str(print, "}"); - if (MICROPY_PY_COLLECTIONS_ORDEREDDICT && self->base.type != &mp_type_dict) { + if (MICROPY_PY_COLLECTIONS_ORDEREDDICT && self->base.type != &mp_type_dict && kind != PRINT_JSON) { mp_print_str(print, ")"); } } diff --git a/python/src/py/objenumerate.c b/python/src/py/objenumerate.c index 493e45c2a..243c9f83a 100644 --- a/python/src/py/objenumerate.c +++ b/python/src/py/objenumerate.c @@ -59,7 +59,7 @@ STATIC mp_obj_t enumerate_make_new(const mp_obj_type_t *type, size_t n_args, siz o->iter = mp_getiter(arg_vals.iterable.u_obj, NULL); o->cur = arg_vals.start.u_int; #else - (void)n_kw; + mp_arg_check_num(n_args, n_kw, 1, 2, false); mp_obj_enumerate_t *o = m_new_obj(mp_obj_enumerate_t); o->base.type = type; o->iter = mp_getiter(args[0], NULL); diff --git a/python/src/py/objexcept.c b/python/src/py/objexcept.c index 1fb636f66..dadbe98ae 100644 --- a/python/src/py/objexcept.c +++ b/python/src/py/objexcept.c @@ -100,11 +100,6 @@ mp_obj_t mp_alloc_emergency_exception_buf(mp_obj_t size_in) { #endif #endif // MICROPY_ENABLE_EMERGENCY_EXCEPTION_BUF -// Instance of GeneratorExit exception - needed by generator.close() -// This would belong to objgenerator.c, but to keep mp_obj_exception_t -// definition module-private so far, have it here. 
-const mp_obj_exception_t mp_const_GeneratorExit_obj = {{&mp_type_GeneratorExit}, 0, 0, NULL, (mp_obj_tuple_t*)&mp_const_empty_tuple_obj}; - void mp_obj_exception_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kind_t kind) { mp_obj_exception_t *o = MP_OBJ_TO_PTR(o_in); mp_print_kind_t k = kind & ~PRINT_EXC_SUBCLASS; @@ -126,7 +121,7 @@ void mp_obj_exception_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kin // try to provide a nice OSError error message if (o->base.type == &mp_type_OSError && mp_obj_is_small_int(o->args->items[0])) { qstr qst = mp_errno_to_str(o->args->items[0]); - if (qst != MP_QSTR_NULL) { + if (qst != MP_QSTRnull) { mp_printf(print, "[Errno " INT_FMT "] %q", MP_OBJ_SMALL_INT_VALUE(o->args->items[0]), qst); return; } diff --git a/python/src/py/objfun.c b/python/src/py/objfun.c index d96c79ede..7051f3476 100644 --- a/python/src/py/objfun.c +++ b/python/src/py/objfun.c @@ -139,7 +139,7 @@ const mp_obj_type_t mp_type_fun_builtin_var = { /* byte code functions */ qstr mp_obj_code_get_name(const byte *code_info) { - code_info = mp_decode_uint_skip(code_info); // skip code_info_size entry + MP_BC_PRELUDE_SIZE_DECODE(code_info); #if MICROPY_PERSISTENT_CODE return code_info[0] | (code_info[1] << 8); #else @@ -161,12 +161,7 @@ qstr mp_obj_fun_get_name(mp_const_obj_t fun_in) { #endif const byte *bc = fun->bytecode; - bc = mp_decode_uint_skip(bc); // skip n_state - bc = mp_decode_uint_skip(bc); // skip n_exc_stack - bc++; // skip scope_params - bc++; // skip n_pos_args - bc++; // skip n_kwonly_args - bc++; // skip n_def_pos_args + MP_BC_PRELUDE_SIG_DECODE(bc); return mp_obj_code_get_name(bc); } @@ -197,18 +192,19 @@ STATIC void dump_args(const mp_obj_t *a, size_t sz) { #define DECODE_CODESTATE_SIZE(bytecode, n_state_out_var, state_size_out_var) \ { \ - /* bytecode prelude: state size and exception stack size */ \ - n_state_out_var = mp_decode_uint_value(bytecode); \ - size_t n_exc_stack = mp_decode_uint_value(mp_decode_uint_skip(bytecode)); \ - \ + const uint8_t *ip = bytecode; \ + size_t n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_args; \ + MP_BC_PRELUDE_SIG_DECODE_INTO(ip, n_state_out_var, n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_args); \ + \ /* state size in bytes */ \ state_size_out_var = n_state_out_var * sizeof(mp_obj_t) \ + n_exc_stack * sizeof(mp_exc_stack_t); \ } -#define INIT_CODESTATE(code_state, _fun_bc, n_args, n_kw, args) \ +#define INIT_CODESTATE(code_state, _fun_bc, _n_state, n_args, n_kw, args) \ code_state->fun_bc = _fun_bc; \ code_state->ip = 0; \ + code_state->n_state = _n_state; \ mp_setup_code_state(code_state, n_args, n_kw, args); \ code_state->old_globals = mp_globals_get(); @@ -235,7 +231,7 @@ mp_code_state_t *mp_obj_fun_bc_prepare_codestate(mp_obj_t self_in, size_t n_args } #endif - INIT_CODESTATE(code_state, self, n_args, n_kw, args); + INIT_CODESTATE(code_state, self, n_state, n_args, n_kw, args); // execute the byte code with the correct globals context mp_globals_set(self->globals); @@ -280,7 +276,7 @@ STATIC mp_obj_t fun_bc_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const } #endif - INIT_CODESTATE(code_state, self, n_args, n_kw, args); + INIT_CODESTATE(code_state, self, n_state, n_args, n_kw, args); // execute the byte code with the correct globals context mp_globals_set(self->globals); @@ -294,9 +290,11 @@ STATIC mp_obj_t fun_bc_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const assert(0); } } - const byte *bytecode_ptr = mp_decode_uint_skip(mp_decode_uint_skip(self->bytecode)); - size_t 
n_pos_args = bytecode_ptr[1]; - size_t n_kwonly_args = bytecode_ptr[2]; + const byte *bytecode_ptr = self->bytecode; + size_t n_state_unused, n_exc_stack_unused, scope_flags_unused; + size_t n_pos_args, n_kwonly_args, n_def_args_unused; + MP_BC_PRELUDE_SIG_DECODE_INTO(bytecode_ptr, n_state_unused, n_exc_stack_unused, + scope_flags_unused, n_pos_args, n_kwonly_args, n_def_args_unused); // We can't check the case when an exception is returned in state[0] // and there are no arguments, because in this case our detection slot may have // been overwritten by the returned exception (which is allowed). @@ -472,7 +470,7 @@ STATIC mp_uint_t convert_obj_for_inline_asm(mp_obj_t obj) { return (mp_uint_t)items; } else { mp_buffer_info_t bufinfo; - if (mp_get_buffer(obj, &bufinfo, MP_BUFFER_WRITE)) { + if (mp_get_buffer(obj, &bufinfo, MP_BUFFER_READ)) { // supports the buffer protocol, return a pointer to the data return (mp_uint_t)bufinfo.buf; } else { diff --git a/python/src/py/objfun.h b/python/src/py/objfun.h index 257b8a65a..905b5dbca 100644 --- a/python/src/py/objfun.h +++ b/python/src/py/objfun.h @@ -33,6 +33,9 @@ typedef struct _mp_obj_fun_bc_t { mp_obj_dict_t *globals; // the context within which this function was defined const byte *bytecode; // bytecode for the function const mp_uint_t *const_table; // constant table + #if MICROPY_PY_SYS_SETTRACE + const struct _mp_raw_code_t *rc; + #endif // the following extra_args array is allocated space to take (in order): // - values of positional default args (if any) // - a single slot for default kw args dict (if it has them) diff --git a/python/src/py/objgenerator.c b/python/src/py/objgenerator.c index 29c7cb16d..903a6469c 100644 --- a/python/src/py/objgenerator.c +++ b/python/src/py/objgenerator.c @@ -30,16 +30,23 @@ #include "py/runtime.h" #include "py/bc.h" +#include "py/objstr.h" #include "py/objgenerator.h" #include "py/objfun.h" #include "py/stackctrl.h" +// Instance of GeneratorExit exception - needed by generator.close() +const mp_obj_exception_t mp_const_GeneratorExit_obj = {{&mp_type_GeneratorExit}, 0, 0, NULL, (mp_obj_tuple_t*)&mp_const_empty_tuple_obj}; + /******************************************************************************/ /* generator wrapper */ typedef struct _mp_obj_gen_instance_t { mp_obj_base_t base; - mp_obj_dict_t *globals; + // mp_const_none: Not-running, no exception. + // MP_OBJ_NULL: Running, no exception. + // other: Not running, pending exception. 
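+    // (This slot is also where generator.pend_throw() stores an exception to be
+    // raised on the next resume.)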
+ mp_obj_t pend_exc; mp_code_state_t code_state; } mp_obj_gen_instance_t; @@ -48,17 +55,18 @@ STATIC mp_obj_t gen_wrap_call(mp_obj_t self_in, size_t n_args, size_t n_kw, cons mp_obj_fun_bc_t *self_fun = MP_OBJ_TO_PTR(self_in); // bytecode prelude: get state size and exception stack size - size_t n_state = mp_decode_uint_value(self_fun->bytecode); - size_t n_exc_stack = mp_decode_uint_value(mp_decode_uint_skip(self_fun->bytecode)); + const uint8_t *ip = self_fun->bytecode; + MP_BC_PRELUDE_SIG_DECODE(ip); // allocate the generator object, with room for local stack and exception stack mp_obj_gen_instance_t *o = m_new_obj_var(mp_obj_gen_instance_t, byte, n_state * sizeof(mp_obj_t) + n_exc_stack * sizeof(mp_exc_stack_t)); o->base.type = &mp_type_gen_instance; - o->globals = self_fun->globals; + o->pend_exc = mp_const_none; o->code_state.fun_bc = self_fun; o->code_state.ip = 0; + o->code_state.n_state = n_state; mp_setup_code_state(&o->code_state, n_args, n_kw, args); return MP_OBJ_FROM_PTR(o); } @@ -84,7 +92,14 @@ STATIC mp_obj_t native_gen_wrap_call(mp_obj_t self_in, size_t n_args, size_t n_k // Determine start of prelude, and extract n_state from it uintptr_t prelude_offset = ((uintptr_t*)self_fun->bytecode)[0]; - size_t n_state = mp_decode_uint_value(self_fun->bytecode + prelude_offset); + #if MICROPY_EMIT_NATIVE_PRELUDE_AS_BYTES_OBJ + // Prelude is in bytes object in const_table, at index prelude_offset + mp_obj_str_t *prelude_bytes = MP_OBJ_TO_PTR(self_fun->const_table[prelude_offset]); + prelude_offset = (const byte*)prelude_bytes->data - self_fun->bytecode; + #endif + const uint8_t *ip = self_fun->bytecode + prelude_offset; + size_t n_state, n_exc_stack_unused, scope_flags, n_pos_args, n_kwonly_args, n_def_args; + MP_BC_PRELUDE_SIG_DECODE_INTO(ip, n_state, n_exc_stack_unused, scope_flags, n_pos_args, n_kwonly_args, n_def_args); size_t n_exc_stack = 0; // Allocate the generator object, with room for local stack and exception stack @@ -93,13 +108,14 @@ STATIC mp_obj_t native_gen_wrap_call(mp_obj_t self_in, size_t n_args, size_t n_k o->base.type = &mp_type_gen_instance; // Parse the input arguments and set up the code state - o->globals = self_fun->globals; + o->pend_exc = mp_const_none; o->code_state.fun_bc = self_fun; o->code_state.ip = (const byte*)prelude_offset; + o->code_state.n_state = n_state; mp_setup_code_state(&o->code_state, n_args, n_kw, args); // Indicate we are a native function, which doesn't use this variable - o->code_state.exc_sp = NULL; + o->code_state.exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_SENTINEL; // Prepare the generator instance for execution uintptr_t start_offset = ((uintptr_t*)self_fun->bytecode)[1]; @@ -138,38 +154,39 @@ mp_vm_return_kind_t mp_obj_gen_resume(mp_obj_t self_in, mp_obj_t send_value, mp_ *ret_val = MP_OBJ_STOP_ITERATION; return MP_VM_RETURN_NORMAL; } + + // Ensure the generator cannot be reentered during execution + if (self->pend_exc == MP_OBJ_NULL) { + mp_raise_ValueError("generator already executing"); + } + + #if MICROPY_PY_GENERATOR_PEND_THROW + // If exception is pending (set using .pend_throw()), process it now. + if (self->pend_exc != mp_const_none) { + throw_value = self->pend_exc; + } + #endif + + // If the generator is started, allow sending a value. if (self->code_state.sp == self->code_state.state - 1) { if (send_value != mp_const_none) { mp_raise_TypeError("can't send non-None value to a just-started generator"); } } else { - #if MICROPY_PY_GENERATOR_PEND_THROW - // If exception is pending (set using .pend_throw()), process it now. 
- if (*self->code_state.sp != mp_const_none) { - throw_value = *self->code_state.sp; - *self->code_state.sp = MP_OBJ_NULL; - } else - #endif - { - *self->code_state.sp = send_value; - } + *self->code_state.sp = send_value; } - // We set self->globals=NULL while executing, for a sentinel to ensure the generator - // cannot be reentered during execution - if (self->globals == NULL) { - mp_raise_ValueError("generator already executing"); - } + // Mark as running + self->pend_exc = MP_OBJ_NULL; // Set up the correct globals context for the generator and execute it self->code_state.old_globals = mp_globals_get(); - mp_globals_set(self->globals); - self->globals = NULL; + mp_globals_set(self->code_state.fun_bc->globals); mp_vm_return_kind_t ret_kind; #if MICROPY_EMIT_NATIVE - if (self->code_state.exc_sp == NULL) { + if (self->code_state.exc_sp_idx == MP_CODE_STATE_EXC_SP_IDX_SENTINEL) { // A native generator, with entry point 2 words into the "bytecode" pointer typedef uintptr_t (*mp_fun_native_gen_t)(void*, mp_obj_t); mp_fun_native_gen_t fun = MICROPY_MAKE_POINTER_CALLABLE((const void*)(self->code_state.fun_bc->bytecode + 2 * sizeof(uintptr_t))); @@ -181,9 +198,11 @@ mp_vm_return_kind_t mp_obj_gen_resume(mp_obj_t self_in, mp_obj_t send_value, mp_ ret_kind = mp_execute_bytecode(&self->code_state, throw_value); } - self->globals = mp_globals_get(); mp_globals_set(self->code_state.old_globals); + // Mark as not running + self->pend_exc = mp_const_none; + switch (ret_kind) { case MP_VM_RETURN_NORMAL: default: @@ -297,16 +316,18 @@ STATIC mp_obj_t gen_instance_close(mp_obj_t self_in) { } STATIC MP_DEFINE_CONST_FUN_OBJ_1(gen_instance_close_obj, gen_instance_close); +#if MICROPY_PY_GENERATOR_PEND_THROW STATIC mp_obj_t gen_instance_pend_throw(mp_obj_t self_in, mp_obj_t exc_in) { mp_obj_gen_instance_t *self = MP_OBJ_TO_PTR(self_in); - if (self->code_state.sp == self->code_state.state - 1) { - mp_raise_TypeError("can't pend throw to just-started generator"); + if (self->pend_exc == MP_OBJ_NULL) { + mp_raise_ValueError("generator already executing"); } - mp_obj_t prev = *self->code_state.sp; - *self->code_state.sp = exc_in; + mp_obj_t prev = self->pend_exc; + self->pend_exc = exc_in; return prev; } STATIC MP_DEFINE_CONST_FUN_OBJ_2(gen_instance_pend_throw_obj, gen_instance_pend_throw); +#endif STATIC const mp_rom_map_elem_t gen_instance_locals_dict_table[] = { { MP_ROM_QSTR(MP_QSTR_close), MP_ROM_PTR(&gen_instance_close_obj) }, diff --git a/python/src/py/objint.c b/python/src/py/objint.c index 6473767e4..2fdcf5864 100644 --- a/python/src/py/objint.c +++ b/python/src/py/objint.c @@ -137,7 +137,7 @@ STATIC mp_fp_as_int_class_t mp_classify_fp_as_int(mp_float_t val) { mp_obj_t mp_obj_new_int_from_float(mp_float_t val) { int cl = fpclassify(val); if (cl == FP_INFINITE) { - nlr_raise(mp_obj_new_exception_msg(&mp_type_OverflowError, "can't convert inf to int")); + mp_raise_msg(&mp_type_OverflowError, "can't convert inf to int"); } else if (cl == FP_NAN) { mp_raise_ValueError("can't convert NaN to int"); } else { diff --git a/python/src/py/objmodule.c b/python/src/py/objmodule.c index 4a07913c5..81558a02e 100644 --- a/python/src/py/objmodule.c +++ b/python/src/py/objmodule.c @@ -3,7 +3,7 @@ * * The MIT License (MIT) * - * Copyright (c) 2013, 2014 Damien P. George + * Copyright (c) 2013-2019 Damien P. 
George * Copyright (c) 2014-2015 Paul Sokolovsky * * Permission is hereby granted, free of charge, to any person obtaining a copy @@ -26,6 +26,7 @@ */ #include +#include #include #include "py/objmodule.h" @@ -223,6 +224,9 @@ STATIC const mp_rom_map_elem_t mp_builtin_module_table[] = { #if MICROPY_PY_BTREE { MP_ROM_QSTR(MP_QSTR_btree), MP_ROM_PTR(&mp_module_btree) }, #endif +#if MICROPY_PY_BLUETOOTH + { MP_ROM_QSTR(MP_QSTR_ubluetooth), MP_ROM_PTR(&mp_module_ubluetooth) }, +#endif // extra builtin modules as defined by a port MICROPY_PORT_BUILTIN_MODULES @@ -235,14 +239,6 @@ STATIC const mp_rom_map_elem_t mp_builtin_module_table[] = { MP_DEFINE_CONST_MAP(mp_builtin_module_map, mp_builtin_module_table); -#if MICROPY_MODULE_WEAK_LINKS -STATIC const mp_rom_map_elem_t mp_builtin_module_weak_links_table[] = { - MICROPY_PORT_BUILTIN_MODULE_WEAK_LINKS -}; - -MP_DEFINE_CONST_MAP(mp_builtin_module_weak_links_map, mp_builtin_module_weak_links_table); -#endif - // returns MP_OBJ_NULL if not found mp_obj_t mp_module_get(qstr module_name) { mp_map_t *mp_loaded_modules_map = &MP_STATE_VM(mp_loaded_modules_dict).map; @@ -267,6 +263,21 @@ void mp_module_register(qstr qst, mp_obj_t module) { mp_map_lookup(mp_loaded_modules_map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = module; } +#if MICROPY_MODULE_WEAK_LINKS +// Search for u"foo" in built-in modules, return MP_OBJ_NULL if not found +mp_obj_t mp_module_search_umodule(const char *module_str) { + for (size_t i = 0; i < MP_ARRAY_SIZE(mp_builtin_module_table); ++i) { + const mp_map_elem_t *entry = (const mp_map_elem_t*)&mp_builtin_module_table[i]; + const char *key = qstr_str(MP_OBJ_QSTR_VALUE(entry->key)); + if (key[0] == 'u' && strcmp(&key[1], module_str) == 0) { + return (mp_obj_t)entry->value; + } + + } + return MP_OBJ_NULL; +} +#endif + #if MICROPY_MODULE_BUILTIN_INIT void mp_module_call_init(qstr module_name, mp_obj_t module_obj) { // Look for __init__ and call it if it exists diff --git a/python/src/py/objmodule.h b/python/src/py/objmodule.h index b7702ec50..33a0ff07e 100644 --- a/python/src/py/objmodule.h +++ b/python/src/py/objmodule.h @@ -3,7 +3,7 @@ * * The MIT License (MIT) * - * Copyright (c) 2013, 2014 Damien P. George + * Copyright (c) 2013-2019 Damien P. 
George * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -34,6 +34,8 @@ extern const mp_map_t mp_builtin_module_weak_links_map; mp_obj_t mp_module_get(qstr module_name); void mp_module_register(qstr qstr, mp_obj_t module); +mp_obj_t mp_module_search_umodule(const char *module_str); + #if MICROPY_MODULE_BUILTIN_INIT void mp_module_call_init(qstr module_name, mp_obj_t module_obj); #else diff --git a/python/src/py/objstr.c b/python/src/py/objstr.c index 1047ea94c..e221982c5 100644 --- a/python/src/py/objstr.c +++ b/python/src/py/objstr.c @@ -169,7 +169,7 @@ mp_obj_t mp_obj_str_make_new(const mp_obj_type_t *type, size_t n_args, size_t n_ // Check if a qstr with this data already exists qstr q = qstr_find_strn((const char*)str_data, str_len); - if (q != MP_QSTR_NULL) { + if (q != MP_QSTRnull) { return MP_OBJ_NEW_QSTR(q); } @@ -1913,9 +1913,6 @@ mp_int_t mp_obj_str_get_buffer(mp_obj_t self_in, mp_buffer_info_t *bufinfo, mp_u return 0; } else { // can't write to a string - bufinfo->buf = NULL; - bufinfo->len = 0; - bufinfo->typecode = -1; return 1; } } @@ -2042,7 +2039,7 @@ mp_obj_t mp_obj_new_str_from_vstr(const mp_obj_type_t *type, vstr_t *vstr) { // if not a bytes object, look if a qstr with this data already exists if (type == &mp_type_str) { qstr q = qstr_find_strn(vstr->buf, vstr->len); - if (q != MP_QSTR_NULL) { + if (q != MP_QSTRnull) { vstr_clear(vstr); vstr->alloc = 0; return MP_OBJ_NEW_QSTR(q); @@ -2067,7 +2064,7 @@ mp_obj_t mp_obj_new_str_from_vstr(const mp_obj_type_t *type, vstr_t *vstr) { mp_obj_t mp_obj_new_str(const char* data, size_t len) { qstr q = qstr_find_strn(data, len); - if (q != MP_QSTR_NULL) { + if (q != MP_QSTRnull) { // qstr with this data already exists return MP_OBJ_NEW_QSTR(q); } else { diff --git a/python/src/py/objstringio.c b/python/src/py/objstringio.c index 89b679031..cca4a8129 100644 --- a/python/src/py/objstringio.c +++ b/python/src/py/objstringio.c @@ -70,9 +70,9 @@ STATIC mp_uint_t stringio_read(mp_obj_t o_in, void *buf, mp_uint_t size, int *er STATIC void stringio_copy_on_write(mp_obj_stringio_t *o) { const void *buf = o->vstr->buf; o->vstr->buf = m_new(char, o->vstr->len); - memcpy(o->vstr->buf, buf, o->vstr->len); o->vstr->fixed_buf = false; o->ref_obj = MP_OBJ_NULL; + memcpy(o->vstr->buf, buf, o->vstr->len); } STATIC mp_uint_t stringio_write(mp_obj_t o_in, const void *buf, mp_uint_t size, int *errcode) { @@ -244,12 +244,6 @@ STATIC const mp_stream_p_t stringio_stream_p = { .is_text = true, }; -STATIC const mp_stream_p_t bytesio_stream_p = { - .read = stringio_read, - .write = stringio_write, - .ioctl = stringio_ioctl, -}; - const mp_obj_type_t mp_type_stringio = { { &mp_type_type }, .name = MP_QSTR_StringIO, @@ -262,6 +256,12 @@ const mp_obj_type_t mp_type_stringio = { }; #if MICROPY_PY_IO_BYTESIO +STATIC const mp_stream_p_t bytesio_stream_p = { + .read = stringio_read, + .write = stringio_write, + .ioctl = stringio_ioctl, +}; + const mp_obj_type_t mp_type_bytesio = { { &mp_type_type }, .name = MP_QSTR_BytesIO, diff --git a/python/src/py/objtuple.c b/python/src/py/objtuple.c index 39eb94023..740e0795b 100644 --- a/python/src/py/objtuple.c +++ b/python/src/py/objtuple.c @@ -31,6 +31,9 @@ #include "py/objtuple.h" #include "py/runtime.h" +// type check is done on getiter method to allow tuple, namedtuple, attrtuple +#define mp_obj_is_tuple_compatible(o) (mp_obj_get_type(o)->getiter == mp_obj_tuple_getiter) + 
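Illustrative sketch (not part of the upstream diff): the new mp_obj_is_tuple_compatible() macro above checks the getiter slot instead of the exact type, which is why helpers that assert on it, such as mp_obj_tuple_get() in the hunk below, now accept namedtuple and attrtuple instances as well as plain tuples. The helper name sum_first_two is invented for illustration; only the mp_obj_* calls are real MicroPython APIs.

    #include "py/obj.h"
    #include "py/runtime.h"

    // Works for tuple, namedtuple and attrtuple alike, since all three share
    // mp_obj_tuple_getiter and therefore pass mp_obj_is_tuple_compatible().
    mp_obj_t sum_first_two(mp_obj_t maybe_tuple) {
        size_t len;
        mp_obj_t *items;
        mp_obj_tuple_get(maybe_tuple, &len, &items);
        if (len < 2) {
            return mp_const_none;
        }
        return mp_binary_op(MP_BINARY_OP_ADD, items[0], items[1]);
    }

This relaxation is also why the assert in mp_obj_tuple_get() is changed in the hunk below from an exact mp_type_tuple check to mp_obj_is_tuple_compatible().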
/******************************************************************************/ /* tuple */ @@ -101,8 +104,7 @@ STATIC mp_obj_t mp_obj_tuple_make_new(const mp_obj_type_t *type_in, size_t n_arg // Don't pass MP_BINARY_OP_NOT_EQUAL here STATIC mp_obj_t tuple_cmp_helper(mp_uint_t op, mp_obj_t self_in, mp_obj_t another_in) { - // type check is done on getiter method to allow tuple, namedtuple, attrtuple - mp_check_self(mp_obj_get_type(self_in)->getiter == mp_obj_tuple_getiter); + mp_check_self(mp_obj_is_tuple_compatible(self_in)); mp_obj_type_t *another_type = mp_obj_get_type(another_in); mp_obj_tuple_t *self = MP_OBJ_TO_PTR(self_in); if (another_type->getiter != mp_obj_tuple_getiter) { @@ -249,7 +251,7 @@ mp_obj_t mp_obj_new_tuple(size_t n, const mp_obj_t *items) { } void mp_obj_tuple_get(mp_obj_t self_in, size_t *len, mp_obj_t **items) { - assert(mp_obj_is_type(self_in, &mp_type_tuple)); + assert(mp_obj_is_tuple_compatible(self_in)); mp_obj_tuple_t *self = MP_OBJ_TO_PTR(self_in); *len = self->len; *items = &self->items[0]; diff --git a/python/src/py/objtype.c b/python/src/py/objtype.c index 3e65a32f0..bf089dc49 100644 --- a/python/src/py/objtype.c +++ b/python/src/py/objtype.c @@ -460,7 +460,7 @@ STATIC mp_obj_t instance_unary_op(mp_unary_op_t op, mp_obj_t self_in) { } // Binary-op enum values not listed here will have the default value of 0 in the -// table, corresponding to MP_QSTR_NULL, and are therefore unsupported (a lookup will +// table, corresponding to MP_QSTRnull, and are therefore unsupported (a lookup will // fail). They can be added at the expense of code size for the qstr. // Qstrs for special methods are guaranteed to have a small value, so we use byte // type to represent them. @@ -478,6 +478,7 @@ const byte mp_binary_op_method_name[MP_BINARY_OP_NUM_RUNTIME] = { [MP_BINARY_OP_INPLACE_SUBTRACT] = MP_QSTR___isub__, #if MICROPY_PY_ALL_INPLACE_SPECIAL_METHODS [MP_BINARY_OP_INPLACE_MULTIPLY] = MP_QSTR___imul__, + [MP_BINARY_OP_INPLACE_MAT_MULTIPLY] = MP_QSTR___imatmul__, [MP_BINARY_OP_INPLACE_FLOOR_DIVIDE] = MP_QSTR___ifloordiv__, [MP_BINARY_OP_INPLACE_TRUE_DIVIDE] = MP_QSTR___itruediv__, [MP_BINARY_OP_INPLACE_MODULO] = MP_QSTR___imod__, @@ -493,6 +494,7 @@ const byte mp_binary_op_method_name[MP_BINARY_OP_NUM_RUNTIME] = { [MP_BINARY_OP_SUBTRACT] = MP_QSTR___sub__, #if MICROPY_PY_ALL_SPECIAL_METHODS [MP_BINARY_OP_MULTIPLY] = MP_QSTR___mul__, + [MP_BINARY_OP_MAT_MULTIPLY] = MP_QSTR___matmul__, [MP_BINARY_OP_FLOOR_DIVIDE] = MP_QSTR___floordiv__, [MP_BINARY_OP_TRUE_DIVIDE] = MP_QSTR___truediv__, [MP_BINARY_OP_MODULO] = MP_QSTR___mod__, @@ -510,6 +512,7 @@ const byte mp_binary_op_method_name[MP_BINARY_OP_NUM_RUNTIME] = { [MP_BINARY_OP_REVERSE_SUBTRACT] = MP_QSTR___rsub__, #if MICROPY_PY_ALL_SPECIAL_METHODS [MP_BINARY_OP_REVERSE_MULTIPLY] = MP_QSTR___rmul__, + [MP_BINARY_OP_REVERSE_MAT_MULTIPLY] = MP_QSTR___rmatmul__, [MP_BINARY_OP_REVERSE_FLOOR_DIVIDE] = MP_QSTR___rfloordiv__, [MP_BINARY_OP_REVERSE_TRUE_DIVIDE] = MP_QSTR___rtruediv__, [MP_BINARY_OP_REVERSE_MODULO] = MP_QSTR___rmod__, @@ -1011,6 +1014,21 @@ STATIC void type_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) { dest[0] = MP_OBJ_NEW_QSTR(self->name); return; } + if (attr == MP_QSTR___bases__) { + if (self == &mp_type_object) { + dest[0] = mp_const_empty_tuple; + return; + } + mp_obj_t parent_obj = self->parent ? 
MP_OBJ_FROM_PTR(self->parent) : MP_OBJ_FROM_PTR(&mp_type_object); + #if MICROPY_MULTIPLE_INHERITANCE + if (mp_obj_is_type(parent_obj, &mp_type_tuple)) { + dest[0] = parent_obj; + return; + } + #endif + dest[0] = mp_obj_new_tuple(1, &parent_obj); + return; + } #endif struct class_lookup_data lookup = { .obj = (mp_obj_instance_t*)self, diff --git a/python/src/py/parse.c b/python/src/py/parse.c index 66110e7c3..82b5413e5 100644 --- a/python/src/py/parse.c +++ b/python/src/py/parse.c @@ -135,8 +135,8 @@ STATIC const uint16_t rule_arg_combined_table[] = { #define RULE_EXPAND(x) x #define RULE_PADDING(rule, ...) RULE_PADDING2(rule, __VA_ARGS__, RULE_PADDING_IDS(rule)) #define RULE_PADDING2(rule, ...) RULE_EXPAND(RULE_PADDING3(rule, __VA_ARGS__)) -#define RULE_PADDING3(rule, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, ...) __VA_ARGS__ -#define RULE_PADDING_IDS(r) PAD12_##r, PAD11_##r, PAD10_##r, PAD9_##r, PAD8_##r, PAD7_##r, PAD6_##r, PAD5_##r, PAD4_##r, PAD3_##r, PAD2_##r, PAD1_##r, +#define RULE_PADDING3(rule, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, ...) __VA_ARGS__ +#define RULE_PADDING_IDS(r) PAD13_##r, PAD12_##r, PAD11_##r, PAD10_##r, PAD9_##r, PAD8_##r, PAD7_##r, PAD6_##r, PAD5_##r, PAD4_##r, PAD3_##r, PAD2_##r, PAD1_##r, // Use an enum to create constants specifying how much room a rule takes in rule_arg_combined_table enum { @@ -155,8 +155,8 @@ enum { // Macro to compute the start of a rule in rule_arg_combined_table #define RULE_ARG_OFFSET(rule, ...) RULE_ARG_OFFSET2(rule, __VA_ARGS__, RULE_ARG_OFFSET_IDS(rule)) #define RULE_ARG_OFFSET2(rule, ...) RULE_EXPAND(RULE_ARG_OFFSET3(rule, __VA_ARGS__)) -#define RULE_ARG_OFFSET3(rule, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, ...) _13 -#define RULE_ARG_OFFSET_IDS(r) PAD12_##r, PAD11_##r, PAD10_##r, PAD9_##r, PAD8_##r, PAD7_##r, PAD6_##r, PAD5_##r, PAD4_##r, PAD3_##r, PAD2_##r, PAD1_##r, PAD0_##r, +#define RULE_ARG_OFFSET3(rule, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, ...) _14 +#define RULE_ARG_OFFSET_IDS(r) PAD13_##r, PAD12_##r, PAD11_##r, PAD10_##r, PAD9_##r, PAD8_##r, PAD7_##r, PAD6_##r, PAD5_##r, PAD4_##r, PAD3_##r, PAD2_##r, PAD1_##r, PAD0_##r, // Use the above enum values to create a table of offsets for each rule's arg // data, which indexes rule_arg_combined_table. The offsets require 9 bits of @@ -502,7 +502,7 @@ STATIC void push_result_token(parser_t *parser, uint8_t rule_id) { } else if (lex->tok_kind == MP_TOKEN_STRING || lex->tok_kind == MP_TOKEN_BYTES) { // Don't automatically intern all strings/bytes. doc strings (which are usually large) // will be discarded by the compiler, and so we shouldn't intern them. - qstr qst = MP_QSTR_NULL; + qstr qst = MP_QSTRnull; if (lex->vstr.len <= MICROPY_ALLOC_PARSE_INTERN_STRING_LEN) { // intern short strings qst = qstr_from_strn(lex->vstr.buf, lex->vstr.len); @@ -510,7 +510,7 @@ STATIC void push_result_token(parser_t *parser, uint8_t rule_id) { // check if this string is already interned qst = qstr_find_strn(lex->vstr.buf, lex->vstr.len); } - if (qst != MP_QSTR_NULL) { + if (qst != MP_QSTRnull) { // qstr exists, make a leaf node pn = mp_parse_node_new_leaf(lex->tok_kind == MP_TOKEN_STRING ? 
MP_PARSE_NODE_STRING : MP_PARSE_NODE_BYTES, qst); } else { @@ -632,7 +632,7 @@ STATIC bool fold_constants(parser_t *parser, uint8_t rule_id, size_t num_args) { } else if (rule_id == RULE_shift_expr || rule_id == RULE_arith_expr || rule_id == RULE_term) { - // folding for binary ops: << >> + - * / % // + // folding for binary ops: << >> + - * @ / % // mp_parse_node_t pn = peek_result(parser, num_args - 1); if (!mp_parse_node_get_int_maybe(pn, &arg0)) { return false; @@ -644,23 +644,11 @@ STATIC bool fold_constants(parser_t *parser, uint8_t rule_id, size_t num_args) { return false; } mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(peek_result(parser, i)); - static const uint8_t token_to_op[] = { - MP_BINARY_OP_ADD, - MP_BINARY_OP_SUBTRACT, - MP_BINARY_OP_MULTIPLY, - 255,//MP_BINARY_OP_POWER, - 255,//MP_BINARY_OP_TRUE_DIVIDE, - MP_BINARY_OP_FLOOR_DIVIDE, - MP_BINARY_OP_MODULO, - 255,//MP_BINARY_OP_LESS - MP_BINARY_OP_LSHIFT, - 255,//MP_BINARY_OP_MORE - MP_BINARY_OP_RSHIFT, - }; - mp_binary_op_t op = token_to_op[tok - MP_TOKEN_OP_PLUS]; - if (op == (mp_binary_op_t)255) { + if (tok == MP_TOKEN_OP_AT || tok == MP_TOKEN_OP_SLASH || tok == MP_TOKEN_OP_DBL_STAR) { + // Can't fold @ or / or ** return false; } + mp_binary_op_t op = MP_BINARY_OP_LSHIFT + (tok - MP_TOKEN_OP_DBL_LESS); int rhs_sign = mp_obj_int_sign(arg1); if (op <= MP_BINARY_OP_RSHIFT) { // << and >> can't have negative rhs @@ -683,13 +671,11 @@ STATIC bool fold_constants(parser_t *parser, uint8_t rule_id, size_t num_args) { } mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(peek_result(parser, 1)); mp_unary_op_t op; - if (tok == MP_TOKEN_OP_PLUS) { - op = MP_UNARY_OP_POSITIVE; - } else if (tok == MP_TOKEN_OP_MINUS) { - op = MP_UNARY_OP_NEGATIVE; - } else { - assert(tok == MP_TOKEN_OP_TILDE); // should be + if (tok == MP_TOKEN_OP_TILDE) { op = MP_UNARY_OP_INVERT; + } else { + assert(tok == MP_TOKEN_OP_PLUS || tok == MP_TOKEN_OP_MINUS); // should be + op = MP_UNARY_OP_POSITIVE + (tok - MP_TOKEN_OP_PLUS); } arg0 = mp_unary_op(op, arg0); @@ -719,7 +705,7 @@ STATIC bool fold_constants(parser_t *parser, uint8_t rule_id, size_t num_args) { mp_obj_t exc = mp_obj_new_exception_msg(&mp_type_SyntaxError, "constant must be an integer"); mp_obj_exception_add_traceback(exc, parser->lexer->source_name, - ((mp_parse_node_struct_t*)pn1)->source_line, MP_QSTR_NULL); + ((mp_parse_node_struct_t*)pn1)->source_line, MP_QSTRnull); nlr_raise(exc); } @@ -1152,7 +1138,7 @@ mp_parse_tree_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind) { } // add traceback to give info about file name and location // we don't have a 'block' name, so just pass the NULL qstr to indicate this - mp_obj_exception_add_traceback(exc, lex->source_name, lex->tok_line, MP_QSTR_NULL); + mp_obj_exception_add_traceback(exc, lex->source_name, lex->tok_line, MP_QSTRnull); nlr_raise(exc); } diff --git a/python/src/py/parsenum.c b/python/src/py/parsenum.c index ae9b83419..2b01b7259 100644 --- a/python/src/py/parsenum.c +++ b/python/src/py/parsenum.c @@ -41,7 +41,7 @@ STATIC NORETURN void raise_exc(mp_obj_t exc, mp_lexer_t *lex) { // exception's type from ValueError to SyntaxError and add traceback info if (lex != NULL) { ((mp_obj_base_t*)MP_OBJ_TO_PTR(exc))->type = &mp_type_SyntaxError; - mp_obj_exception_add_traceback(exc, lex->source_name, lex->tok_line, MP_QSTR_NULL); + mp_obj_exception_add_traceback(exc, lex->source_name, lex->tok_line, MP_QSTRnull); } nlr_raise(exc); } diff --git a/python/src/py/persistentcode.c b/python/src/py/persistentcode.c index cc6e161f4..7a8a94b5a 100644 --- 
a/python/src/py/persistentcode.c +++ b/python/src/py/persistentcode.c @@ -30,9 +30,10 @@ #include #include "py/reader.h" -#include "py/emitglue.h" +#include "py/nativeglue.h" #include "py/persistentcode.h" -#include "py/bc.h" +#include "py/bc0.h" +#include "py/objstr.h" #if MICROPY_PERSISTENT_CODE_LOAD || MICROPY_PERSISTENT_CODE_SAVE @@ -40,41 +41,6 @@ #define QSTR_LAST_STATIC MP_QSTR_zip -// Macros to encode/decode flags to/from the feature byte -#define MPY_FEATURE_ENCODE_FLAGS(flags) (flags) -#define MPY_FEATURE_DECODE_FLAGS(feat) ((feat) & 3) - -// Macros to encode/decode native architecture to/from the feature byte -#define MPY_FEATURE_ENCODE_ARCH(arch) ((arch) << 2) -#define MPY_FEATURE_DECODE_ARCH(feat) ((feat) >> 2) - -// The feature flag bits encode the compile-time config options that -// affect the generate bytecode. -#define MPY_FEATURE_FLAGS ( \ - ((MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) << 0) \ - | ((MICROPY_PY_BUILTINS_STR_UNICODE) << 1) \ - ) -// This is a version of the flags that can be configured at runtime. -#define MPY_FEATURE_FLAGS_DYNAMIC ( \ - ((MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE_DYNAMIC) << 0) \ - | ((MICROPY_PY_BUILTINS_STR_UNICODE_DYNAMIC) << 1) \ - ) - -// Define the host architecture -#if MICROPY_EMIT_X86 -#define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_X86) -#elif MICROPY_EMIT_X64 -#define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_X64) -#elif MICROPY_EMIT_THUMB -#define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV7M) -#elif MICROPY_EMIT_ARM -#define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV6) -#elif MICROPY_EMIT_XTENSA -#define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_XTENSA) -#else -#define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_NONE) -#endif - #if MICROPY_DYNAMIC_COMPILER #define MPY_FEATURE_ARCH_DYNAMIC mp_dynamic_compiler.native_arch #else @@ -157,33 +123,38 @@ typedef struct _bytecode_prelude_t { uint code_info_size; } bytecode_prelude_t; -#if MICROPY_PERSISTENT_CODE_SAVE || MICROPY_EMIT_NATIVE - // ip will point to start of opcodes -// ip2 will point to simple_name, source_file qstrs -STATIC void extract_prelude(const byte **ip, const byte **ip2, bytecode_prelude_t *prelude) { - prelude->n_state = mp_decode_uint(ip); - prelude->n_exc_stack = mp_decode_uint(ip); - prelude->scope_flags = *(*ip)++; - prelude->n_pos_args = *(*ip)++; - prelude->n_kwonly_args = *(*ip)++; - prelude->n_def_pos_args = *(*ip)++; - *ip2 = *ip; - prelude->code_info_size = mp_decode_uint(ip2); - *ip += prelude->code_info_size; - while (*(*ip)++ != 255) { - } +// return value will point to simple_name, source_file qstrs +STATIC byte *extract_prelude(const byte **ip, bytecode_prelude_t *prelude) { + MP_BC_PRELUDE_SIG_DECODE(*ip); + prelude->n_state = n_state; + prelude->n_exc_stack = n_exc_stack; + prelude->scope_flags = scope_flags; + prelude->n_pos_args = n_pos_args; + prelude->n_kwonly_args = n_kwonly_args; + prelude->n_def_pos_args = n_def_pos_args; + MP_BC_PRELUDE_SIZE_DECODE(*ip); + byte *ip_info = (byte*)*ip; + *ip += n_info; + *ip += n_cell; + return ip_info; } -#endif - #endif // MICROPY_PERSISTENT_CODE_LOAD || MICROPY_PERSISTENT_CODE_SAVE #if MICROPY_PERSISTENT_CODE_LOAD #include "py/parsenum.h" -#if MICROPY_EMIT_NATIVE +STATIC int read_byte(mp_reader_t *reader); +STATIC size_t read_uint(mp_reader_t *reader, byte **out); + +#if MICROPY_EMIT_MACHINE_CODE + +typedef struct _reloc_info_t { + mp_reader_t *reader; + mp_uint_t *const_table; +} reloc_info_t; #if MICROPY_EMIT_THUMB STATIC void asm_thumb_rewrite_mov(uint8_t *pc, uint16_t val) { @@ -200,7 +171,7 @@ STATIC void arch_link_qstr(uint8_t *pc, bool is_obj, 
qstr qst) { if (is_obj) { val = (mp_uint_t)MP_OBJ_NEW_QSTR(qst); } - #if MICROPY_EMIT_X86 || MICROPY_EMIT_X64 || MICROPY_EMIT_ARM || MICROPY_EMIT_XTENSA + #if MICROPY_EMIT_X86 || MICROPY_EMIT_X64 || MICROPY_EMIT_ARM || MICROPY_EMIT_XTENSA || MICROPY_EMIT_XTENSAWIN pc[0] = val & 0xff; pc[1] = (val >> 8) & 0xff; pc[2] = (val >> 16) & 0xff; @@ -217,6 +188,52 @@ STATIC void arch_link_qstr(uint8_t *pc, bool is_obj, qstr qst) { #endif } +void mp_native_relocate(void *ri_in, uint8_t *text, uintptr_t reloc_text) { + // Relocate native code + reloc_info_t *ri = ri_in; + uint8_t op; + uintptr_t *addr_to_adjust = NULL; + while ((op = read_byte(ri->reader)) != 0xff) { + if (op & 1) { + // Point to new location to make adjustments + size_t addr = read_uint(ri->reader, NULL); + if ((addr & 1) == 0) { + // Point to somewhere in text + addr_to_adjust = &((uintptr_t*)text)[addr >> 1]; + } else { + // Point to somewhere in rodata + addr_to_adjust = &((uintptr_t*)ri->const_table[1])[addr >> 1]; + } + } + op >>= 1; + uintptr_t dest; + size_t n = 1; + if (op <= 5) { + if (op & 1) { + // Read in number of adjustments to make + n = read_uint(ri->reader, NULL); + } + op >>= 1; + if (op == 0) { + // Destination is text + dest = reloc_text; + } else { + // Destination is rodata (op=1) or bss (op=1 if no rodata, else op=2) + dest = ri->const_table[op]; + } + } else if (op == 6) { + // Destination is mp_fun_table itself + dest = (uintptr_t)&mp_fun_table; + } else { + // Destination is an entry in mp_fun_table + dest = ((uintptr_t*)&mp_fun_table)[op - 7]; + } + while (n--) { + *addr_to_adjust++ += dest; + } + } +} + #endif STATIC int read_byte(mp_reader_t *reader) { @@ -284,20 +301,28 @@ STATIC mp_obj_t load_obj(mp_reader_t *reader) { } } -STATIC void load_prelude(mp_reader_t *reader, byte **ip, byte **ip2, bytecode_prelude_t *prelude) { - prelude->n_state = read_uint(reader, ip); - prelude->n_exc_stack = read_uint(reader, ip); - read_bytes(reader, *ip, 4); - prelude->scope_flags = *(*ip)++; - prelude->n_pos_args = *(*ip)++; - prelude->n_kwonly_args = *(*ip)++; - prelude->n_def_pos_args = *(*ip)++; - *ip2 = *ip; - prelude->code_info_size = read_uint(reader, ip2); - read_bytes(reader, *ip2, prelude->code_info_size - (*ip2 - *ip)); - *ip += prelude->code_info_size; - while ((*(*ip)++ = read_byte(reader)) != 255) { - } +STATIC void load_prelude_qstrs(mp_reader_t *reader, qstr_window_t *qw, byte *ip) { + qstr simple_name = load_qstr(reader, qw); + ip[0] = simple_name; ip[1] = simple_name >> 8; + qstr source_file = load_qstr(reader, qw); + ip[2] = source_file; ip[3] = source_file >> 8; +} + +STATIC void load_prelude(mp_reader_t *reader, qstr_window_t *qw, byte **ip, bytecode_prelude_t *prelude) { + // Read in the prelude header + byte *ip_read = *ip; + read_uint(reader, &ip_read); // read in n_state/etc (is effectively a var-uint) + read_uint(reader, &ip_read); // read in n_info/n_cell (is effectively a var-uint) + + // Prelude header has been read into *ip, now decode and extract values from it + extract_prelude((const byte**)ip, prelude); + + // Load qstrs in prelude + load_prelude_qstrs(reader, qw, ip_read); + ip_read += 4; + + // Read remaining code info + read_bytes(reader, ip_read, *ip - ip_read); } STATIC void load_bytecode(mp_reader_t *reader, qstr_window_t *qw, byte *ip, byte *ip_top) { @@ -307,12 +332,12 @@ STATIC void load_bytecode(mp_reader_t *reader, qstr_window_t *qw, byte *ip, byte uint f = mp_opcode_format(ip, &sz, false); ++ip; --sz; - if (f == MP_OPCODE_QSTR) { + if (f == MP_BC_FORMAT_QSTR) { qstr qst = 
load_qstr(reader, qw); *ip++ = qst; *ip++ = qst >> 8; sz -= 2; - } else if (f == MP_OPCODE_VAR_UINT) { + } else if (f == MP_BC_FORMAT_VAR_UINT) { while ((*ip++ = read_byte(reader)) & 0x80) { } } @@ -327,17 +352,16 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { int kind = (kind_len & 3) + MP_CODE_BYTECODE; size_t fun_data_len = kind_len >> 2; - #if !MICROPY_EMIT_NATIVE + #if !MICROPY_EMIT_MACHINE_CODE if (kind != MP_CODE_BYTECODE) { mp_raise_ValueError("incompatible .mpy file"); } #endif uint8_t *fun_data = NULL; - byte *ip2; bytecode_prelude_t prelude = {0}; - #if MICROPY_EMIT_NATIVE - size_t prelude_offset; + #if MICROPY_EMIT_MACHINE_CODE + size_t prelude_offset = 0; mp_uint_t type_sig = 0; size_t n_qstr_link = 0; #endif @@ -348,12 +372,12 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { // Load prelude byte *ip = fun_data; - load_prelude(reader, &ip, &ip2, &prelude); + load_prelude(reader, qw, &ip, &prelude); // Load bytecode load_bytecode(reader, qw, ip, fun_data + fun_data_len); - #if MICROPY_EMIT_NATIVE + #if MICROPY_EMIT_MACHINE_CODE } else { // Allocate memory for native data and load it size_t fun_alloc; @@ -371,6 +395,9 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { // Generic 16-bit link dest[0] = qst & 0xff; dest[1] = (qst >> 8) & 0xff; + } else if ((off & 3) == 3) { + // Generic, aligned qstr-object link + *(mp_obj_t*)dest = MP_OBJ_NEW_QSTR(qst); } else { // Architecture-specific link arch_link_qstr(dest, (off & 3) == 2, qst); @@ -382,7 +409,9 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { // Extract prelude for later use prelude_offset = read_uint(reader, NULL); const byte *ip = fun_data + prelude_offset; - extract_prelude(&ip, (const byte**)&ip2, &prelude); + byte *ip_info = extract_prelude(&ip, &prelude); + // Load qstrs in prelude + load_prelude_qstrs(reader, qw, ip_info); } else { // Load basic scope info for viper and asm prelude.scope_flags = read_uint(reader, NULL); @@ -396,27 +425,31 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { #endif } - if (kind == MP_CODE_BYTECODE || kind == MP_CODE_NATIVE_PY) { - // Load qstrs in prelude - qstr simple_name = load_qstr(reader, qw); - qstr source_file = load_qstr(reader, qw); - ip2[0] = simple_name; ip2[1] = simple_name >> 8; - ip2[2] = source_file; ip2[3] = source_file >> 8; - } - + size_t n_obj = 0; + size_t n_raw_code = 0; mp_uint_t *const_table = NULL; + if (kind != MP_CODE_NATIVE_ASM) { // Load constant table for bytecode, native and viper // Number of entries in constant table - size_t n_obj = read_uint(reader, NULL); - size_t n_raw_code = read_uint(reader, NULL); + n_obj = read_uint(reader, NULL); + n_raw_code = read_uint(reader, NULL); // Allocate constant table size_t n_alloc = prelude.n_pos_args + prelude.n_kwonly_args + n_obj + n_raw_code; + #if MICROPY_EMIT_MACHINE_CODE if (kind != MP_CODE_BYTECODE) { ++n_alloc; // additional entry for mp_fun_table + if (prelude.scope_flags & MP_SCOPE_FLAG_VIPERRODATA) { + ++n_alloc; // additional entry for rodata + } + if (prelude.scope_flags & MP_SCOPE_FLAG_VIPERBSS) { + ++n_alloc; // additional entry for BSS + } } + #endif + const_table = m_new(mp_uint_t, n_alloc); mp_uint_t *ct = const_table; @@ -426,10 +459,25 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { *ct++ = (mp_uint_t)MP_OBJ_NEW_QSTR(load_qstr(reader, qw)); } - #if MICROPY_EMIT_NATIVE + #if MICROPY_EMIT_MACHINE_CODE if (kind 
!= MP_CODE_BYTECODE) { // Populate mp_fun_table entry - *ct++ = (mp_uint_t)(uintptr_t)mp_fun_table; + *ct++ = (mp_uint_t)(uintptr_t)&mp_fun_table; + + // Allocate and load rodata if needed + if (prelude.scope_flags & MP_SCOPE_FLAG_VIPERRODATA) { + size_t size = read_uint(reader, NULL); + uint8_t *rodata = m_new(uint8_t, size); + read_bytes(reader, rodata, size); + *ct++ = (uintptr_t)rodata; + } + + // Allocate BSS if needed + if (prelude.scope_flags & MP_SCOPE_FLAG_VIPERBSS) { + size_t size = read_uint(reader, NULL); + uint8_t *bss = m_new0(uint8_t, size); + *ct++ = (uintptr_t)bss; + } } #endif @@ -445,6 +493,7 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { // Create raw_code and return it mp_raw_code_t *rc = mp_emit_glue_new_raw_code(); if (kind == MP_CODE_BYTECODE) { + // Assign bytecode to raw code object mp_emit_glue_assign_bytecode(rc, fun_data, #if MICROPY_PERSISTENT_CODE_SAVE || MICROPY_DEBUG_PRINTERS fun_data_len, @@ -455,12 +504,20 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader, qstr_window_t *qw) { #endif prelude.scope_flags); - #if MICROPY_EMIT_NATIVE + #if MICROPY_EMIT_MACHINE_CODE } else { + // Relocate and commit code to executable address space + reloc_info_t ri = {reader, const_table}; #if defined(MP_PLAT_COMMIT_EXEC) - fun_data = MP_PLAT_COMMIT_EXEC(fun_data, fun_data_len); + void *opt_ri = (prelude.scope_flags & MP_SCOPE_FLAG_VIPERRELOC) ? &ri : NULL; + fun_data = MP_PLAT_COMMIT_EXEC(fun_data, fun_data_len, opt_ri); + #else + if (prelude.scope_flags & MP_SCOPE_FLAG_VIPERRELOC) { + mp_native_relocate(&ri, fun_data, (uintptr_t)fun_data); + } #endif + // Assign native code to raw code object mp_emit_glue_assign_native(rc, kind, fun_data, fun_data_len, const_table, #if MICROPY_PERSISTENT_CODE_SAVE @@ -484,9 +541,11 @@ mp_raw_code_t *mp_raw_code_load(mp_reader_t *reader) { || read_uint(reader, NULL) > QSTR_WINDOW_SIZE) { mp_raise_ValueError("incompatible .mpy file"); } - if (MPY_FEATURE_DECODE_ARCH(header[2]) != MP_NATIVE_ARCH_NONE - && MPY_FEATURE_DECODE_ARCH(header[2]) != MPY_FEATURE_ARCH) { - mp_raise_ValueError("incompatible .mpy arch"); + if (MPY_FEATURE_DECODE_ARCH(header[2]) != MP_NATIVE_ARCH_NONE) { + byte arch = MPY_FEATURE_DECODE_ARCH(header[2]); + if (!MPY_FEATURE_ARCH_TEST(arch)) { + mp_raise_ValueError("incompatible .mpy arch"); + } } qstr_window_t qw; qw.idx = 0; @@ -593,11 +652,16 @@ STATIC void save_obj(mp_print_t *print, mp_obj_t o) { } } +STATIC void save_prelude_qstrs(mp_print_t *print, qstr_window_t *qw, const byte *ip) { + save_qstr(print, qw, ip[0] | (ip[1] << 8)); // simple_name + save_qstr(print, qw, ip[2] | (ip[3] << 8)); // source_file +} + STATIC void save_bytecode(mp_print_t *print, qstr_window_t *qw, const byte *ip, const byte *ip_top) { while (ip < ip_top) { size_t sz; uint f = mp_opcode_format(ip, &sz, true); - if (f == MP_OPCODE_QSTR) { + if (f == MP_BC_FORMAT_QSTR) { mp_print_bytes(print, ip, 1); qstr qst = ip[1] | (ip[2] << 8); save_qstr(print, qw, qst); @@ -613,19 +677,23 @@ STATIC void save_raw_code(mp_print_t *print, mp_raw_code_t *rc, qstr_window_t *q // Save function kind and data length mp_print_uint(print, (rc->fun_data_len << 2) | (rc->kind - MP_CODE_BYTECODE)); - const byte *ip2; bytecode_prelude_t prelude; if (rc->kind == MP_CODE_BYTECODE) { - // Save prelude + // Extract prelude const byte *ip = rc->fun_data; - extract_prelude(&ip, &ip2, &prelude); - size_t prelude_len = ip - (const byte*)rc->fun_data; - const byte *ip_top = (const byte*)rc->fun_data + rc->fun_data_len; - 
mp_print_bytes(print, rc->fun_data, prelude_len); + const byte *ip_info = extract_prelude(&ip, &prelude); + + // Save prelude + mp_print_bytes(print, rc->fun_data, ip_info - (const byte*)rc->fun_data); + save_prelude_qstrs(print, qstr_window, ip_info); + ip_info += 4; + mp_print_bytes(print, ip_info, ip - ip_info); // Save bytecode + const byte *ip_top = (const byte*)rc->fun_data + rc->fun_data_len; save_bytecode(print, qstr_window, ip, ip_top); + #if MICROPY_EMIT_MACHINE_CODE } else { // Save native code mp_print_bytes(print, rc->fun_data, rc->fun_data_len); @@ -640,13 +708,16 @@ STATIC void save_raw_code(mp_print_t *print, mp_raw_code_t *rc, qstr_window_t *q } if (rc->kind == MP_CODE_NATIVE_PY) { - // Save prelude size, and extract prelude for later use + // Save prelude size mp_print_uint(print, rc->prelude_offset); + + // Extract prelude and save qstrs in prelude const byte *ip = (const byte*)rc->fun_data + rc->prelude_offset; - extract_prelude(&ip, &ip2, &prelude); + const byte *ip_info = extract_prelude(&ip, &prelude); + save_prelude_qstrs(print, qstr_window, ip_info); } else { // Save basic scope info for viper and asm - mp_print_uint(print, rc->scope_flags); + mp_print_uint(print, rc->scope_flags & MP_SCOPE_FLAG_ALL_SIG); prelude.n_pos_args = 0; prelude.n_kwonly_args = 0; if (rc->kind == MP_CODE_NATIVE_ASM) { @@ -654,12 +725,7 @@ STATIC void save_raw_code(mp_print_t *print, mp_raw_code_t *rc, qstr_window_t *q mp_print_uint(print, rc->type_sig); } } - } - - if (rc->kind == MP_CODE_BYTECODE || rc->kind == MP_CODE_NATIVE_PY) { - // Save qstrs in prelude - save_qstr(print, qstr_window, ip2[0] | (ip2[1] << 8)); // simple_name - save_qstr(print, qstr_window, ip2[2] | (ip2[3] << 8)); // source_file + #endif } if (rc->kind != MP_CODE_NATIVE_ASM) { @@ -699,9 +765,8 @@ STATIC bool mp_raw_code_has_native(mp_raw_code_t *rc) { } const byte *ip = rc->fun_data; - const byte *ip2; bytecode_prelude_t prelude; - extract_prelude(&ip, &ip2, &prelude); + extract_prelude(&ip, &prelude); const mp_uint_t *const_table = rc->const_table + prelude.n_pos_args + prelude.n_kwonly_args @@ -748,7 +813,7 @@ void mp_raw_code_save(mp_raw_code_t *rc, mp_print_t *print) { // here we define mp_raw_code_save_file depending on the port // TODO abstract this away properly -#if defined(__i386__) || defined(__x86_64__) || defined(__unix__) +#if defined(__i386__) || defined(__x86_64__) || defined(_WIN32) || defined(__unix__) #include #include diff --git a/python/src/py/persistentcode.h b/python/src/py/persistentcode.h index 2074c64fe..8769ef584 100644 --- a/python/src/py/persistentcode.h +++ b/python/src/py/persistentcode.h @@ -31,7 +31,63 @@ #include "py/emitglue.h" // The current version of .mpy files -#define MPY_VERSION 4 +#define MPY_VERSION 5 + +// Macros to encode/decode flags to/from the feature byte +#define MPY_FEATURE_ENCODE_FLAGS(flags) (flags) +#define MPY_FEATURE_DECODE_FLAGS(feat) ((feat) & 3) + +// Macros to encode/decode native architecture to/from the feature byte +#define MPY_FEATURE_ENCODE_ARCH(arch) ((arch) << 2) +#define MPY_FEATURE_DECODE_ARCH(feat) ((feat) >> 2) + +// The feature flag bits encode the compile-time config options that +// affect the generate bytecode. +#define MPY_FEATURE_FLAGS ( \ + ((MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) << 0) \ + | ((MICROPY_PY_BUILTINS_STR_UNICODE) << 1) \ + ) +// This is a version of the flags that can be configured at runtime. 
+#define MPY_FEATURE_FLAGS_DYNAMIC ( \ + ((MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE_DYNAMIC) << 0) \ + | ((MICROPY_PY_BUILTINS_STR_UNICODE_DYNAMIC) << 1) \ + ) + +// Define the host architecture +#if MICROPY_EMIT_X86 + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_X86) +#elif MICROPY_EMIT_X64 + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_X64) +#elif MICROPY_EMIT_THUMB + #if defined(__thumb2__) + #if defined(__ARM_FP) && (__ARM_FP & 8) == 8 + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV7EMDP) + #elif defined(__ARM_FP) && (__ARM_FP & 4) == 4 + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV7EMSP) + #else + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV7EM) + #endif + #else + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV7M) + #endif + #define MPY_FEATURE_ARCH_TEST(x) (MP_NATIVE_ARCH_ARMV6M <= (x) && (x) <= MPY_FEATURE_ARCH) +#elif MICROPY_EMIT_ARM + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_ARMV6) +#elif MICROPY_EMIT_XTENSA + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_XTENSA) +#elif MICROPY_EMIT_XTENSAWIN + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_XTENSAWIN) +#else + #define MPY_FEATURE_ARCH (MP_NATIVE_ARCH_NONE) +#endif + +#ifndef MPY_FEATURE_ARCH_TEST +#define MPY_FEATURE_ARCH_TEST(x) ((x) == MPY_FEATURE_ARCH) +#endif + +// 16-bit little-endian integer with the second and third bytes of supported .mpy files +#define MPY_FILE_HEADER_INT (MPY_VERSION \ + | (MPY_FEATURE_ENCODE_FLAGS(MPY_FEATURE_FLAGS) | MPY_FEATURE_ENCODE_ARCH(MPY_FEATURE_ARCH)) << 8) enum { MP_NATIVE_ARCH_NONE = 0, @@ -44,6 +100,7 @@ enum { MP_NATIVE_ARCH_ARMV7EMSP, MP_NATIVE_ARCH_ARMV7EMDP, MP_NATIVE_ARCH_XTENSA, + MP_NATIVE_ARCH_XTENSAWIN, }; mp_raw_code_t *mp_raw_code_load(mp_reader_t *reader); @@ -53,4 +110,6 @@ mp_raw_code_t *mp_raw_code_load_file(const char *filename); void mp_raw_code_save(mp_raw_code_t *rc, mp_print_t *print); void mp_raw_code_save_file(mp_raw_code_t *rc, const char *filename); +void mp_native_relocate(void *reloc, uint8_t *text, uintptr_t reloc_text); + #endif // MICROPY_INCLUDED_PY_PERSISTENTCODE_H diff --git a/python/src/py/profile.c b/python/src/py/profile.c new file mode 100644 index 000000000..72726cdf5 --- /dev/null +++ b/python/src/py/profile.c @@ -0,0 +1,984 @@ +/* + * This file is part of the MicroPython project, http://micropython.org/ + * + * The MIT License (MIT) + * + * Copyright (c) SatoshiLabs + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ + +#include "py/profile.h" +#include "py/bc0.h" +#include "py/gc.h" + +#if MICROPY_PY_SYS_SETTRACE + +#define prof_trace_cb MP_STATE_THREAD(prof_trace_callback) + +STATIC uint mp_prof_bytecode_lineno(const mp_raw_code_t *rc, size_t bc) { + const mp_bytecode_prelude_t *prelude = &rc->prelude; + return mp_bytecode_get_source_line(prelude->line_info, bc); +} + +void mp_prof_extract_prelude(const byte *bytecode, mp_bytecode_prelude_t *prelude) { + const byte *ip = bytecode; + + MP_BC_PRELUDE_SIG_DECODE(ip); + prelude->n_state = n_state; + prelude->n_exc_stack = n_exc_stack; + prelude->scope_flags = scope_flags; + prelude->n_pos_args = n_pos_args; + prelude->n_kwonly_args = n_kwonly_args; + prelude->n_def_pos_args = n_def_pos_args; + + MP_BC_PRELUDE_SIZE_DECODE(ip); + + prelude->line_info = ip + 4; + prelude->opcodes = ip + n_info + n_cell; + + qstr block_name = ip[0] | (ip[1] << 8); + qstr source_file = ip[2] | (ip[3] << 8); + prelude->qstr_block_name = block_name; + prelude->qstr_source_file = source_file; +} + +/******************************************************************************/ +// code object + +STATIC void code_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kind_t kind) { + (void)kind; + mp_obj_code_t *o = MP_OBJ_TO_PTR(o_in); + const mp_raw_code_t *rc = o->rc; + const mp_bytecode_prelude_t *prelude = &rc->prelude; + mp_printf(print, + "", + prelude->qstr_block_name, + o, + prelude->qstr_source_file, + rc->line_of_definition + ); +} + +STATIC mp_obj_tuple_t* code_consts(const mp_raw_code_t *rc) { + const mp_bytecode_prelude_t *prelude = &rc->prelude; + int start = prelude->n_pos_args + prelude->n_kwonly_args + rc->n_obj; + int stop = prelude->n_pos_args + prelude->n_kwonly_args + rc->n_obj + rc->n_raw_code; + mp_obj_tuple_t *consts = MP_OBJ_TO_PTR(mp_obj_new_tuple(stop - start + 1, NULL)); + + size_t const_no = 0; + for (int i = start; i < stop; ++i) { + mp_obj_t code = mp_obj_new_code((const mp_raw_code_t*)MP_OBJ_TO_PTR(rc->const_table[i])); + if (code == MP_OBJ_NULL) { + m_malloc_fail(sizeof(mp_obj_code_t)); + } + consts->items[const_no++] = code; + } + consts->items[const_no++] = mp_const_none; + + return consts; +} + +STATIC mp_obj_t raw_code_lnotab(const mp_raw_code_t *rc) { + // const mp_bytecode_prelude_t *prelude = &rc->prelude; + uint start = 0; + uint stop = rc->fun_data_len - start; + + uint last_lineno = mp_prof_bytecode_lineno(rc, start); + uint lasti = 0; + + const uint buffer_chunk_size = (stop-start) >> 2; // heuristic magic + uint buffer_size = buffer_chunk_size; + byte *buffer = m_new(byte, buffer_size); + uint buffer_index = 0; + + for (uint i = start; i < stop; ++i) { + uint lineno = mp_prof_bytecode_lineno(rc, i); + size_t line_diff = lineno - last_lineno; + if (line_diff > 0) { + uint instr_diff = (i - start) - lasti; + + assert(instr_diff < 256); + assert(line_diff < 256); + + if (buffer_index + 2 > buffer_size) { + buffer = m_renew(byte, buffer, buffer_size, buffer_size + buffer_chunk_size); + buffer_size = buffer_size + buffer_chunk_size; + } + last_lineno = lineno; + lasti = i - start; + buffer[buffer_index++] = instr_diff; + buffer[buffer_index++] = line_diff; + } + } + + mp_obj_t o = mp_obj_new_bytes(buffer, buffer_index); + m_del(byte, buffer, buffer_size); + return o; +} + +STATIC void code_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) { + if (dest[0] != MP_OBJ_NULL) { + // not load attribute + return; + } + mp_obj_code_t *o = MP_OBJ_TO_PTR(self_in); + const mp_raw_code_t *rc = o->rc; + const mp_bytecode_prelude_t *prelude = 
&rc->prelude; + switch(attr) { + case MP_QSTR_co_code: + dest[0] = mp_obj_new_bytes( + (void*)prelude->opcodes, + rc->fun_data_len - (prelude->opcodes - (const byte*)rc->fun_data) + ); + break; + case MP_QSTR_co_consts: + dest[0] = MP_OBJ_FROM_PTR(code_consts(rc)); + break; + case MP_QSTR_co_filename: + dest[0] = MP_OBJ_NEW_QSTR(prelude->qstr_source_file); + break; + case MP_QSTR_co_firstlineno: + dest[0] = MP_OBJ_NEW_SMALL_INT(mp_prof_bytecode_lineno(rc, 0)); + break; + case MP_QSTR_co_name: + dest[0] = MP_OBJ_NEW_QSTR(prelude->qstr_block_name); + break; + case MP_QSTR_co_names: + dest[0] = MP_OBJ_FROM_PTR(o->dict_locals); + break; + case MP_QSTR_co_lnotab: + if (!o->lnotab) { + o->lnotab = raw_code_lnotab(rc); + } + dest[0] = o->lnotab; + break; + } +} + +const mp_obj_type_t mp_type_code = { + { &mp_type_type }, + .name = MP_QSTR_code, + .print = code_print, + .unary_op = mp_generic_unary_op, + .attr = code_attr, +}; + +mp_obj_t mp_obj_new_code(const mp_raw_code_t *rc) { + mp_obj_code_t *o = m_new_obj_maybe(mp_obj_code_t); + if (o == NULL) { + return MP_OBJ_NULL; + } + o->base.type = &mp_type_code; + o->rc = rc; + o->dict_locals = mp_locals_get(); // this is a wrong! how to do this properly? + o->lnotab = MP_OBJ_NULL; + return MP_OBJ_FROM_PTR(o); +} + +/******************************************************************************/ +// frame object + +STATIC void frame_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kind_t kind) { + (void)kind; + mp_obj_frame_t *frame = MP_OBJ_TO_PTR(o_in); + mp_obj_code_t *code = frame->code; + const mp_raw_code_t *rc = code->rc; + const mp_bytecode_prelude_t *prelude = &rc->prelude; + mp_printf(print, + "", + frame, + prelude->qstr_source_file, + frame->lineno, + prelude->qstr_block_name + ); +} + +STATIC void frame_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) { + if (dest[0] != MP_OBJ_NULL) { + // not load attribute + return; + } + + mp_obj_frame_t *o = MP_OBJ_TO_PTR(self_in); + + switch(attr) { + case MP_QSTR_f_back: + dest[0] = mp_const_none; + if (o->code_state->prev_state) { + dest[0] = MP_OBJ_FROM_PTR(o->code_state->prev_state->frame); + } + break; + case MP_QSTR_f_code: + dest[0] = MP_OBJ_FROM_PTR(o->code); + break; + case MP_QSTR_f_globals: + dest[0] = MP_OBJ_FROM_PTR(o->code_state->fun_bc->globals); + break; + case MP_QSTR_f_lasti: + dest[0] = MP_OBJ_NEW_SMALL_INT(o->lasti); + break; + case MP_QSTR_f_lineno: + dest[0] = MP_OBJ_NEW_SMALL_INT(o->lineno); + break; + } +} + +const mp_obj_type_t mp_type_frame = { + { &mp_type_type }, + .name = MP_QSTR_frame, + .print = frame_print, + .unary_op = mp_generic_unary_op, + .attr = frame_attr, +}; + +mp_obj_t mp_obj_new_frame(const mp_code_state_t *code_state) { + if (gc_is_locked()) { + return MP_OBJ_NULL; + } + + mp_obj_frame_t *o = m_new_obj_maybe(mp_obj_frame_t); + if (o == NULL) { + return MP_OBJ_NULL; + } + + mp_obj_code_t *code = o->code = MP_OBJ_TO_PTR(mp_obj_new_code(code_state->fun_bc->rc)); + if (code == NULL) { + return MP_OBJ_NULL; + } + + const mp_raw_code_t *rc = code->rc; + const mp_bytecode_prelude_t *prelude = &rc->prelude; + o->code_state = code_state; + o->base.type = &mp_type_frame; + o->back = NULL; + o->code = code; + o->lasti = code_state->ip - prelude->opcodes; + o->lineno = mp_prof_bytecode_lineno(rc, o->lasti); + o->trace_opcodes = false; + o->callback = MP_OBJ_NULL; + + return MP_OBJ_FROM_PTR(o); +} + + +/******************************************************************************/ +// Trace logic + +typedef struct { + struct _mp_obj_frame_t * frame; + mp_obj_t 
event; + mp_obj_t arg; +} prof_callback_args_t; + +STATIC mp_obj_t mp_prof_callback_invoke(mp_obj_t callback, prof_callback_args_t *args) { + assert(mp_obj_is_callable(callback)); + + mp_prof_is_executing = true; + + mp_obj_t a[3] = {MP_OBJ_FROM_PTR(args->frame), args->event, args->arg}; + mp_obj_t top = mp_call_function_n_kw(callback, 3, 0, a); + + mp_prof_is_executing = false; + + if (MP_STATE_VM(mp_pending_exception) != MP_OBJ_NULL) { + mp_obj_t obj = MP_STATE_VM(mp_pending_exception); + MP_STATE_VM(mp_pending_exception) = MP_OBJ_NULL; + nlr_raise(obj); + } + return top; +} + +mp_obj_t mp_prof_settrace(mp_obj_t callback) { + if (mp_obj_is_callable(callback)) { + prof_trace_cb = callback; + } else { + prof_trace_cb = MP_OBJ_NULL; + } + return mp_const_none; +} + +mp_obj_t mp_prof_frame_enter(mp_code_state_t *code_state) { + assert(!mp_prof_is_executing); + + mp_obj_frame_t *frame = MP_OBJ_TO_PTR(mp_obj_new_frame(code_state)); + if (frame == NULL) { + // Couldn't allocate a frame object + return MP_OBJ_NULL; + } + + if (code_state->prev_state && code_state->frame == NULL) { + // We are entering not-yet-traced frame + // which means it's a CALL event (not a GENERATOR) + // so set the function definition line. + const mp_raw_code_t *rc = code_state->fun_bc->rc; + frame->lineno = rc->line_of_definition; + if (!rc->line_of_definition) { + frame->lineno = mp_prof_bytecode_lineno(rc, 0); + } + } + code_state->frame = frame; + + if (!prof_trace_cb) { + return MP_OBJ_NULL; + } + + mp_obj_t top; + prof_callback_args_t _args, *args=&_args; + args->frame = code_state->frame; + + // SETTRACE event CALL + args->event = MP_OBJ_NEW_QSTR(MP_QSTR_call); + args->arg = mp_const_none; + top = mp_prof_callback_invoke(prof_trace_cb, args); + + code_state->frame->callback = mp_obj_is_callable(top) ? top : MP_OBJ_NULL; + + // Invalidate the last executed line number so the LINE trace can trigger after this CALL. 
+ frame->lineno = 0; + + return top; +} + +mp_obj_t mp_prof_frame_update(const mp_code_state_t *code_state) { + mp_obj_frame_t *frame = code_state->frame; + if (frame == NULL) { + // Frame was not allocated (eg because there was no memory available) + return MP_OBJ_NULL; + } + + mp_obj_frame_t *o = frame; + mp_obj_code_t *code = o->code; + const mp_raw_code_t *rc = code->rc; + const mp_bytecode_prelude_t *prelude = &rc->prelude; + + assert(o->code_state == code_state); + + o->lasti = code_state->ip - prelude->opcodes; + o->lineno = mp_prof_bytecode_lineno(rc, o->lasti); + + return MP_OBJ_FROM_PTR(o); +} + +mp_obj_t mp_prof_instr_tick(mp_code_state_t *code_state, bool is_exception) { + // Detect execution recursion + assert(!mp_prof_is_executing); + assert(code_state->frame); + assert(mp_obj_get_type(code_state->frame) == &mp_type_frame); + + // Detect data recursion + assert(code_state != code_state->prev_state); + + mp_obj_t top = mp_const_none; + mp_obj_t callback = code_state->frame->callback; + + prof_callback_args_t _args, *args=&_args; + args->frame = code_state->frame; + args->event = mp_const_none; + args->arg = mp_const_none; + + // Call event's are handled inside mp_prof_frame_enter + + // SETTRACE event EXCEPTION + if (is_exception) { + args->event = MP_OBJ_NEW_QSTR(MP_QSTR_exception); + top = mp_prof_callback_invoke(callback, args); + return top; + } + + // SETTRACE event LINE + const mp_raw_code_t *rc = code_state->fun_bc->rc; + const mp_bytecode_prelude_t *prelude = &rc->prelude; + size_t prev_line_no = args->frame->lineno; + size_t current_line_no = mp_prof_bytecode_lineno(rc, code_state->ip - prelude->opcodes); + if (prev_line_no != current_line_no) { + args->frame->lineno = current_line_no; + args->event = MP_OBJ_NEW_QSTR(MP_QSTR_line); + top = mp_prof_callback_invoke(callback, args); + } + + // SETTRACE event RETURN + const byte *ip = code_state->ip; + if (*ip == MP_BC_RETURN_VALUE || *ip == MP_BC_YIELD_VALUE) { + args->event = MP_OBJ_NEW_QSTR(MP_QSTR_return); + top = mp_prof_callback_invoke(callback, args); + if (code_state->prev_state && *ip == MP_BC_RETURN_VALUE) { + code_state->frame->callback = MP_OBJ_NULL; + } + } + + // SETTRACE event OPCODE + // TODO: frame.f_trace_opcodes=True + if (false) { + args->event = MP_OBJ_NEW_QSTR(MP_QSTR_opcode); + } + + return top; +} + +/******************************************************************************/ +// DEBUG + +// This section is for debugging the settrace feature itself, and is not intended +// to be included in production/release builds. The code structure for this block +// was taken from py/showbc.c and should not be used as a reference. To enable +// this debug feature enable MICROPY_PROF_INSTR_DEBUG_PRINT_ENABLE in py/profile.h. 
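Illustrative sketch (not part of the upstream diff): the entry points defined above are intended to be driven by the bytecode interpreter when MICROPY_PY_SYS_SETTRACE is enabled. Roughly, mp_prof_frame_enter() fires the 'call' event and installs the per-frame callback, mp_prof_frame_update() refreshes f_lasti/f_lineno, and mp_prof_instr_tick() emits the 'line', 'return' and 'exception' events. The driver function below and its control flow are invented for illustration; only the mp_prof_* calls come from this file.

    #include "py/bc.h"
    #include "py/profile.h"

    void run_frame_with_tracing(mp_code_state_t *code_state) {
        #if MICROPY_PY_SYS_SETTRACE
        // 'call' event: allocates the frame object and lets the global trace
        // function (registered via mp_prof_settrace) return a per-frame callback.
        mp_prof_frame_enter(code_state);
        #endif

        for (;;) {
            #if MICROPY_PY_SYS_SETTRACE
            if (code_state->frame != NULL) { // frame allocation can fail; the tick expects a frame
                mp_prof_frame_update(code_state);      // keep f_lasti / f_lineno in sync
                mp_prof_instr_tick(code_state, false); // false: not dispatching an exception
            }
            #endif

            // ... fetch and execute the opcode at code_state->ip ...
            break; // placeholder so the sketch terminates
        }
    }

A real interpreter adds further guards, for example only ticking when a per-frame callback was actually installed by the 'call' event; the callback stored on the frame mirrors CPython's local trace function semantics.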
+#if MICROPY_PROF_INSTR_DEBUG_PRINT_ENABLE + +#include "runtime0.h" + +#define DECODE_UINT { \ + unum = 0; \ + do { \ + unum = (unum << 7) + (*ip & 0x7f); \ + } while ((*ip++ & 0x80) != 0); \ +} +#define DECODE_ULABEL do { unum = (ip[0] | (ip[1] << 8)); ip += 2; } while (0) +#define DECODE_SLABEL do { unum = (ip[0] | (ip[1] << 8)) - 0x8000; ip += 2; } while (0) + +#define DECODE_QSTR \ + qst = ip[0] | ip[1] << 8; \ + ip += 2; +#define DECODE_PTR \ + DECODE_UINT; \ + ptr = (const byte*)const_table[unum] +#define DECODE_OBJ \ + DECODE_UINT; \ + obj = (mp_obj_t)const_table[unum] + +typedef struct _mp_dis_instruction_t { + mp_uint_t qstr_opname; + mp_uint_t arg; + mp_obj_t argobj; + mp_obj_t argobjex_cache; +} mp_dis_instruction_t; + +STATIC const byte *mp_prof_opcode_decode(const byte *ip, const mp_uint_t *const_table, mp_dis_instruction_t *instruction) { + mp_uint_t unum; + const byte* ptr; + mp_obj_t obj; + qstr qst; + + instruction->qstr_opname = MP_QSTR_; + instruction->arg = 0; + instruction->argobj= mp_const_none; + instruction->argobjex_cache = mp_const_none; + + switch (*ip++) { + case MP_BC_LOAD_CONST_FALSE: + instruction->qstr_opname = MP_QSTR_LOAD_CONST_FALSE; + break; + + case MP_BC_LOAD_CONST_NONE: + instruction->qstr_opname = MP_QSTR_LOAD_CONST_NONE; + break; + + case MP_BC_LOAD_CONST_TRUE: + instruction->qstr_opname = MP_QSTR_LOAD_CONST_TRUE; + break; + + case MP_BC_LOAD_CONST_SMALL_INT: { + mp_int_t num = 0; + if ((ip[0] & 0x40) != 0) { + // Number is negative + num--; + } + do { + num = (num << 7) | (*ip & 0x7f); + } while ((*ip++ & 0x80) != 0); + instruction->qstr_opname = MP_QSTR_LOAD_CONST_SMALL_INT; + instruction->arg = num; + break; + } + + case MP_BC_LOAD_CONST_STRING: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_LOAD_CONST_STRING; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_LOAD_CONST_OBJ: + DECODE_OBJ; + instruction->qstr_opname = MP_QSTR_LOAD_CONST_OBJ; + instruction->arg = unum; + instruction->argobj= obj; + break; + + case MP_BC_LOAD_NULL: + instruction->qstr_opname = MP_QSTR_LOAD_NULL; + break; + + case MP_BC_LOAD_FAST_N: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_LOAD_FAST_N; + instruction->arg = unum; + break; + + case MP_BC_LOAD_DEREF: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_LOAD_DEREF; + instruction->arg = unum; + break; + + case MP_BC_LOAD_NAME: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_LOAD_NAME; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) { + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT(*ip++); + } + break; + + case MP_BC_LOAD_GLOBAL: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_LOAD_GLOBAL; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) { + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT(*ip++); + } + break; + + case MP_BC_LOAD_ATTR: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_LOAD_ATTR; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) { + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT(*ip++); + } + break; + + case MP_BC_LOAD_METHOD: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_LOAD_METHOD; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_LOAD_SUPER_METHOD: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_LOAD_SUPER_METHOD; + instruction->arg = qst; + instruction->argobj= 
MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_LOAD_BUILD_CLASS: + instruction->qstr_opname = MP_QSTR_LOAD_BUILD_CLASS; + break; + + case MP_BC_LOAD_SUBSCR: + instruction->qstr_opname = MP_QSTR_LOAD_SUBSCR; + break; + + case MP_BC_STORE_FAST_N: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_STORE_FAST_N; + instruction->arg = unum; + break; + + case MP_BC_STORE_DEREF: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_STORE_DEREF; + instruction->arg = unum; + break; + + case MP_BC_STORE_NAME: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_STORE_NAME; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_STORE_GLOBAL: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_STORE_GLOBAL; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_STORE_ATTR: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_STORE_ATTR; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + if (MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE) { + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT(*ip++); + } + break; + + case MP_BC_STORE_SUBSCR: + instruction->qstr_opname = MP_QSTR_STORE_SUBSCR; + break; + + case MP_BC_DELETE_FAST: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_DELETE_FAST; + instruction->arg = unum; + break; + + case MP_BC_DELETE_DEREF: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_DELETE_DEREF; + instruction->arg = unum; + break; + + case MP_BC_DELETE_NAME: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_DELETE_NAME; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_DELETE_GLOBAL: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_DELETE_GLOBAL; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_DUP_TOP: + instruction->qstr_opname = MP_QSTR_DUP_TOP; + break; + + case MP_BC_DUP_TOP_TWO: + instruction->qstr_opname = MP_QSTR_DUP_TOP_TWO; + break; + + case MP_BC_POP_TOP: + instruction->qstr_opname = MP_QSTR_POP_TOP; + break; + + case MP_BC_ROT_TWO: + instruction->qstr_opname = MP_QSTR_ROT_TWO; + break; + + case MP_BC_ROT_THREE: + instruction->qstr_opname = MP_QSTR_ROT_THREE; + break; + + case MP_BC_JUMP: + DECODE_SLABEL; + instruction->qstr_opname = MP_QSTR_JUMP; + instruction->arg = unum; + break; + + case MP_BC_POP_JUMP_IF_TRUE: + DECODE_SLABEL; + instruction->qstr_opname = MP_QSTR_POP_JUMP_IF_TRUE; + instruction->arg = unum; + break; + + case MP_BC_POP_JUMP_IF_FALSE: + DECODE_SLABEL; + instruction->qstr_opname = MP_QSTR_POP_JUMP_IF_FALSE; + instruction->arg = unum; + break; + + case MP_BC_JUMP_IF_TRUE_OR_POP: + DECODE_SLABEL; + instruction->qstr_opname = MP_QSTR_JUMP_IF_TRUE_OR_POP; + instruction->arg = unum; + break; + + case MP_BC_JUMP_IF_FALSE_OR_POP: + DECODE_SLABEL; + instruction->qstr_opname = MP_QSTR_JUMP_IF_FALSE_OR_POP; + instruction->arg = unum; + break; + + case MP_BC_SETUP_WITH: + DECODE_ULABEL; // loop-like labels are always forward + instruction->qstr_opname = MP_QSTR_SETUP_WITH; + instruction->arg = unum; + break; + + case MP_BC_WITH_CLEANUP: + instruction->qstr_opname = MP_QSTR_WITH_CLEANUP; + break; + + case MP_BC_UNWIND_JUMP: + DECODE_SLABEL; + instruction->qstr_opname = MP_QSTR_UNWIND_JUMP; + instruction->arg = unum; + break; + + case MP_BC_SETUP_EXCEPT: + DECODE_ULABEL; // except labels are always forward + instruction->qstr_opname = MP_QSTR_SETUP_EXCEPT; + instruction->arg = unum; + break; + + case MP_BC_SETUP_FINALLY: + DECODE_ULABEL; // except labels are always forward + 
instruction->qstr_opname = MP_QSTR_SETUP_FINALLY; + instruction->arg = unum; + break; + + case MP_BC_END_FINALLY: + // if TOS is an exception, reraises the exception (3 values on TOS) + // if TOS is an integer, does something else + // if TOS is None, just pops it and continues + // else error + instruction->qstr_opname = MP_QSTR_END_FINALLY; + break; + + case MP_BC_GET_ITER: + instruction->qstr_opname = MP_QSTR_GET_ITER; + break; + + case MP_BC_GET_ITER_STACK: + instruction->qstr_opname = MP_QSTR_GET_ITER_STACK; + break; + + case MP_BC_FOR_ITER: + DECODE_ULABEL; // the jump offset if iteration finishes; for labels are always forward + instruction->qstr_opname = MP_QSTR_FOR_ITER; + instruction->arg = unum; + break; + + case MP_BC_BUILD_TUPLE: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_BUILD_TUPLE; + instruction->arg = unum; + break; + + case MP_BC_BUILD_LIST: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_BUILD_LIST; + instruction->arg = unum; + break; + + case MP_BC_BUILD_MAP: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_BUILD_MAP; + instruction->arg = unum; + break; + + case MP_BC_STORE_MAP: + instruction->qstr_opname = MP_QSTR_STORE_MAP; + break; + + case MP_BC_BUILD_SET: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_BUILD_SET; + instruction->arg = unum; + break; + + #if MICROPY_PY_BUILTINS_SLICE + case MP_BC_BUILD_SLICE: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_BUILD_SLICE; + instruction->arg = unum; + break; + #endif + + case MP_BC_STORE_COMP: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_STORE_COMP; + instruction->arg = unum; + break; + + case MP_BC_UNPACK_SEQUENCE: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_UNPACK_SEQUENCE; + instruction->arg = unum; + break; + + case MP_BC_UNPACK_EX: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_UNPACK_EX; + instruction->arg = unum; + break; + + case MP_BC_MAKE_FUNCTION: + DECODE_PTR; + instruction->qstr_opname = MP_QSTR_MAKE_FUNCTION; + instruction->arg = unum; + instruction->argobj= mp_obj_new_int_from_ull((uint64_t)ptr); + break; + + case MP_BC_MAKE_FUNCTION_DEFARGS: + DECODE_PTR; + instruction->qstr_opname = MP_QSTR_MAKE_FUNCTION_DEFARGS; + instruction->arg = unum; + instruction->argobj= mp_obj_new_int_from_ull((uint64_t)ptr); + break; + + case MP_BC_MAKE_CLOSURE: { + DECODE_PTR; + mp_uint_t n_closed_over = *ip++; + instruction->qstr_opname = MP_QSTR_MAKE_CLOSURE; + instruction->arg = unum; + instruction->argobj= mp_obj_new_int_from_ull((uint64_t)ptr); + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT(n_closed_over); + break; + } + + case MP_BC_MAKE_CLOSURE_DEFARGS: { + DECODE_PTR; + mp_uint_t n_closed_over = *ip++; + instruction->qstr_opname = MP_QSTR_MAKE_CLOSURE_DEFARGS; + instruction->arg = unum; + instruction->argobj= mp_obj_new_int_from_ull((uint64_t)ptr); + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT(n_closed_over); + break; + } + + case MP_BC_CALL_FUNCTION: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_CALL_FUNCTION; + instruction->arg = unum & 0xff; + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT((unum >> 8) & 0xff); + break; + + case MP_BC_CALL_FUNCTION_VAR_KW: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_CALL_FUNCTION_VAR_KW; + instruction->arg = unum & 0xff; + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT((unum >> 8) & 0xff); + break; + + case MP_BC_CALL_METHOD: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_CALL_METHOD; + instruction->arg = unum & 0xff; + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT((unum >> 8) & 0xff); + break; + 
+ case MP_BC_CALL_METHOD_VAR_KW: + DECODE_UINT; + instruction->qstr_opname = MP_QSTR_CALL_METHOD_VAR_KW; + instruction->arg = unum & 0xff; + instruction->argobjex_cache = MP_OBJ_NEW_SMALL_INT((unum >> 8) & 0xff); + break; + + case MP_BC_RETURN_VALUE: + instruction->qstr_opname = MP_QSTR_RETURN_VALUE; + break; + + case MP_BC_RAISE_LAST: + instruction->qstr_opname = MP_QSTR_RAISE_LAST; + break; + + case MP_BC_RAISE_OBJ: + instruction->qstr_opname = MP_QSTR_RAISE_OBJ; + break; + + case MP_BC_RAISE_FROM: + instruction->qstr_opname = MP_QSTR_RAISE_FROM; + break; + + case MP_BC_YIELD_VALUE: + instruction->qstr_opname = MP_QSTR_YIELD_VALUE; + break; + + case MP_BC_YIELD_FROM: + instruction->qstr_opname = MP_QSTR_YIELD_FROM; + break; + + case MP_BC_IMPORT_NAME: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_IMPORT_NAME; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_IMPORT_FROM: + DECODE_QSTR; + instruction->qstr_opname = MP_QSTR_IMPORT_FROM; + instruction->arg = qst; + instruction->argobj= MP_OBJ_NEW_QSTR(qst); + break; + + case MP_BC_IMPORT_STAR: + instruction->qstr_opname = MP_QSTR_IMPORT_STAR; + break; + + default: + if (ip[-1] < MP_BC_LOAD_CONST_SMALL_INT_MULTI + 64) { + instruction->qstr_opname = MP_QSTR_LOAD_CONST_SMALL_INT; + instruction->arg = (mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - 16; + } else if (ip[-1] < MP_BC_LOAD_FAST_MULTI + 16) { + instruction->qstr_opname = MP_QSTR_LOAD_FAST; + instruction->arg = (mp_uint_t)ip[-1] - MP_BC_LOAD_FAST_MULTI; + } else if (ip[-1] < MP_BC_STORE_FAST_MULTI + 16) { + instruction->qstr_opname = MP_QSTR_STORE_FAST; + instruction->arg = (mp_uint_t)ip[-1] - MP_BC_STORE_FAST_MULTI; + } else if (ip[-1] < MP_BC_UNARY_OP_MULTI + MP_UNARY_OP_NUM_BYTECODE) { + instruction->qstr_opname = MP_QSTR_UNARY_OP; + instruction->arg = (mp_uint_t)ip[-1] - MP_BC_UNARY_OP_MULTI; + } else if (ip[-1] < MP_BC_BINARY_OP_MULTI + MP_BINARY_OP_NUM_BYTECODE) { + mp_uint_t op = ip[-1] - MP_BC_BINARY_OP_MULTI; + instruction->qstr_opname = MP_QSTR_BINARY_OP; + instruction->arg = op; + } else { + mp_printf(&mp_plat_print, "code %p, opcode 0x%02x not implemented\n", ip-1, ip[-1]); + assert(0); + return ip; + } + break; + } + + return ip; +} + +void mp_prof_print_instr(const byte* ip, mp_code_state_t *code_state) { + mp_dis_instruction_t _instruction, *instruction = &_instruction; + mp_prof_opcode_decode(ip, code_state->fun_bc->rc->const_table, instruction); + const mp_raw_code_t *rc = code_state->fun_bc->rc; + const mp_bytecode_prelude_t *prelude = &rc->prelude; + + mp_uint_t offset = ip - prelude->opcodes; + mp_printf(&mp_plat_print, "instr"); + + /* long path */ if (1) { + mp_printf(&mp_plat_print, + "@0x%p:%q:%q+0x%04x:%d", + ip, + prelude->qstr_source_file, + prelude->qstr_block_name, + offset, + mp_prof_bytecode_lineno(rc, offset) + ); + } + + /* bytecode */ if (0) { + mp_printf(&mp_plat_print, " %02x %02x %02x %02x", ip[0], ip[1], ip[2], ip[3]); + } + + mp_printf(&mp_plat_print, " 0x%02x %q [%d]", *ip, instruction->qstr_opname, instruction->arg); + + if (instruction->argobj != mp_const_none) { + mp_printf(&mp_plat_print, " $"); + mp_obj_print_helper(&mp_plat_print, instruction->argobj, PRINT_REPR); + } + if (instruction->argobjex_cache != mp_const_none) { + mp_printf(&mp_plat_print, " #"); + mp_obj_print_helper(&mp_plat_print, instruction->argobjex_cache, PRINT_REPR); + } + + mp_printf(&mp_plat_print, "\n"); +} + +#endif // MICROPY_PROF_INSTR_DEBUG_PRINT_ENABLE + +#endif // MICROPY_PY_SYS_SETTRACE diff --git 
a/python/src/py/profile.h b/python/src/py/profile.h new file mode 100644 index 000000000..0293e262f --- /dev/null +++ b/python/src/py/profile.h @@ -0,0 +1,79 @@ +/* + * This file is part of the MicroPython project, http://micropython.org/ + * + * The MIT License (MIT) + * + * Copyright (c) SatoshiLabs + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ + +#ifndef MICROPY_INCLUDED_PY_PROFILING_H +#define MICROPY_INCLUDED_PY_PROFILING_H + +#include "py/emitglue.h" + +#if MICROPY_PY_SYS_SETTRACE + +#define mp_prof_is_executing MP_STATE_THREAD(prof_callback_is_executing) + +typedef struct _mp_obj_code_t { + mp_obj_base_t base; + const mp_raw_code_t *rc; + mp_obj_dict_t *dict_locals; + mp_obj_t lnotab; +} mp_obj_code_t; + +typedef struct _mp_obj_frame_t { + mp_obj_base_t base; + const mp_code_state_t *code_state; + struct _mp_obj_frame_t *back; + mp_obj_t callback; + mp_obj_code_t *code; + mp_uint_t lasti; + mp_uint_t lineno; + bool trace_opcodes; +} mp_obj_frame_t; + +void mp_prof_extract_prelude(const byte *bytecode, mp_bytecode_prelude_t *prelude); + +mp_obj_t mp_obj_new_code(const mp_raw_code_t *rc); +mp_obj_t mp_obj_new_frame(const mp_code_state_t *code_state); + +// This is the implementation for the sys.settrace +mp_obj_t mp_prof_settrace(mp_obj_t callback); + +mp_obj_t mp_prof_frame_enter(mp_code_state_t *code_state); +mp_obj_t mp_prof_frame_update(const mp_code_state_t *code_state); + +// For every VM instruction tick this function deduces events from the state +mp_obj_t mp_prof_instr_tick(mp_code_state_t *code_state, bool is_exception); + +// This section is for debugging the settrace feature itself, and is not intended +// to be included in production/release builds. +#define MICROPY_PROF_INSTR_DEBUG_PRINT_ENABLE 0 +#if MICROPY_PROF_INSTR_DEBUG_PRINT_ENABLE +void mp_prof_print_instr(const byte* ip, mp_code_state_t *code_state); +#define MP_PROF_INSTR_DEBUG_PRINT(current_ip) mp_prof_print_instr((current_ip), code_state) +#else +#define MP_PROF_INSTR_DEBUG_PRINT(current_ip) +#endif + +#endif // MICROPY_PY_SYS_SETTRACE +#endif // MICROPY_INCLUDED_PY_PROFILING_H diff --git a/python/src/py/qstr.h b/python/src/py/qstr.h index f4375ee0e..8153ef29f 100644 --- a/python/src/py/qstr.h +++ b/python/src/py/qstr.h @@ -35,7 +35,7 @@ // Note: it would be possible to define MP_QSTR_xxx as qstr_from_str_static("xxx") // for qstrs that are referenced this way, but you don't want to have them in ROM. 
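// The MP_QSTR_xxx identifiers discussed above rely on qstrs being interned
// strings: a qstr is just an index into a pool of strings, so identical text
// is stored once and compared by index afterwards. A toy standalone sketch of
// the find-or-add idea (fixed-size pool, no hashing or ROM/RAM split, purely
// illustrative):

#include <stdio.h>
#include <string.h>

#define POOL_MAX 16
static const char *pool[POOL_MAX] = { "" };   // index 0 reserved: "no qstr"
static size_t pool_len = 1;

static size_t intern(const char *s) {
    for (size_t i = 1; i < pool_len; ++i) {
        if (strcmp(pool[i], s) == 0) {
            return i;                         // already interned: reuse the index
        }
    }
    pool[pool_len] = s;                       // the real pool copies and hashes the data (bounds check omitted)
    return pool_len++;
}

int main(void) {
    size_t a = intern("print");
    size_t b = intern("len");
    size_t c = intern("print");
    printf("%zu %zu %zu\n", a, b, c);         // "1 2 1": equal text, equal index
    return 0;
}

// In the enum below, the renamed first entry (MP_QSTRnull = 0) plays the role
// of that reserved index 0, meaning "invalid/no qstr".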
-// first entry in enum will be MP_QSTR_NULL=0, which indicates invalid/no qstr +// first entry in enum will be MP_QSTRnull=0, which indicates invalid/no qstr enum { #ifndef NO_QSTR #define QDEF(id, str) id, @@ -61,7 +61,7 @@ typedef struct _qstr_pool_t { void qstr_init(void); mp_uint_t qstr_compute_hash(const byte *data, size_t len); -qstr qstr_find_strn(const char *str, size_t str_len); // returns MP_QSTR_NULL if not found +qstr qstr_find_strn(const char *str, size_t str_len); // returns MP_QSTRnull if not found qstr qstr_from_str(const char *str); qstr qstr_from_strn(const char *str, size_t len); diff --git a/python/src/py/repl.c b/python/src/py/repl.c index da0fefb3a..9389b3424 100644 --- a/python/src/py/repl.c +++ b/python/src/py/repl.c @@ -159,7 +159,7 @@ size_t mp_repl_autocomplete(const char *str, size_t len, const mp_print_t *print if (str < top) { // a complete word, lookup in current object qstr q = qstr_find_strn(s_start, s_len); - if (q == MP_QSTR_NULL) { + if (q == MP_QSTRnull) { // lookup will fail return 0; } diff --git a/python/src/py/ringbuf.c b/python/src/py/ringbuf.c new file mode 100644 index 000000000..83887b300 --- /dev/null +++ b/python/src/py/ringbuf.c @@ -0,0 +1,73 @@ +/* + * This file is part of the MicroPython project, http://micropython.org/ + * + * The MIT License (MIT) + * + * Copyright (c) 2019 Jim Mussared + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ +#include "ringbuf.h" + +int ringbuf_get16(ringbuf_t *r) { + int v = ringbuf_peek16(r); + if (v == -1) { + return v; + } + r->iget += 2; + if (r->iget >= r->size) { + r->iget -= r->size; + } + return v; +} + +int ringbuf_peek16(ringbuf_t *r) { + if (r->iget == r->iput) { + return -1; + } + uint32_t iget_a = r->iget + 1; + if (iget_a == r->size) { + iget_a = 0; + } + if (iget_a == r->iput) { + return -1; + } + return (r->buf[r->iget] << 8) | (r->buf[iget_a]); +} + +int ringbuf_put16(ringbuf_t *r, uint16_t v) { + uint32_t iput_a = r->iput + 1; + if (iput_a == r->size) { + iput_a = 0; + } + if (iput_a == r->iget) { + return -1; + } + uint32_t iput_b = iput_a + 1; + if (iput_b == r->size) { + iput_b = 0; + } + if (iput_b == r->iget) { + return -1; + } + r->buf[r->iput] = (v >> 8) & 0xff; + r->buf[iput_a] = v & 0xff; + r->iput = iput_b; + return 0; +} diff --git a/python/src/py/ringbuf.h b/python/src/py/ringbuf.h index b41692706..8d4ed1643 100644 --- a/python/src/py/ringbuf.h +++ b/python/src/py/ringbuf.h @@ -26,6 +26,9 @@ #ifndef MICROPY_INCLUDED_PY_RINGBUF_H #define MICROPY_INCLUDED_PY_RINGBUF_H +#include <stddef.h> +#include <stdint.h> + typedef struct _ringbuf_t { uint8_t *buf; uint16_t size; @@ -37,7 +40,7 @@ typedef struct _ringbuf_t { // byte buf_array[N]; // ringbuf_t buf = {buf_array, sizeof(buf_array)}; -// Dynamic initialization. This creates root pointer! +// Dynamic initialization. This needs to become findable as a root pointer! #define ringbuf_alloc(r, sz) \ { \ (r)->buf = m_new(uint8_t, sz); \ @@ -69,4 +72,17 @@ static inline int ringbuf_put(ringbuf_t *r, uint8_t v) { return 0; } +static inline size_t ringbuf_free(ringbuf_t *r) { + return (r->size + r->iget - r->iput - 1) % r->size; +} + +static inline size_t ringbuf_avail(ringbuf_t *r) { + return (r->size + r->iput - r->iget) % r->size; +} + +// Note: big-endian. No-op if not enough room available for both bytes.
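// The ring buffer keeps one slot unused so that iget == iput always means
// "empty"; that is why ringbuf_free() + ringbuf_avail() == size - 1, and why
// ringbuf_put16() above refuses to store anything unless both bytes fit. A
// small self-contained demo of the same arithmetic and of the big-endian
// 16-bit round trip (local toy type, not the py/ringbuf.h API itself):

#include <stdio.h>
#include <stdint.h>

typedef struct { uint8_t buf[8]; uint16_t size, iget, iput; } rb_t;

static size_t rb_free(rb_t *r)  { return (r->size + r->iget - r->iput - 1) % r->size; }
static size_t rb_avail(rb_t *r) { return (r->size + r->iput - r->iget) % r->size; }

static int rb_put16(rb_t *r, uint16_t v) {
    if (rb_free(r) < 2) {
        return -1;                       // no room for both bytes: store nothing
    }
    r->buf[r->iput] = v >> 8;            // high byte first (big-endian)
    r->iput = (r->iput + 1) % r->size;
    r->buf[r->iput] = v & 0xff;
    r->iput = (r->iput + 1) % r->size;
    return 0;
}

static int rb_get16(rb_t *r, uint16_t *v) {
    if (rb_avail(r) < 2) {
        return -1;
    }
    *v = (uint16_t)(r->buf[r->iget] << 8);
    r->iget = (r->iget + 1) % r->size;
    *v |= r->buf[r->iget];
    r->iget = (r->iget + 1) % r->size;
    return 0;
}

int main(void) {
    rb_t r = { .size = 8 };
    printf("free=%zu avail=%zu\n", rb_free(&r), rb_avail(&r));   // free=7 avail=0
    rb_put16(&r, 0xBEEF);
    uint16_t v = 0;
    rb_get16(&r, &v);
    printf("got 0x%04X free=%zu\n", v, rb_free(&r));             // 0xBEEF, free back to 7
    return 0;
}

// (The prototypes just below are the actual py/ringbuf.h API added by this
// patch; the sketch above only mirrors their intent.)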
+int ringbuf_get16(ringbuf_t *r); +int ringbuf_peek16(ringbuf_t *r); +int ringbuf_put16(ringbuf_t *r, uint16_t v); + #endif // MICROPY_INCLUDED_PY_RINGBUF_H diff --git a/python/src/py/runtime.c b/python/src/py/runtime.c index e50256605..deb82e935 100644 --- a/python/src/py/runtime.c +++ b/python/src/py/runtime.c @@ -88,6 +88,9 @@ void mp_init(void) { #if MICROPY_ENABLE_COMPILER // optimization disabled by default MP_STATE_VM(mp_optimise_value) = 0; + #if MICROPY_EMIT_NATIVE + MP_STATE_VM(default_emit_opt) = MP_EMIT_OPT_NONE; + #endif #endif // init global module dict @@ -118,6 +121,20 @@ void mp_init(void) { MP_STATE_VM(vfs_mount_table) = NULL; #endif + #if MICROPY_PY_SYS_ATEXIT + MP_STATE_VM(sys_exitfunc) = mp_const_none; + #endif + + #if MICROPY_PY_SYS_SETTRACE + MP_STATE_THREAD(prof_trace_callback) = MP_OBJ_NULL; + MP_STATE_THREAD(prof_callback_is_executing) = false; + MP_STATE_THREAD(current_code_state) = NULL; + #endif + + #if MICROPY_PY_BLUETOOTH + MP_STATE_VM(bluetooth) = MP_OBJ_NULL; + #endif + #if MICROPY_PY_THREAD_GIL mp_thread_mutex_init(&MP_STATE_VM(gil_mutex)); #endif @@ -553,16 +570,17 @@ generic_binary_op: } #if MICROPY_PY_REVERSE_SPECIAL_METHODS - if (op >= MP_BINARY_OP_OR && op <= MP_BINARY_OP_REVERSE_POWER) { + if (op >= MP_BINARY_OP_OR && op <= MP_BINARY_OP_POWER) { mp_obj_t t = rhs; rhs = lhs; lhs = t; - if (op <= MP_BINARY_OP_POWER) { - op += MP_BINARY_OP_REVERSE_OR - MP_BINARY_OP_OR; - goto generic_binary_op; - } - + op += MP_BINARY_OP_REVERSE_OR - MP_BINARY_OP_OR; + goto generic_binary_op; + } else if (op >= MP_BINARY_OP_REVERSE_OR) { // Convert __rop__ back to __op__ for error message + mp_obj_t t = rhs; + rhs = lhs; + lhs = t; op -= MP_BINARY_OP_REVERSE_OR - MP_BINARY_OP_OR; } #endif @@ -1302,7 +1320,12 @@ mp_vm_return_kind_t mp_resume(mp_obj_t self_in, mp_obj_t send_value, mp_obj_t th // will be propagated up. 
This behavior is approved by test_pep380.py // test_delegation_of_close_to_non_generator(), // test_delegating_throw_to_non_generator() - *ret_val = mp_make_raise_obj(throw_value); + if (mp_obj_exception_match(throw_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration))) { + // PEP479: if StopIteration is raised inside a generator it is replaced with RuntimeError + *ret_val = mp_obj_new_exception_msg(&mp_type_RuntimeError, "generator raised StopIteration"); + } else { + *ret_val = mp_make_raise_obj(throw_value); + } return MP_VM_RETURN_EXCEPTION; } } @@ -1335,7 +1358,17 @@ mp_obj_t mp_import_name(qstr name, mp_obj_t fromlist, mp_obj_t level) { args[3] = fromlist; args[4] = level; - // TODO lookup __import__ and call that instead of going straight to builtin implementation + #if MICROPY_CAN_OVERRIDE_BUILTINS + // Lookup __import__ and call that if it exists + mp_obj_dict_t *bo_dict = MP_STATE_VM(mp_module_builtins_override_dict); + if (bo_dict != NULL) { + mp_map_elem_t *import = mp_map_lookup(&bo_dict->map, MP_OBJ_NEW_QSTR(MP_QSTR___import__), MP_MAP_LOOKUP); + if (import != NULL) { + return mp_call_function_n_kw(import->value, 5, 0, args); + } + } + #endif + return mp_builtin___import__(5, args); } @@ -1407,7 +1440,6 @@ void mp_import_all(mp_obj_t module) { #if MICROPY_ENABLE_COMPILER -// this is implemented in this file so it can optimise access to locals/globals mp_obj_t mp_parse_compile_execute(mp_lexer_t *lex, mp_parse_input_kind_t parse_input_kind, mp_obj_dict_t *globals, mp_obj_dict_t *locals) { // save context mp_obj_dict_t *volatile old_globals = mp_globals_get(); @@ -1421,7 +1453,7 @@ mp_obj_t mp_parse_compile_execute(mp_lexer_t *lex, mp_parse_input_kind_t parse_i if (nlr_push(&nlr) == 0) { qstr source_name = lex->source_name; mp_parse_tree_t parse_tree = mp_parse(lex, parse_input_kind); - mp_obj_t module_fun = mp_compile(&parse_tree, source_name, MP_EMIT_OPT_NONE, false); + mp_obj_t module_fun = mp_compile(&parse_tree, source_name, false); mp_obj_t ret; if (MICROPY_PY_BUILTINS_COMPILE && globals == NULL) { diff --git a/python/src/py/runtime.h b/python/src/py/runtime.h index 0eb15d461..b54b17b68 100644 --- a/python/src/py/runtime.h +++ b/python/src/py/runtime.h @@ -151,7 +151,6 @@ mp_obj_t mp_import_from(mp_obj_t module, qstr name); void mp_import_all(mp_obj_t module); NORETURN void mp_raise_msg(const mp_obj_type_t *exc_type, const char *msg); -//NORETURN void nlr_raise_msg_varg(const mp_obj_type_t *exc_type, const char *fmt, ...); NORETURN void mp_raise_ValueError(const char *msg); NORETURN void mp_raise_TypeError(const char *msg); NORETURN void mp_raise_NotImplementedError(const char *msg); @@ -172,9 +171,6 @@ NORETURN void mp_raise_recursion_depth(void); int mp_native_type_from_qstr(qstr qst); mp_uint_t mp_native_from_obj(mp_obj_t obj, mp_uint_t type); mp_obj_t mp_native_to_obj(mp_uint_t val, mp_uint_t type); -mp_obj_dict_t *mp_native_swap_globals(mp_obj_dict_t *new_globals); -mp_obj_t mp_native_call_function_n_kw(mp_obj_t fun_in, size_t n_args_kw, const mp_obj_t *args); -void mp_native_raise(mp_obj_t o); #define mp_sys_path (MP_OBJ_FROM_PTR(&MP_STATE_VM(mp_sys_path_obj))) #define mp_sys_argv (MP_OBJ_FROM_PTR(&MP_STATE_VM(mp_sys_argv_obj))) diff --git a/python/src/py/runtime0.h b/python/src/py/runtime0.h index efd439196..e6eeff97d 100644 --- a/python/src/py/runtime0.h +++ b/python/src/py/runtime0.h @@ -28,13 +28,17 @@ // The first four must fit in 8 bits, see emitbc.c // The remaining must fit in 16 bits, see scope.h -#define MP_SCOPE_FLAG_VARARGS (0x01) +#define 
MP_SCOPE_FLAG_ALL_SIG (0x0f) +#define MP_SCOPE_FLAG_GENERATOR (0x01) #define MP_SCOPE_FLAG_VARKEYWORDS (0x02) -#define MP_SCOPE_FLAG_GENERATOR (0x04) +#define MP_SCOPE_FLAG_VARARGS (0x04) #define MP_SCOPE_FLAG_DEFKWARGS (0x08) #define MP_SCOPE_FLAG_REFGLOBALS (0x10) // used only if native emitter enabled #define MP_SCOPE_FLAG_HASCONSTS (0x20) // used only if native emitter enabled -#define MP_SCOPE_FLAG_VIPERRET_POS (6) // 3 bits used for viper return type +#define MP_SCOPE_FLAG_VIPERRET_POS (6) // 3 bits used for viper return type, to pass from compiler to native emitter +#define MP_SCOPE_FLAG_VIPERRELOC (0x10) // used only when loading viper from .mpy +#define MP_SCOPE_FLAG_VIPERRODATA (0x20) // used only when loading viper from .mpy +#define MP_SCOPE_FLAG_VIPERBSS (0x40) // used only when loading viper from .mpy // types for native (viper) function signature #define MP_NATIVE_TYPE_OBJ (0x00) @@ -46,6 +50,18 @@ #define MP_NATIVE_TYPE_PTR16 (0x06) #define MP_NATIVE_TYPE_PTR32 (0x07) +// Bytecode and runtime boundaries for unary ops +#define MP_UNARY_OP_NUM_BYTECODE (MP_UNARY_OP_NOT + 1) +#define MP_UNARY_OP_NUM_RUNTIME (MP_UNARY_OP_SIZEOF + 1) + +// Bytecode and runtime boundaries for binary ops +#define MP_BINARY_OP_NUM_BYTECODE (MP_BINARY_OP_POWER + 1) +#if MICROPY_PY_REVERSE_SPECIAL_METHODS +#define MP_BINARY_OP_NUM_RUNTIME (MP_BINARY_OP_REVERSE_POWER + 1) +#else +#define MP_BINARY_OP_NUM_RUNTIME (MP_BINARY_OP_CONTAINS + 1) +#endif + typedef enum { // These ops may appear in the bytecode. Changing this group // in any way requires changing the bytecode version. @@ -55,22 +71,19 @@ typedef enum { MP_UNARY_OP_NOT, // Following ops cannot appear in the bytecode - MP_UNARY_OP_NUM_BYTECODE, - - MP_UNARY_OP_BOOL = MP_UNARY_OP_NUM_BYTECODE, // __bool__ + MP_UNARY_OP_BOOL, // __bool__ MP_UNARY_OP_LEN, // __len__ MP_UNARY_OP_HASH, // __hash__; must return a small int MP_UNARY_OP_ABS, // __abs__ MP_UNARY_OP_INT, // __int__ MP_UNARY_OP_SIZEOF, // for sys.getsizeof() - - MP_UNARY_OP_NUM_RUNTIME, } mp_unary_op_t; -// Note: the first 9+12+12 of these are used in bytecode and changing -// them requires changing the bytecode version. typedef enum { - // 9 relational operations, should return a bool + // The following 9+13+13 ops are used in bytecode and changing + // them requires changing the bytecode version. 
+ + // 9 relational operations, should return a bool; order of first 6 matches corresponding mp_token_kind_t MP_BINARY_OP_LESS, MP_BINARY_OP_MORE, MP_BINARY_OP_EQUAL, @@ -81,7 +94,7 @@ typedef enum { MP_BINARY_OP_IS, MP_BINARY_OP_EXCEPTION_MATCH, - // 12 inplace arithmetic operations + // 13 inplace arithmetic operations; order matches corresponding mp_token_kind_t MP_BINARY_OP_INPLACE_OR, MP_BINARY_OP_INPLACE_XOR, MP_BINARY_OP_INPLACE_AND, @@ -90,12 +103,13 @@ typedef enum { MP_BINARY_OP_INPLACE_ADD, MP_BINARY_OP_INPLACE_SUBTRACT, MP_BINARY_OP_INPLACE_MULTIPLY, + MP_BINARY_OP_INPLACE_MAT_MULTIPLY, MP_BINARY_OP_INPLACE_FLOOR_DIVIDE, MP_BINARY_OP_INPLACE_TRUE_DIVIDE, MP_BINARY_OP_INPLACE_MODULO, MP_BINARY_OP_INPLACE_POWER, - // 12 normal arithmetic operations + // 13 normal arithmetic operations; order matches corresponding mp_token_kind_t MP_BINARY_OP_OR, MP_BINARY_OP_XOR, MP_BINARY_OP_AND, @@ -104,6 +118,7 @@ typedef enum { MP_BINARY_OP_ADD, MP_BINARY_OP_SUBTRACT, MP_BINARY_OP_MULTIPLY, + MP_BINARY_OP_MAT_MULTIPLY, MP_BINARY_OP_FLOOR_DIVIDE, MP_BINARY_OP_TRUE_DIVIDE, MP_BINARY_OP_MODULO, @@ -111,11 +126,18 @@ typedef enum { // Operations below this line don't appear in bytecode, they // just identify special methods. - MP_BINARY_OP_NUM_BYTECODE, - // MP_BINARY_OP_REVERSE_* must follow immediately after MP_BINARY_OP_* -#if MICROPY_PY_REVERSE_SPECIAL_METHODS - MP_BINARY_OP_REVERSE_OR = MP_BINARY_OP_NUM_BYTECODE, + // This is not emitted by the compiler but is supported by the runtime. + // It must follow immediately after MP_BINARY_OP_POWER. + MP_BINARY_OP_DIVMOD, + + // The runtime will convert MP_BINARY_OP_IN to this operator with swapped args. + // A type should implement this containment operator instead of MP_BINARY_OP_IN. + MP_BINARY_OP_CONTAINS, + + // 13 MP_BINARY_OP_REVERSE_* operations must be in the same order as MP_BINARY_OP_*, + // and be the last ones supported by the runtime. + MP_BINARY_OP_REVERSE_OR, MP_BINARY_OP_REVERSE_XOR, MP_BINARY_OP_REVERSE_AND, MP_BINARY_OP_REVERSE_LSHIFT, @@ -123,88 +145,15 @@ typedef enum { MP_BINARY_OP_REVERSE_ADD, MP_BINARY_OP_REVERSE_SUBTRACT, MP_BINARY_OP_REVERSE_MULTIPLY, + MP_BINARY_OP_REVERSE_MAT_MULTIPLY, MP_BINARY_OP_REVERSE_FLOOR_DIVIDE, MP_BINARY_OP_REVERSE_TRUE_DIVIDE, MP_BINARY_OP_REVERSE_MODULO, MP_BINARY_OP_REVERSE_POWER, -#endif - - // This is not emitted by the compiler but is supported by the runtime - MP_BINARY_OP_DIVMOD - #if !MICROPY_PY_REVERSE_SPECIAL_METHODS - = MP_BINARY_OP_NUM_BYTECODE - #endif - , - - // The runtime will convert MP_BINARY_OP_IN to this operator with swapped args. - // A type should implement this containment operator instead of MP_BINARY_OP_IN. 
- MP_BINARY_OP_CONTAINS, - - MP_BINARY_OP_NUM_RUNTIME, // These 2 are not supported by the runtime and must be synthesised by the emitter MP_BINARY_OP_NOT_IN, MP_BINARY_OP_IS_NOT, } mp_binary_op_t; -typedef enum { - MP_F_CONST_NONE_OBJ = 0, - MP_F_CONST_FALSE_OBJ, - MP_F_CONST_TRUE_OBJ, - MP_F_CONVERT_OBJ_TO_NATIVE, - MP_F_CONVERT_NATIVE_TO_OBJ, - MP_F_NATIVE_SWAP_GLOBALS, - MP_F_LOAD_NAME, - MP_F_LOAD_GLOBAL, - MP_F_LOAD_BUILD_CLASS, - MP_F_LOAD_ATTR, - MP_F_LOAD_METHOD, - MP_F_LOAD_SUPER_METHOD, - MP_F_STORE_NAME, - MP_F_STORE_GLOBAL, - MP_F_STORE_ATTR, - MP_F_OBJ_SUBSCR, - MP_F_OBJ_IS_TRUE, - MP_F_UNARY_OP, - MP_F_BINARY_OP, - MP_F_BUILD_TUPLE, - MP_F_BUILD_LIST, - MP_F_LIST_APPEND, - MP_F_BUILD_MAP, - MP_F_STORE_MAP, -#if MICROPY_PY_BUILTINS_SET - MP_F_STORE_SET, - MP_F_BUILD_SET, -#endif - MP_F_MAKE_FUNCTION_FROM_RAW_CODE, - MP_F_NATIVE_CALL_FUNCTION_N_KW, - MP_F_CALL_METHOD_N_KW, - MP_F_CALL_METHOD_N_KW_VAR, - MP_F_NATIVE_GETITER, - MP_F_NATIVE_ITERNEXT, - MP_F_NLR_PUSH, - MP_F_NLR_POP, - MP_F_NATIVE_RAISE, - MP_F_IMPORT_NAME, - MP_F_IMPORT_FROM, - MP_F_IMPORT_ALL, -#if MICROPY_PY_BUILTINS_SLICE - MP_F_NEW_SLICE, -#endif - MP_F_UNPACK_SEQUENCE, - MP_F_UNPACK_EX, - MP_F_DELETE_NAME, - MP_F_DELETE_GLOBAL, - MP_F_NEW_CELL, - MP_F_MAKE_CLOSURE_FROM_RAW_CODE, - MP_F_ARG_CHECK_NUM_SIG, - MP_F_SETUP_CODE_STATE, - MP_F_SMALL_INT_FLOOR_DIVIDE, - MP_F_SMALL_INT_MODULO, - MP_F_NATIVE_YIELD_FROM, - MP_F_NUMBER_OF, -} mp_fun_kind_t; - -extern const void *const mp_fun_table[MP_F_NUMBER_OF]; - #endif // MICROPY_INCLUDED_PY_RUNTIME0_H diff --git a/python/src/py/scheduler.c b/python/src/py/scheduler.c index 5edff45b6..e7cbb524d 100644 --- a/python/src/py/scheduler.c +++ b/python/src/py/scheduler.c @@ -65,7 +65,7 @@ void mp_handle_pending(void) { void mp_handle_pending_tail(mp_uint_t atomic_state) { MP_STATE_VM(sched_state) = MP_SCHED_LOCKED; if (!mp_sched_empty()) { - mp_sched_item_t item = MP_STATE_VM(sched_stack)[MP_STATE_VM(sched_idx)]; + mp_sched_item_t item = MP_STATE_VM(sched_queue)[MP_STATE_VM(sched_idx)]; MP_STATE_VM(sched_idx) = IDX_MASK(MP_STATE_VM(sched_idx) + 1); --MP_STATE_VM(sched_len); MICROPY_END_ATOMIC_SECTION(atomic_state); @@ -107,11 +107,11 @@ bool mp_sched_schedule(mp_obj_t function, mp_obj_t arg) { MP_STATE_VM(sched_state) = MP_SCHED_PENDING; } uint8_t iput = IDX_MASK(MP_STATE_VM(sched_idx) + MP_STATE_VM(sched_len)++); - MP_STATE_VM(sched_stack)[iput].func = function; - MP_STATE_VM(sched_stack)[iput].arg = arg; + MP_STATE_VM(sched_queue)[iput].func = function; + MP_STATE_VM(sched_queue)[iput].arg = arg; ret = true; } else { - // schedule stack is full + // schedule queue is full ret = false; } MICROPY_END_ATOMIC_SECTION(atomic_state); diff --git a/python/src/py/sequence.c b/python/src/py/sequence.c index c66fde98f..4c19fc69e 100644 --- a/python/src/py/sequence.c +++ b/python/src/py/sequence.c @@ -165,7 +165,7 @@ bool mp_seq_cmp_bytes(mp_uint_t op, const byte *data1, size_t len1, const byte * size_t min_len = len1 < len2 ? 
len1 : len2; int res = memcmp(data1, data2, min_len); if (op == MP_BINARY_OP_EQUAL) { - // If we are checking for equality, here're the answer + // If we are checking for equality, here's the answer return res == 0; } if (res < 0) { diff --git a/python/src/py/showbc.c b/python/src/py/showbc.c index b9024b716..d154511dc 100644 --- a/python/src/py/showbc.c +++ b/python/src/py/showbc.c @@ -83,17 +83,10 @@ const mp_uint_t *mp_showbc_const_table; void mp_bytecode_print(const void *descr, const byte *ip, mp_uint_t len, const mp_uint_t *const_table) { mp_showbc_code_start = ip; - // get bytecode parameters - mp_uint_t n_state = mp_decode_uint(&ip); - mp_uint_t n_exc_stack = mp_decode_uint(&ip); - /*mp_uint_t scope_flags =*/ ip++; - mp_uint_t n_pos_args = *ip++; - mp_uint_t n_kwonly_args = *ip++; - /*mp_uint_t n_def_pos_args =*/ ip++; - + // Decode prelude + MP_BC_PRELUDE_SIG_DECODE(ip); + MP_BC_PRELUDE_SIZE_DECODE(ip); const byte *code_info = ip; - mp_uint_t code_info_size = mp_decode_uint(&code_info); - ip += code_info_size; #if MICROPY_PERSISTENT_CODE qstr block_name = code_info[0] | (code_info[1] << 8); @@ -107,7 +100,9 @@ void mp_bytecode_print(const void *descr, const byte *ip, mp_uint_t len, const m qstr_str(source_file), qstr_str(block_name), descr, mp_showbc_code_start, len); // raw bytecode dump - printf("Raw bytecode (code_info_size=" UINT_FMT ", bytecode_size=" UINT_FMT "):\n", code_info_size, len - code_info_size); + size_t prelude_size = ip - mp_showbc_code_start + n_info + n_cell; + printf("Raw bytecode (code_info_size=" UINT_FMT ", bytecode_size=" UINT_FMT "):\n", + prelude_size, len - prelude_size); for (mp_uint_t i = 0; i < len; i++) { if (i > 0 && i % 16 == 0) { printf("\n"); @@ -123,24 +118,21 @@ void mp_bytecode_print(const void *descr, const byte *ip, mp_uint_t len, const m } printf("\n"); - printf("(N_STATE " UINT_FMT ")\n", n_state); - printf("(N_EXC_STACK " UINT_FMT ")\n", n_exc_stack); + printf("(N_STATE %u)\n", (unsigned)n_state); + printf("(N_EXC_STACK %u)\n", (unsigned)n_exc_stack); - // for printing line number info - const byte *bytecode_start = ip; + // skip over code_info + ip += n_info; // bytecode prelude: initialise closed over variables - { - uint local_num; - while ((local_num = *ip++) != 255) { - printf("(INIT_CELL %u)\n", local_num); - } - len -= ip - mp_showbc_code_start; + for (size_t i = 0; i < n_cell; ++i) { + uint local_num = *ip++; + printf("(INIT_CELL %u)\n", local_num); } // print out line number info { - mp_int_t bc = bytecode_start - ip; + mp_int_t bc = 0; mp_uint_t source_line = 1; printf(" bc=" INT_FMT " line=" UINT_FMT "\n", bc, source_line); for (const byte* ci = code_info; *ci;) { @@ -158,7 +150,7 @@ void mp_bytecode_print(const void *descr, const byte *ip, mp_uint_t len, const m printf(" bc=" INT_FMT " line=" UINT_FMT "\n", bc, source_line); } } - mp_bytecode_print2(ip, len - 0, const_table); + mp_bytecode_print2(ip, len - prelude_size, const_table); } const byte *mp_bytecode_print_str(const byte *ip) { @@ -500,9 +492,16 @@ const byte *mp_bytecode_print_str(const byte *ip) { printf("RETURN_VALUE"); break; - case MP_BC_RAISE_VARARGS: - unum = *ip++; - printf("RAISE_VARARGS " UINT_FMT, unum); + case MP_BC_RAISE_LAST: + printf("RAISE_LAST"); + break; + + case MP_BC_RAISE_OBJ: + printf("RAISE_OBJ"); + break; + + case MP_BC_RAISE_FROM: + printf("RAISE_FROM"); break; case MP_BC_YIELD_VALUE: @@ -540,7 +539,7 @@ const byte *mp_bytecode_print_str(const byte *ip) { mp_uint_t op = ip[-1] - MP_BC_BINARY_OP_MULTI; printf("BINARY_OP " UINT_FMT " %s", op, 
qstr_str(mp_binary_op_method_name[op])); } else { - printf("code %p, byte code 0x%02x not implemented\n", ip, ip[-1]); + printf("code %p, byte code 0x%02x not implemented\n", ip - 1, ip[-1]); assert(0); return ip; } diff --git a/python/src/py/stream.h b/python/src/py/stream.h index b6019bb38..3ebd4e042 100644 --- a/python/src/py/stream.h +++ b/python/src/py/stream.h @@ -45,10 +45,11 @@ #define MP_STREAM_GET_FILENO (10) // Get fileno of underlying file // These poll ioctl values are compatible with Linux -#define MP_STREAM_POLL_RD (0x0001) -#define MP_STREAM_POLL_WR (0x0004) -#define MP_STREAM_POLL_ERR (0x0008) -#define MP_STREAM_POLL_HUP (0x0010) +#define MP_STREAM_POLL_RD (0x0001) +#define MP_STREAM_POLL_WR (0x0004) +#define MP_STREAM_POLL_ERR (0x0008) +#define MP_STREAM_POLL_HUP (0x0010) +#define MP_STREAM_POLL_NVAL (0x0020) // Argument structure for MP_STREAM_SEEK struct mp_stream_seek_t { diff --git a/python/src/py/vm.c b/python/src/py/vm.c index 260a7f38b..11dbffaff 100644 --- a/python/src/py/vm.c +++ b/python/src/py/vm.c @@ -34,6 +34,7 @@ #include "py/runtime.h" #include "py/bc0.h" #include "py/bc.h" +#include "py/profile.h" #if 0 #define TRACE(ip) printf("sp=%d ", (int)(sp - &code_state->state[0] + 1)); mp_bytecode_print2(ip, 1, code_state->fun_bc->const_table); @@ -108,6 +109,87 @@ exc_sp--; /* pop back to previous exception handler */ \ CLEAR_SYS_EXC_INFO() /* just clear sys.exc_info(), not compliant, but it shouldn't be used in 1st place */ +#define CANCEL_ACTIVE_FINALLY(sp) do { \ + if (mp_obj_is_small_int(sp[-1])) { \ + /* Stack: (..., prev_dest_ip, prev_cause, dest_ip) */ \ + /* Cancel the unwind through the previous finally, replace with current one */ \ + sp[-2] = sp[0]; \ + sp -= 2; \ + } else { \ + assert(sp[-1] == mp_const_none || mp_obj_is_exception_instance(sp[-1])); \ + /* Stack: (..., None/exception, dest_ip) */ \ + /* Silence the finally's exception value (may be None or an exception) */ \ + sp[-1] = sp[0]; \ + --sp; \ + } \ +} while (0) + +#if MICROPY_PY_SYS_SETTRACE + +#define FRAME_SETUP() do { \ + assert(code_state != code_state->prev_state); \ + MP_STATE_THREAD(current_code_state) = code_state; \ + assert(code_state != code_state->prev_state); \ +} while(0) + +#define FRAME_ENTER() do { \ + assert(code_state != code_state->prev_state); \ + code_state->prev_state = MP_STATE_THREAD(current_code_state); \ + assert(code_state != code_state->prev_state); \ + if (!mp_prof_is_executing) { \ + mp_prof_frame_enter(code_state); \ + } \ +} while(0) + +#define FRAME_LEAVE() do { \ + assert(code_state != code_state->prev_state); \ + MP_STATE_THREAD(current_code_state) = code_state->prev_state; \ + assert(code_state != code_state->prev_state); \ +} while(0) + +#define FRAME_UPDATE() do { \ + assert(MP_STATE_THREAD(current_code_state) == code_state); \ + if (!mp_prof_is_executing) { \ + code_state->frame = MP_OBJ_TO_PTR(mp_prof_frame_update(code_state)); \ + } \ +} while(0) + +#define TRACE_TICK(current_ip, current_sp, is_exception) do { \ + assert(code_state != code_state->prev_state); \ + assert(MP_STATE_THREAD(current_code_state) == code_state); \ + if (!mp_prof_is_executing && code_state->frame && MP_STATE_THREAD(prof_trace_callback)) { \ + MP_PROF_INSTR_DEBUG_PRINT(code_state->ip); \ + } \ + if (!mp_prof_is_executing && code_state->frame && code_state->frame->callback) { \ + mp_prof_instr_tick(code_state, is_exception); \ + } \ +} while(0) + +#else // MICROPY_PY_SYS_SETTRACE +#define FRAME_SETUP() +#define FRAME_ENTER() +#define FRAME_LEAVE() +#define FRAME_UPDATE() 
+#define TRACE_TICK(current_ip, current_sp, is_exception) +#endif // MICROPY_PY_SYS_SETTRACE + +#if MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE +static inline mp_map_elem_t *mp_map_cached_lookup(mp_map_t *map, qstr qst, uint8_t *idx_cache) { + size_t idx = *idx_cache; + mp_obj_t key = MP_OBJ_NEW_QSTR(qst); + mp_map_elem_t *elem = NULL; + if (idx < map->alloc && map->table[idx].key == key) { + elem = &map->table[idx]; + } else { + elem = mp_map_lookup(map, key, MP_MAP_LOOKUP); + if (elem != NULL) { + *idx_cache = (elem - &map->table[0]) & 0xff; + } + } + return elem; +} +#endif + // fastn has items in reverse order (fastn[0] is local[0], fastn[-1] is local[1], etc) // sp points to bottom of stack which grows up // returns: @@ -128,6 +210,7 @@ mp_vm_return_kind_t mp_execute_bytecode(mp_code_state_t *code_state, volatile mp #define DISPATCH() do { \ TRACE(ip); \ MARK_EXC_IP_GLOBAL(); \ + TRACE_TICK(ip, sp, false); \ goto *entry_table[*ip++]; \ } while (0) #define DISPATCH_WITH_PEND_EXC_CHECK() goto pending_exception_check @@ -149,17 +232,24 @@ mp_vm_return_kind_t mp_execute_bytecode(mp_code_state_t *code_state, volatile mp #if MICROPY_STACKLESS run_code_state: ; #endif +FRAME_ENTER(); + +#if MICROPY_STACKLESS +run_code_state_from_return: ; +#endif +FRAME_SETUP(); + // Pointers which are constant for particular invocation of mp_execute_bytecode() mp_obj_t * /*const*/ fastn; mp_exc_stack_t * /*const*/ exc_stack; { - size_t n_state = mp_decode_uint_value(code_state->fun_bc->bytecode); + size_t n_state = code_state->n_state; fastn = &code_state->state[n_state - 1]; exc_stack = (mp_exc_stack_t*)(code_state->state + n_state); } // variables that are visible to the exception handler (declared volatile) - mp_exc_stack_t *volatile exc_sp = MP_TAGPTR_PTR(code_state->exc_sp); // stack grows up, exc_sp points to top of stack + mp_exc_stack_t *volatile exc_sp = MP_CODE_STATE_EXC_SP_IDX_TO_PTR(exc_stack, code_state->exc_sp_idx); // stack grows up, exc_sp points to top of stack #if MICROPY_PY_THREAD_GIL && MICROPY_PY_THREAD_GIL_VM_DIVISOR // This needs to be volatile and outside the VM loop so it persists across handling @@ -179,7 +269,7 @@ outer_dispatch_loop: MICROPY_VM_HOOK_INIT // If we have exception to inject, now that we finish setting up - // execution context, raise it. This works as if RAISE_VARARGS + // execution context, raise it. This works as if MP_BC_RAISE_OBJ // bytecode was executed. 
// Injecting exc into yield from generator is a special case, // handled by MP_BC_YIELD_FROM itself @@ -198,6 +288,7 @@ dispatch_loop: #else TRACE(ip); MARK_EXC_IP_GLOBAL(); + TRACE_TICK(ip, sp, false); switch (*ip++) { #endif @@ -274,19 +365,14 @@ dispatch_loop: ENTRY(MP_BC_LOAD_NAME): { MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; - mp_obj_t key = MP_OBJ_NEW_QSTR(qst); - mp_uint_t x = *ip; - if (x < mp_locals_get()->map.alloc && mp_locals_get()->map.table[x].key == key) { - PUSH(mp_locals_get()->map.table[x].value); + mp_map_elem_t *elem = mp_map_cached_lookup(&mp_locals_get()->map, qst, (uint8_t*)ip); + mp_obj_t obj; + if (elem != NULL) { + obj = elem->value; } else { - mp_map_elem_t *elem = mp_map_lookup(&mp_locals_get()->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP); - if (elem != NULL) { - *(byte*)ip = (elem - &mp_locals_get()->map.table[0]) & 0xff; - PUSH(elem->value); - } else { - PUSH(mp_load_name(MP_OBJ_QSTR_VALUE(key))); - } + obj = mp_load_name(qst); } + PUSH(obj); ip++; DISPATCH(); } @@ -303,19 +389,14 @@ dispatch_loop: ENTRY(MP_BC_LOAD_GLOBAL): { MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; - mp_obj_t key = MP_OBJ_NEW_QSTR(qst); - mp_uint_t x = *ip; - if (x < mp_globals_get()->map.alloc && mp_globals_get()->map.table[x].key == key) { - PUSH(mp_globals_get()->map.table[x].value); + mp_map_elem_t *elem = mp_map_cached_lookup(&mp_globals_get()->map, qst, (uint8_t*)ip); + mp_obj_t obj; + if (elem != NULL) { + obj = elem->value; } else { - mp_map_elem_t *elem = mp_map_lookup(&mp_globals_get()->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP); - if (elem != NULL) { - *(byte*)ip = (elem - &mp_globals_get()->map.table[0]) & 0xff; - PUSH(elem->value); - } else { - PUSH(mp_load_global(MP_OBJ_QSTR_VALUE(key))); - } + obj = mp_load_global(qst); } + PUSH(obj); ip++; DISPATCH(); } @@ -323,6 +404,7 @@ dispatch_loop: #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE ENTRY(MP_BC_LOAD_ATTR): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; SET_TOP(mp_load_attr(TOP(), qst)); @@ -330,30 +412,22 @@ dispatch_loop: } #else ENTRY(MP_BC_LOAD_ATTR): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; mp_obj_t top = TOP(); + mp_map_elem_t *elem = NULL; if (mp_obj_is_instance_type(mp_obj_get_type(top))) { mp_obj_instance_t *self = MP_OBJ_TO_PTR(top); - mp_uint_t x = *ip; - mp_obj_t key = MP_OBJ_NEW_QSTR(qst); - mp_map_elem_t *elem; - if (x < self->members.alloc && self->members.table[x].key == key) { - elem = &self->members.table[x]; - } else { - elem = mp_map_lookup(&self->members, key, MP_MAP_LOOKUP); - if (elem != NULL) { - *(byte*)ip = elem - &self->members.table[0]; - } else { - goto load_attr_cache_fail; - } - } - SET_TOP(elem->value); - ip++; - DISPATCH(); + elem = mp_map_cached_lookup(&self->members, qst, (uint8_t*)ip); } - load_attr_cache_fail: - SET_TOP(mp_load_attr(top, qst)); + mp_obj_t obj; + if (elem != NULL) { + obj = elem->value; + } else { + obj = mp_load_attr(top, qst); + } + SET_TOP(obj); ip++; DISPATCH(); } @@ -415,6 +489,7 @@ dispatch_loop: #if !MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE ENTRY(MP_BC_STORE_ATTR): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; mp_store_attr(sp[0], qst, sp[-1]); @@ -428,31 +503,20 @@ dispatch_loop: // consequence of this is that we can't use MP_MAP_LOOKUP_ADD_IF_NOT_FOUND // in the fast-path below, because that store could override a property. 
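// The LOAD_NAME, LOAD_GLOBAL and LOAD_ATTR fast paths above all use the same
// trick, now factored into mp_map_cached_lookup(): the byte that follows the
// qstr operand in the bytecode caches the table index of the last successful
// lookup, so a repeated lookup can hit that slot directly instead of searching
// again. A standalone sketch of the idea (toy linear table standing in for
// mp_map_t, illustrative only):

#include <stdio.h>
#include <string.h>
#include <stdint.h>

typedef struct { const char *key; int value; } slot_t;

static int cached_lookup(const slot_t *table, size_t len, const char *key, uint8_t *idx_cache) {
    uint8_t idx = *idx_cache;
    if (idx < len && table[idx].key != NULL && strcmp(table[idx].key, key) == 0) {
        return table[idx].value;             // cache hit: no search needed
    }
    for (size_t i = 0; i < len; ++i) {       // slow path (stands in for the real hash lookup)
        if (table[i].key != NULL && strcmp(table[i].key, key) == 0) {
            *idx_cache = (uint8_t)i;         // remember the slot for next time
            return table[i].value;
        }
    }
    return -1;
}

int main(void) {
    const slot_t globals[] = { { "foo", 1 }, { "bar", 2 }, { "len", 3 } };
    uint8_t cache = 0;                       // plays the role of the byte embedded in the bytecode
    printf("%d\n", cached_lookup(globals, 3, "len", &cache));   // slow path, caches index 2
    printf("%d\n", cached_lookup(globals, 3, "len", &cache));   // fast path via the cached index
    return 0;
}

// (The STORE_ATTR entry below reuses the same cache, with the extra care about
// properties described in the comment above.)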
ENTRY(MP_BC_STORE_ATTR): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; + mp_map_elem_t *elem = NULL; mp_obj_t top = TOP(); if (mp_obj_is_instance_type(mp_obj_get_type(top)) && sp[-1] != MP_OBJ_NULL) { mp_obj_instance_t *self = MP_OBJ_TO_PTR(top); - mp_uint_t x = *ip; - mp_obj_t key = MP_OBJ_NEW_QSTR(qst); - mp_map_elem_t *elem; - if (x < self->members.alloc && self->members.table[x].key == key) { - elem = &self->members.table[x]; - } else { - elem = mp_map_lookup(&self->members, key, MP_MAP_LOOKUP); - if (elem != NULL) { - *(byte*)ip = elem - &self->members.table[0]; - } else { - goto store_attr_cache_fail; - } - } - elem->value = sp[-1]; - sp -= 2; - ip++; - DISPATCH(); + elem = mp_map_cached_lookup(&self->members, qst, (uint8_t*)ip); + } + if (elem != NULL) { + elem->value = sp[-1]; + } else { + mp_store_attr(sp[0], qst, sp[-1]); } - store_attr_cache_fail: - mp_store_attr(sp[0], qst, sp[-1]); sp -= 2; ip++; DISPATCH(); @@ -649,21 +713,28 @@ unwind_jump:; while ((unum & 0x7f) > 0) { unum -= 1; assert(exc_sp >= exc_stack); - if (MP_TAGPTR_TAG1(exc_sp->val_sp) && exc_sp->handler > ip) { - // Getting here the stack looks like: - // (..., X, dest_ip) - // where X is pointed to by exc_sp->val_sp and in the case - // of a "with" block contains the context manager info. - // We're going to run "finally" code as a coroutine - // (not calling it recursively). Set up a sentinel - // on the stack so it can return back to us when it is - // done (when WITH_CLEANUP or END_FINALLY reached). - // The sentinel is the number of exception handlers left to - // unwind, which is a non-negative integer. - PUSH(MP_OBJ_NEW_SMALL_INT(unum)); - ip = exc_sp->handler; // get exception handler byte code address - exc_sp--; // pop exception handler - goto dispatch_loop; // run the exception handler + + if (MP_TAGPTR_TAG1(exc_sp->val_sp)) { + if (exc_sp->handler > ip) { + // Found a finally handler that isn't active; run it. + // Getting here the stack looks like: + // (..., X, dest_ip) + // where X is pointed to by exc_sp->val_sp and in the case + // of a "with" block contains the context manager info. + assert(&sp[-1] == MP_TAGPTR_PTR(exc_sp->val_sp)); + // We're going to run "finally" code as a coroutine + // (not calling it recursively). Set up a sentinel + // on the stack so it can return back to us when it is + // done (when WITH_CLEANUP or END_FINALLY reached). + // The sentinel is the number of exception handlers left to + // unwind, which is a non-negative integer. + PUSH(MP_OBJ_NEW_SMALL_INT(unum)); + ip = exc_sp->handler; + goto dispatch_loop; + } else { + // Found a finally handler that is already active; cancel it. 
+ CANCEL_ACTIVE_FINALLY(sp); + } } POP_EXC_BLOCK(); } @@ -691,9 +762,9 @@ unwind_jump:; // if TOS is None, just pops it and continues // if TOS is an integer, finishes coroutine and returns control to caller // if TOS is an exception, reraises the exception + assert(exc_sp >= exc_stack); + POP_EXC_BLOCK(); if (TOP() == mp_const_none) { - assert(exc_sp >= exc_stack); - POP_EXC_BLOCK(); sp--; } else if (mp_obj_is_small_int(TOP())) { // We finished "finally" coroutine and now dispatch back @@ -738,6 +809,7 @@ unwind_jump:; } ENTRY(MP_BC_FOR_ITER): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_ULABEL; // the jump offset if iteration finishes; for labels are always forward code_state->sp = sp; @@ -753,6 +825,12 @@ unwind_jump:; ip += ulab; // jump to after for-block } else { PUSH(value); // push the next iteration value + #if MICROPY_PY_SYS_SETTRACE + // LINE event should trigger for every iteration so invalidate last trigger + if (code_state->frame) { + code_state->frame->lineno = 0; + } + #endif } DISPATCH(); } @@ -887,6 +965,7 @@ unwind_jump:; } ENTRY(MP_BC_CALL_FUNCTION): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_UINT; // unum & 0xff == n_positional @@ -896,7 +975,7 @@ unwind_jump:; if (mp_obj_get_type(*sp) == &mp_type_fun_bc) { code_state->ip = ip; code_state->sp = sp; - code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, 0); + code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp); mp_code_state_t *new_state = mp_obj_fun_bc_prepare_codestate(*sp, unum & 0xff, (unum >> 8) & 0xff, sp + 1); #if !MICROPY_ENABLE_PYSTACK if (new_state == NULL) { @@ -921,6 +1000,7 @@ unwind_jump:; } ENTRY(MP_BC_CALL_FUNCTION_VAR_KW): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_UINT; // unum & 0xff == n_positional @@ -932,7 +1012,7 @@ unwind_jump:; if (mp_obj_get_type(*sp) == &mp_type_fun_bc) { code_state->ip = ip; code_state->sp = sp; - code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, 0); + code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp); mp_call_args_t out_args; mp_call_prepare_args_n_kw_var(false, unum, sp, &out_args); @@ -966,6 +1046,7 @@ unwind_jump:; } ENTRY(MP_BC_CALL_METHOD): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_UINT; // unum & 0xff == n_positional @@ -975,7 +1056,7 @@ unwind_jump:; if (mp_obj_get_type(*sp) == &mp_type_fun_bc) { code_state->ip = ip; code_state->sp = sp; - code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, 0); + code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp); size_t n_args = unum & 0xff; size_t n_kw = (unum >> 8) & 0xff; @@ -1004,6 +1085,7 @@ unwind_jump:; } ENTRY(MP_BC_CALL_METHOD_VAR_KW): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_UINT; // unum & 0xff == n_positional @@ -1015,7 +1097,7 @@ unwind_jump:; if (mp_obj_get_type(*sp) == &mp_type_fun_bc) { code_state->ip = ip; code_state->sp = sp; - code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, 0); + code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp); mp_call_args_t out_args; mp_call_prepare_args_n_kw_var(true, unum, sp, &out_args); @@ -1053,28 +1135,32 @@ unwind_jump:; unwind_return: // Search for and execute finally handlers that aren't already active while (exc_sp >= exc_stack) { - if (MP_TAGPTR_TAG1(exc_sp->val_sp) && exc_sp->handler > ip) { - // Found a finally handler that isn't active. - // Getting here the stack looks like: - // (..., X, [iter0, iter1, ...,] ret_val) - // where X is pointed to by exc_sp->val_sp and in the case - // of a "with" block contains the context manager info. 
- // There may be 0 or more for-iterators between X and the - // return value, and these must be removed before control can - // pass to the finally code. We simply copy the ret_value down - // over these iterators, if they exist. If they don't then the - // following is a null operation. - mp_obj_t *finally_sp = MP_TAGPTR_PTR(exc_sp->val_sp); - finally_sp[1] = sp[0]; - sp = &finally_sp[1]; - // We're going to run "finally" code as a coroutine - // (not calling it recursively). Set up a sentinel - // on a stack so it can return back to us when it is - // done (when WITH_CLEANUP or END_FINALLY reached). - PUSH(MP_OBJ_NEW_SMALL_INT(-1)); - ip = exc_sp->handler; - POP_EXC_BLOCK(); - goto dispatch_loop; + if (MP_TAGPTR_TAG1(exc_sp->val_sp)) { + if (exc_sp->handler > ip) { + // Found a finally handler that isn't active; run it. + // Getting here the stack looks like: + // (..., X, [iter0, iter1, ...,] ret_val) + // where X is pointed to by exc_sp->val_sp and in the case + // of a "with" block contains the context manager info. + // There may be 0 or more for-iterators between X and the + // return value, and these must be removed before control can + // pass to the finally code. We simply copy the ret_value down + // over these iterators, if they exist. If they don't then the + // following is a null operation. + mp_obj_t *finally_sp = MP_TAGPTR_PTR(exc_sp->val_sp); + finally_sp[1] = sp[0]; + sp = &finally_sp[1]; + // We're going to run "finally" code as a coroutine + // (not calling it recursively). Set up a sentinel + // on a stack so it can return back to us when it is + // done (when WITH_CLEANUP or END_FINALLY reached). + PUSH(MP_OBJ_NEW_SMALL_INT(-1)); + ip = exc_sp->handler; + goto dispatch_loop; + } else { + // Found a finally handler that is already active; cancel it. 
+ CANCEL_ACTIVE_FINALLY(sp); + } } POP_EXC_BLOCK(); } @@ -1096,37 +1182,39 @@ unwind_return: #endif code_state = new_code_state; *code_state->sp = res; - goto run_code_state; + goto run_code_state_from_return; } #endif + FRAME_LEAVE(); return MP_VM_RETURN_NORMAL; - ENTRY(MP_BC_RAISE_VARARGS): { + ENTRY(MP_BC_RAISE_LAST): { MARK_EXC_IP_SELECTIVE(); - mp_uint_t unum = *ip; - mp_obj_t obj; - if (unum == 2) { - mp_warning(NULL, "exception chaining not supported"); - // ignore (pop) "from" argument - sp--; - } - if (unum == 0) { - // search for the inner-most previous exception, to reraise it - obj = MP_OBJ_NULL; - for (mp_exc_stack_t *e = exc_sp; e >= exc_stack; e--) { - if (e->prev_exc != NULL) { - obj = MP_OBJ_FROM_PTR(e->prev_exc); - break; - } + // search for the inner-most previous exception, to reraise it + mp_obj_t obj = MP_OBJ_NULL; + for (mp_exc_stack_t *e = exc_sp; e >= exc_stack; --e) { + if (e->prev_exc != NULL) { + obj = MP_OBJ_FROM_PTR(e->prev_exc); + break; } - if (obj == MP_OBJ_NULL) { - obj = mp_obj_new_exception_msg(&mp_type_RuntimeError, "no active exception to reraise"); - RAISE(obj); - } - } else { - obj = TOP(); } - obj = mp_make_raise_obj(obj); + if (obj == MP_OBJ_NULL) { + obj = mp_obj_new_exception_msg(&mp_type_RuntimeError, "no active exception to reraise"); + } + RAISE(obj); + } + + ENTRY(MP_BC_RAISE_OBJ): { + MARK_EXC_IP_SELECTIVE(); + mp_obj_t obj = mp_make_raise_obj(TOP()); + RAISE(obj); + } + + ENTRY(MP_BC_RAISE_FROM): { + MARK_EXC_IP_SELECTIVE(); + mp_warning(NULL, "exception chaining not supported"); + sp--; // ignore (pop) "from" argument + mp_obj_t obj = mp_make_raise_obj(TOP()); RAISE(obj); } @@ -1135,7 +1223,8 @@ yield: nlr_pop(); code_state->ip = ip; code_state->sp = sp; - code_state->exc_sp = MP_TAGPTR_MAKE(exc_sp, 0); + code_state->exc_sp_idx = MP_CODE_STATE_EXC_SP_IDX_FROM_PTR(exc_stack, exc_sp); + FRAME_LEAVE(); return MP_VM_RETURN_YIELD; ENTRY(MP_BC_YIELD_FROM): { @@ -1177,21 +1266,15 @@ yield: DISPATCH(); } else { assert(ret_kind == MP_VM_RETURN_EXCEPTION); + assert(!EXC_MATCH(ret_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration))); // Pop exhausted gen sp--; - if (EXC_MATCH(ret_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration))) { - PUSH(mp_obj_exception_get_value(ret_value)); - // If we injected GeneratorExit downstream, then even - // if it was swallowed, we re-raise GeneratorExit - GENERATOR_EXIT_IF_NEEDED(t_exc); - DISPATCH(); - } else { - RAISE(ret_value); - } + RAISE(ret_value); } } ENTRY(MP_BC_IMPORT_NAME): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; mp_obj_t obj = POP(); @@ -1200,6 +1283,7 @@ yield: } ENTRY(MP_BC_IMPORT_FROM): { + FRAME_UPDATE(); MARK_EXC_IP_SELECTIVE(); DECODE_QSTR; mp_obj_t obj = mp_import_from(TOP(), qst); @@ -1214,7 +1298,7 @@ yield: #if MICROPY_OPT_COMPUTED_GOTO ENTRY(MP_BC_LOAD_CONST_SMALL_INT_MULTI): - PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - 16)); + PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS)); DISPATCH(); ENTRY(MP_BC_LOAD_FAST_MULTI): @@ -1242,19 +1326,19 @@ yield: MARK_EXC_IP_SELECTIVE(); #else ENTRY_DEFAULT: - if (ip[-1] < MP_BC_LOAD_CONST_SMALL_INT_MULTI + 64) { - PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - 16)); + if (ip[-1] < MP_BC_LOAD_CONST_SMALL_INT_MULTI + MP_BC_LOAD_CONST_SMALL_INT_MULTI_NUM) { + PUSH(MP_OBJ_NEW_SMALL_INT((mp_int_t)ip[-1] - MP_BC_LOAD_CONST_SMALL_INT_MULTI - MP_BC_LOAD_CONST_SMALL_INT_MULTI_EXCESS)); DISPATCH(); - } else if (ip[-1] 
< MP_BC_LOAD_FAST_MULTI + 16) { + } else if (ip[-1] < MP_BC_LOAD_FAST_MULTI + MP_BC_LOAD_FAST_MULTI_NUM) { obj_shared = fastn[MP_BC_LOAD_FAST_MULTI - (mp_int_t)ip[-1]]; goto load_check; - } else if (ip[-1] < MP_BC_STORE_FAST_MULTI + 16) { + } else if (ip[-1] < MP_BC_STORE_FAST_MULTI + MP_BC_STORE_FAST_MULTI_NUM) { fastn[MP_BC_STORE_FAST_MULTI - (mp_int_t)ip[-1]] = POP(); DISPATCH(); - } else if (ip[-1] < MP_BC_UNARY_OP_MULTI + MP_UNARY_OP_NUM_BYTECODE) { + } else if (ip[-1] < MP_BC_UNARY_OP_MULTI + MP_BC_UNARY_OP_MULTI_NUM) { SET_TOP(mp_unary_op(ip[-1] - MP_BC_UNARY_OP_MULTI, TOP())); DISPATCH(); - } else if (ip[-1] < MP_BC_BINARY_OP_MULTI + MP_BINARY_OP_NUM_BYTECODE) { + } else if (ip[-1] < MP_BC_BINARY_OP_MULTI + MP_BC_BINARY_OP_MULTI_NUM) { mp_obj_t rhs = POP(); mp_obj_t lhs = TOP(); SET_TOP(mp_binary_op(ip[-1] - MP_BC_BINARY_OP_MULTI, lhs, rhs)); @@ -1262,9 +1346,11 @@ yield: } else #endif { - mp_obj_t obj = mp_obj_new_exception_msg(&mp_type_NotImplementedError, "byte code not implemented"); + + mp_obj_t obj = mp_obj_new_exception_msg(&mp_type_NotImplementedError, "opcode"); nlr_pop(); code_state->state[0] = obj; + FRAME_LEAVE(); return MP_VM_RETURN_EXCEPTION; } @@ -1355,24 +1441,32 @@ exception_handler: } } + #if MICROPY_PY_SYS_SETTRACE + // Exceptions are traced here + if (mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(((mp_obj_base_t*)nlr.ret_val)->type), MP_OBJ_FROM_PTR(&mp_type_Exception))) { + TRACE_TICK(code_state->ip, code_state->sp, true /* yes, it's an exception */); + } + #endif + #if MICROPY_STACKLESS unwind_loop: #endif - // set file and line number that the exception occurred at - // TODO: don't set traceback for exceptions re-raised by END_FINALLY. - // But consider how to handle nested exceptions. - if (nlr.ret_val != &mp_const_GeneratorExit_obj) { + // Set traceback info (file and line number) where the exception occurred, but not for: + // - constant GeneratorExit object, because it's const + // - exceptions re-raised by END_FINALLY + // - exceptions re-raised explicitly by "raise" + if (nlr.ret_val != &mp_const_GeneratorExit_obj + && *code_state->ip != MP_BC_END_FINALLY + && *code_state->ip != MP_BC_RAISE_LAST) { const byte *ip = code_state->fun_bc->bytecode; - ip = mp_decode_uint_skip(ip); // skip n_state - ip = mp_decode_uint_skip(ip); // skip n_exc_stack - ip++; // skip scope_params - ip++; // skip n_pos_args - ip++; // skip n_kwonly_args - ip++; // skip n_def_pos_args - size_t bc = code_state->ip - ip; - size_t code_info_size = mp_decode_uint_value(ip); - ip = mp_decode_uint_skip(ip); // skip code_info_size - bc -= code_info_size; + MP_BC_PRELUDE_SIG_DECODE(ip); + MP_BC_PRELUDE_SIZE_DECODE(ip); + const byte *bytecode_start = ip + n_info + n_cell; + #if !MICROPY_PERSISTENT_CODE + // so bytecode is aligned + bytecode_start = MP_ALIGN(bytecode_start, sizeof(mp_uint_t)); + #endif + size_t bc = code_state->ip - bytecode_start; #if MICROPY_PERSISTENT_CODE qstr block_name = ip[0] | (ip[1] << 8); qstr source_file = ip[2] | (ip[3] << 8); @@ -1383,29 +1477,7 @@ unwind_loop: qstr source_file = mp_decode_uint_value(ip); ip = mp_decode_uint_skip(ip); #endif - size_t source_line = 1; - size_t c; - while ((c = *ip)) { - size_t b, l; - if ((c & 0x80) == 0) { - // 0b0LLBBBBB encoding - b = c & 0x1f; - l = c >> 5; - ip += 1; - } else { - // 0b1LLLBBBB 0bLLLLLLLL encoding (l's LSB in second byte) - b = c & 0xf; - l = ((c << 4) & 0x700) | ip[1]; - ip += 2; - } - if (bc >= b) { - bc -= b; - source_line += l; - } else { - // found source line corresponding to bytecode offset - break; - } - } 
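// The loop removed above walked MicroPython's compressed line-number table by
// hand; the replacement line below delegates the same walk to
// mp_bytecode_get_source_line(). Each table entry advances the bytecode offset
// by B and the source line by L, packed either as 0b0LLBBBBB in one byte or as
// 0b1LLLBBBB 0bLLLLLLLL in two. A standalone sketch of that decoding
// (illustrative only, not the real bc.c helper):

#include <stdio.h>

static unsigned source_line_for(const unsigned char *table, unsigned bc) {
    unsigned line = 1;
    for (unsigned c; (c = *table) != 0;) {
        unsigned b, l;
        if ((c & 0x80) == 0) {            // 0b0LLBBBBB: 5 offset bits, 2 line bits
            b = c & 0x1f;
            l = c >> 5;
            table += 1;
        } else {                          // 0b1LLLBBBB 0bLLLLLLLL: 4 offset bits, 11 line bits
            b = c & 0xf;
            l = ((c << 4) & 0x700) | table[1];
            table += 2;
        }
        if (bc < b) {
            break;                        // target offset lies within this entry
        }
        bc -= b;
        line += l;
    }
    return line;
}

int main(void) {
    // Two entries: (offset += 4, line += 1) = 0x24, then (offset += 6, line += 2) = 0x46.
    const unsigned char table[] = { 0x24, 0x46, 0x00 };
    printf("%u\n", source_line_for(table, 3));   // still inside the first entry -> line 1
    printf("%u\n", source_line_for(table, 5));   // past the first entry -> line 2
    return 0;
}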
+ size_t source_line = mp_bytecode_get_source_line(ip, bc); mp_obj_exception_add_traceback(MP_OBJ_FROM_PTR(nlr.ret_val), source_file, source_line, block_name); } @@ -1444,11 +1516,11 @@ unwind_loop: mp_nonlocal_free(code_state, sizeof(mp_code_state_t)); #endif code_state = new_code_state; - size_t n_state = mp_decode_uint_value(code_state->fun_bc->bytecode); + size_t n_state = code_state->n_state; fastn = &code_state->state[n_state - 1]; exc_stack = (mp_exc_stack_t*)(code_state->state + n_state); // variables that are visible to the exception handler (declared volatile) - exc_sp = MP_TAGPTR_PTR(code_state->exc_sp); // stack grows up, exc_sp points to top of stack + exc_sp = MP_CODE_STATE_EXC_SP_IDX_TO_PTR(exc_stack, code_state->exc_sp_idx); // stack grows up, exc_sp points to top of stack goto unwind_loop; #endif @@ -1456,6 +1528,7 @@ unwind_loop: // propagate exception to higher level // Note: ip and sp don't have usable values at this point code_state->state[0] = MP_OBJ_FROM_PTR(nlr.ret_val); // put exception here because sp is invalid + FRAME_LEAVE(); return MP_VM_RETURN_EXCEPTION; } } diff --git a/python/src/py/vmentrytable.h b/python/src/py/vmentrytable.h index 641c8ee42..7f55a4806 100644 --- a/python/src/py/vmentrytable.h +++ b/python/src/py/vmentrytable.h @@ -99,17 +99,19 @@ static const void *const entry_table[256] = { [MP_BC_CALL_METHOD] = &&entry_MP_BC_CALL_METHOD, [MP_BC_CALL_METHOD_VAR_KW] = &&entry_MP_BC_CALL_METHOD_VAR_KW, [MP_BC_RETURN_VALUE] = &&entry_MP_BC_RETURN_VALUE, - [MP_BC_RAISE_VARARGS] = &&entry_MP_BC_RAISE_VARARGS, + [MP_BC_RAISE_LAST] = &&entry_MP_BC_RAISE_LAST, + [MP_BC_RAISE_OBJ] = &&entry_MP_BC_RAISE_OBJ, + [MP_BC_RAISE_FROM] = &&entry_MP_BC_RAISE_FROM, [MP_BC_YIELD_VALUE] = &&entry_MP_BC_YIELD_VALUE, [MP_BC_YIELD_FROM] = &&entry_MP_BC_YIELD_FROM, [MP_BC_IMPORT_NAME] = &&entry_MP_BC_IMPORT_NAME, [MP_BC_IMPORT_FROM] = &&entry_MP_BC_IMPORT_FROM, [MP_BC_IMPORT_STAR] = &&entry_MP_BC_IMPORT_STAR, - [MP_BC_LOAD_CONST_SMALL_INT_MULTI ... MP_BC_LOAD_CONST_SMALL_INT_MULTI + 63] = &&entry_MP_BC_LOAD_CONST_SMALL_INT_MULTI, - [MP_BC_LOAD_FAST_MULTI ... MP_BC_LOAD_FAST_MULTI + 15] = &&entry_MP_BC_LOAD_FAST_MULTI, - [MP_BC_STORE_FAST_MULTI ... MP_BC_STORE_FAST_MULTI + 15] = &&entry_MP_BC_STORE_FAST_MULTI, - [MP_BC_UNARY_OP_MULTI ... MP_BC_UNARY_OP_MULTI + MP_UNARY_OP_NUM_BYTECODE - 1] = &&entry_MP_BC_UNARY_OP_MULTI, - [MP_BC_BINARY_OP_MULTI ... MP_BC_BINARY_OP_MULTI + MP_BINARY_OP_NUM_BYTECODE - 1] = &&entry_MP_BC_BINARY_OP_MULTI, + [MP_BC_LOAD_CONST_SMALL_INT_MULTI ... MP_BC_LOAD_CONST_SMALL_INT_MULTI + MP_BC_LOAD_CONST_SMALL_INT_MULTI_NUM - 1] = &&entry_MP_BC_LOAD_CONST_SMALL_INT_MULTI, + [MP_BC_LOAD_FAST_MULTI ... MP_BC_LOAD_FAST_MULTI + MP_BC_LOAD_FAST_MULTI_NUM - 1] = &&entry_MP_BC_LOAD_FAST_MULTI, + [MP_BC_STORE_FAST_MULTI ... MP_BC_STORE_FAST_MULTI + MP_BC_STORE_FAST_MULTI_NUM - 1] = &&entry_MP_BC_STORE_FAST_MULTI, + [MP_BC_UNARY_OP_MULTI ... MP_BC_UNARY_OP_MULTI + MP_BC_UNARY_OP_MULTI_NUM - 1] = &&entry_MP_BC_UNARY_OP_MULTI, + [MP_BC_BINARY_OP_MULTI ... MP_BC_BINARY_OP_MULTI + MP_BC_BINARY_OP_MULTI_NUM - 1] = &&entry_MP_BC_BINARY_OP_MULTI, }; #if __clang__