Introduce, use getters for fields which will be optional in types
Parent: 23baa02c11
Commit: cd119ca3ec
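The whole change follows one mechanical pattern: call sites stop dereferencing a function-pointer slot of mp_obj_type_t directly and instead fetch it through a small getter, so the slot's storage can later become optional. A minimal standalone sketch of the before/after shape (the len_of_maybe_* helper names are illustrative only; the getter is the one added in py/obj.c below):

    #include "py/obj.h"

    // Before: caller reads the type slot directly.
    static mp_obj_t len_of_maybe_old(mp_obj_t o_in) {
        const mp_obj_type_t *type = mp_obj_get_type(o_in);
        if (type->unary_op != NULL) {
            return type->unary_op(MP_UNARY_OP_LEN, o_in);
        }
        return MP_OBJ_NULL;
    }

    // After: caller goes through the getter, which is free to change how
    // (or whether) the slot is stored without touching this code.
    static mp_obj_t len_of_maybe_new(mp_obj_t o_in) {
        const mp_obj_type_t *type = mp_obj_get_type(o_in);
        mp_unary_op_fun_t unary_op = mp_type_unary_op(type);
        if (unary_op != NULL) {
            return unary_op(MP_UNARY_OP_LEN, o_in);
        }
        return MP_OBJ_NULL;
    }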
@@ -455,10 +455,12 @@ STATIC mp_obj_t ffifunc_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const
         } else if (mp_obj_is_str(a)) {
             const char *s = mp_obj_str_get_str(a);
             values[i].ffi = (ffi_arg)(intptr_t)s;
-        } else if (((mp_obj_base_t *)MP_OBJ_TO_PTR(a))->type->buffer_p.get_buffer != NULL) {
+        } else {
+            mp_getbuffer_fun_t get_buffer = mp_type_getbuffer(((mp_obj_base_t *)MP_OBJ_TO_PTR(a))->type);
+            if (get_buffer != NULL) {
             mp_obj_base_t *o = (mp_obj_base_t *)MP_OBJ_TO_PTR(a);
             mp_buffer_info_t bufinfo;
-            int ret = o->type->buffer_p.get_buffer(MP_OBJ_FROM_PTR(o), &bufinfo, MP_BUFFER_READ); // TODO: MP_BUFFER_READ?
+            int ret = get_buffer(MP_OBJ_FROM_PTR(o), &bufinfo, MP_BUFFER_READ); // TODO: MP_BUFFER_READ?
             if (ret != 0) {
                 goto error;
             }
@@ -469,6 +471,7 @@ STATIC mp_obj_t ffifunc_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const
             } else {
                 goto error;
             }
+            }
         valueptrs[i] = &values[i];
     }
 
@@ -140,7 +140,9 @@ static inline mp_obj_t mp_obj_cast_to_native_base_dyn(mp_obj_t self_in, mp_const
 
     if (MP_OBJ_FROM_PTR(self_type) == native_type) {
         return self_in;
-    } else if (self_type->parent != native_type) {
+    }
+    mp_parent_t parent = mp_type_parent(self_type);
+    if (parent != native_type) {
         // The self_in object is not a direct descendant of native_type, so fail the cast.
         // This is a very simple version of mp_obj_is_subclass_fast that could be improved.
         return MP_OBJ_NULL;
py/obj.c (83 changed lines)
@@ -195,8 +195,9 @@ bool PLACE_IN_ITCM(mp_obj_is_true)(mp_obj_t arg) {
         }
     } else {
         const mp_obj_type_t *type = mp_obj_get_type(arg);
-        if (type->unary_op != NULL) {
-            mp_obj_t result = type->unary_op(MP_UNARY_OP_BOOL, arg);
+        mp_unary_op_fun_t unary_op = mp_type_unary_op(type);
+        if (unary_op) {
+            mp_obj_t result = unary_op(MP_UNARY_OP_BOOL, arg);
             if (result != MP_OBJ_NULL) {
                 return result == mp_const_true;
             }
@@ -214,7 +215,7 @@ bool PLACE_IN_ITCM(mp_obj_is_true)(mp_obj_t arg) {
 }
 
 bool mp_obj_is_callable(mp_obj_t o_in) {
-    const mp_call_fun_t call = mp_obj_get_type(o_in)->call;
+    const mp_call_fun_t call = mp_type_call(mp_obj_get_type(o_in));
     if (call != mp_obj_instance_call) {
         return call != NULL;
     }
@@ -281,19 +282,20 @@ mp_obj_t mp_obj_equal_not_equal(mp_binary_op_t op, mp_obj_t o1, mp_obj_t o2) {
         const mp_obj_type_t *type = mp_obj_get_type(o1);
         // If a full equality test is not needed and the other object is a different
         // type then we don't need to bother trying the comparison.
-        if (type->binary_op != NULL &&
+        mp_binary_op_fun_t binary_op = mp_type_binary_op(type);
+        if (binary_op != NULL &&
             ((type->flags & MP_TYPE_FLAG_EQ_CHECKS_OTHER_TYPE) || mp_obj_get_type(o2) == type)) {
             // CPython is asymmetric: it will try __eq__ if there's no __ne__ but not the
             // other way around. If the class doesn't need a full test we can skip __ne__.
             if (op == MP_BINARY_OP_NOT_EQUAL && (type->flags & MP_TYPE_FLAG_EQ_HAS_NEQ_TEST)) {
-                mp_obj_t r = type->binary_op(MP_BINARY_OP_NOT_EQUAL, o1, o2);
+                mp_obj_t r = binary_op(MP_BINARY_OP_NOT_EQUAL, o1, o2);
                 if (r != MP_OBJ_NULL) {
                     return r;
                 }
             }
 
             // Try calling __eq__.
-            mp_obj_t r = type->binary_op(MP_BINARY_OP_EQUAL, o1, o2);
+            mp_obj_t r = binary_op(MP_BINARY_OP_EQUAL, o1, o2);
             if (r != MP_OBJ_NULL) {
                 if (op == MP_BINARY_OP_EQUAL) {
                     return r;
@@ -556,8 +558,9 @@ mp_obj_t mp_obj_len_maybe(mp_obj_t o_in) {
         return MP_OBJ_NEW_SMALL_INT(l);
     } else {
         const mp_obj_type_t *type = mp_obj_get_type(o_in);
-        if (type->unary_op != NULL) {
-            return type->unary_op(MP_UNARY_OP_LEN, o_in);
+        mp_unary_op_fun_t unary_op = mp_type_unary_op(type);
+        if (unary_op != NULL) {
+            return unary_op(MP_UNARY_OP_LEN, o_in);
         } else {
             return MP_OBJ_NULL;
         }
@@ -566,8 +569,9 @@ mp_obj_t mp_obj_len_maybe(mp_obj_t o_in) {
 
 mp_obj_t mp_obj_subscr(mp_obj_t base, mp_obj_t index, mp_obj_t value) {
     const mp_obj_type_t *type = mp_obj_get_type(base);
-    if (type->subscr != NULL) {
-        mp_obj_t ret = type->subscr(base, index, value);
+    mp_subscr_fun_t subscr = mp_type_subscr(type);
+    if (subscr != NULL) {
+        mp_obj_t ret = subscr(base, index, value);
         // May have called port specific C code. Make sure it didn't mess up the heap.
         assert_heap_ok();
         if (ret != MP_OBJ_NULL) {
@@ -620,7 +624,7 @@ typedef struct {
 STATIC mp_obj_t generic_it_iternext(mp_obj_t self_in) {
     mp_obj_generic_it_t *self = MP_OBJ_TO_PTR(self_in);
     const mp_obj_type_t *type = mp_obj_get_type(self->obj);
-    mp_obj_t current_length = type->unary_op(MP_UNARY_OP_LEN, self->obj);
+    mp_obj_t current_length = mp_type_unary_op(type)(MP_UNARY_OP_LEN, self->obj);
     if (self->cur < MP_OBJ_SMALL_INT_VALUE(current_length)) {
         mp_obj_t o_out = type->subscr(self->obj, MP_OBJ_NEW_SMALL_INT(self->cur), MP_OBJ_SENTINEL);
         self->cur += 1;
@@ -642,10 +646,11 @@ mp_obj_t mp_obj_new_generic_iterator(mp_obj_t obj, mp_obj_iter_buf_t *iter_buf)
 
 bool mp_get_buffer(mp_obj_t obj, mp_buffer_info_t *bufinfo, mp_uint_t flags) {
     const mp_obj_type_t *type = mp_obj_get_type(obj);
-    if (type->buffer_p.get_buffer == NULL) {
+    const mp_getbuffer_fun_t get_buffer = mp_type_getbuffer(type);
+    if (get_buffer == NULL) {
         return false;
     }
-    int ret = type->buffer_p.get_buffer(obj, bufinfo, flags);
+    int ret = get_buffer(obj, bufinfo, flags);
     if (ret != 0) {
         return false;
     }
@@ -666,3 +671,55 @@ mp_obj_t mp_generic_unary_op(mp_unary_op_t op, mp_obj_t o_in) {
         return MP_OBJ_NULL; // op not supported
     }
 }
+
+mp_call_fun_t mp_type_call(const mp_obj_type_t *type) {
+    return type->call;
+}
+
+mp_unary_op_fun_t mp_type_unary_op(const mp_obj_type_t *type) {
+    return type->unary_op;
+}
+
+
+mp_binary_op_fun_t mp_type_binary_op(const mp_obj_type_t *type) {
+    return type->binary_op;
+}
+
+
+mp_attr_fun_t mp_type_attr(const mp_obj_type_t *type) {
+    return type->attr;
+}
+
+
+mp_subscr_fun_t mp_type_subscr(const mp_obj_type_t *type) {
+    return type->subscr;
+}
+
+
+mp_getiter_fun_t mp_type_getiter(const mp_obj_type_t *type) {
+    return type->getiter;
+}
+
+
+mp_fun_1_t mp_type_iternext(const mp_obj_type_t *type) {
+    return type->iternext;
+}
+
+
+mp_getbuffer_fun_t mp_type_getbuffer(const mp_obj_type_t *type) {
+    return type->buffer_p.get_buffer;
+}
+
+
+const void *mp_type_protocol(const mp_obj_type_t *type) {
+    return type->protocol;
+}
+
+
+const void *mp_type_parent(const mp_obj_type_t *type) {
+    return type->parent;
+}
+
+size_t mp_type_size(const mp_obj_type_t *type) {
+    return sizeof(mp_obj_type_t);
+}
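Each getter above is currently a one-line passthrough, so the commit is behaviour-neutral; the point, per the commit title, is that when a slot does become optional only the getter body has to change. A purely hypothetical sketch of that later step (not part of this commit; the HYPOTHETICAL_* names are placeholders, not real config options):

    // Hypothetical later revision: if unary_op were guarded by a flag or
    // moved out of the base struct, the getter could hide that, and every
    // call site updated in this commit would keep working unchanged.
    mp_unary_op_fun_t mp_type_unary_op(const mp_obj_type_t *type) {
        #if HYPOTHETICAL_OPTIONAL_SLOTS // placeholder macro, not a real option
        if (!(type->flags & HYPOTHETICAL_FLAG_HAS_UNARY_OP)) {
            return NULL; // slot not present on this type
        }
        #endif
        return type->unary_op;
    }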
py/obj.h (19 changed lines)
@@ -534,6 +534,8 @@ typedef struct _mp_obj_iter_buf_t {
 // It's rounded up in case mp_obj_base_t is smaller than mp_obj_t (eg for OBJ_REPR_D).
 #define MP_OBJ_ITER_BUF_NSLOTS ((sizeof(mp_obj_iter_buf_t) + sizeof(mp_obj_t) - 1) / sizeof(mp_obj_t))
 
+struct _mp_buffer_info_t;
+
 typedef void (*mp_print_fun_t)(const mp_print_t *print, mp_obj_t o, mp_print_kind_t kind);
 typedef mp_obj_t (*mp_make_new_fun_t)(const mp_obj_type_t *type, size_t n_args, const mp_obj_t *args, mp_map_t *kw_args);
 typedef mp_obj_t (*mp_call_fun_t)(mp_obj_t fun, size_t n_args, size_t n_kw, const mp_obj_t *args);
@@ -542,6 +544,7 @@ typedef mp_obj_t (*mp_binary_op_fun_t)(mp_binary_op_t op, mp_obj_t, mp_obj_t);
 typedef void (*mp_attr_fun_t)(mp_obj_t self_in, qstr attr, mp_obj_t *dest);
 typedef mp_obj_t (*mp_subscr_fun_t)(mp_obj_t self_in, mp_obj_t index, mp_obj_t value);
 typedef mp_obj_t (*mp_getiter_fun_t)(mp_obj_t self_in, mp_obj_iter_buf_t *iter_buf);
+typedef mp_int_t (*mp_getbuffer_fun_t)(mp_obj_t obj, struct _mp_buffer_info_t *bufinfo, mp_uint_t flags);
 
 // Buffer protocol
 typedef struct _mp_buffer_info_t {
@@ -627,6 +630,18 @@ struct _mp_obj_type_t {
     struct _mp_obj_dict_t *locals_dict;
 };
 
+extern size_t mp_type_size(const mp_obj_type_t *);
+extern mp_call_fun_t mp_type_call(const mp_obj_type_t *);
+extern mp_unary_op_fun_t mp_type_unary_op(const mp_obj_type_t *);
+extern mp_binary_op_fun_t mp_type_binary_op(const mp_obj_type_t *);
+extern mp_attr_fun_t mp_type_attr(const mp_obj_type_t *);
+extern mp_subscr_fun_t mp_type_subscr(const mp_obj_type_t *);
+extern mp_getiter_fun_t mp_type_getiter(const mp_obj_type_t *);
+extern mp_fun_1_t mp_type_iternext(const mp_obj_type_t *);
+extern mp_getbuffer_fun_t mp_type_getbuffer(const mp_obj_type_t *);
+extern const void *mp_type_protocol(const mp_obj_type_t *);
+extern const void *mp_type_parent(const mp_obj_type_t *);
+
 // Constant types, globally accessible
 extern const mp_obj_type_t mp_type_type;
 extern const mp_obj_type_t mp_type_object;
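With the declarations above exported from py/obj.h, user C modules can use the same getters instead of reaching into mp_obj_type_t. A short sketch mirroring the ffifunc_call change at the top of this commit (the helper name is illustrative, not part of the API):

    #include <stdbool.h>
    #include "py/obj.h"

    // True if the object's type implements the buffer protocol, checked
    // via the getter rather than by poking type->buffer_p directly.
    static bool obj_supports_buffer_protocol(mp_obj_t obj) {
        const mp_obj_type_t *type = mp_obj_get_type(obj);
        return mp_type_getbuffer(type) != NULL;
    }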
@@ -761,11 +776,11 @@ extern const struct _mp_obj_exception_t mp_const_GeneratorExit_obj;
 #endif
 #define mp_obj_is_int(o) (mp_obj_is_small_int(o) || mp_obj_is_type(o, &mp_type_int))
 #define mp_obj_is_str(o) (mp_obj_is_qstr(o) || mp_obj_is_type(o, &mp_type_str))
-#define mp_obj_is_str_or_bytes(o) (mp_obj_is_qstr(o) || (mp_obj_is_obj(o) && ((mp_obj_base_t *)MP_OBJ_TO_PTR(o))->type->binary_op == mp_obj_str_binary_op))
+#define mp_obj_is_str_or_bytes(o) (mp_obj_is_qstr(o) || (mp_obj_is_obj(o) && mp_type_binary_op(((mp_obj_base_t *)MP_OBJ_TO_PTR(o))->type) == mp_obj_str_binary_op))
 #define mp_obj_is_dict_or_ordereddict(o) (mp_obj_is_obj(o) && ((mp_obj_base_t *)MP_OBJ_TO_PTR(o))->type->make_new == mp_obj_dict_make_new)
 #define mp_obj_is_fun(o) (mp_obj_is_obj(o) && (((mp_obj_base_t *)MP_OBJ_TO_PTR(o))->type->name == MP_QSTR_function))
 // type check is done on getiter method to allow tuple, namedtuple, attrtuple
-#define mp_obj_is_tuple_compatible(o) (mp_obj_get_type(o)->getiter == mp_obj_tuple_getiter)
+#define mp_obj_is_tuple_compatible(o) (mp_type_getiter(mp_obj_get_type(o)) == mp_obj_tuple_getiter)
 
 mp_obj_t mp_obj_new_type(qstr name, mp_obj_t bases_tuple, mp_obj_t locals_dict);
 static inline mp_obj_t mp_obj_new_bool(mp_int_t x) {
@@ -564,7 +564,7 @@ STATIC mp_obj_t array_subscr(mp_obj_t self_in, mp_obj_t index_in, mp_obj_t value
             size_t src_len;
             void *src_items;
             size_t item_sz = mp_binary_get_size('@', o->typecode & TYPECODE_MASK, NULL);
-            if (mp_obj_is_obj(value) && ((mp_obj_base_t *)MP_OBJ_TO_PTR(value))->type->subscr == array_subscr) {
+            if (mp_obj_is_obj(value) && mp_type_subscr(((mp_obj_base_t *)MP_OBJ_TO_PTR(value))->type) == array_subscr) {
                 // value is array, bytearray or memoryview
                 mp_obj_array_t *src_slice = MP_OBJ_TO_PTR(value);
                 if (item_sz != mp_binary_get_size('@', src_slice->typecode & TYPECODE_MASK, NULL)) {
@@ -108,7 +108,7 @@ STATIC mp_obj_t tuple_cmp_helper(mp_uint_t op, mp_obj_t self_in, mp_obj_t anothe
     mp_check_self(mp_obj_is_tuple_compatible(self_in));
     const mp_obj_type_t *another_type = mp_obj_get_type(another_in);
     mp_obj_tuple_t *self = MP_OBJ_TO_PTR(self_in);
-    if (another_type->getiter != mp_obj_tuple_getiter) {
+    if (mp_type_getiter(another_type) != mp_obj_tuple_getiter) {
         // Slow path for user subclasses
         another_in = mp_obj_cast_to_native_base(another_in, MP_OBJ_FROM_PTR(&mp_type_tuple));
         if (another_in == MP_OBJ_NULL) {
@@ -184,7 +184,7 @@ mp_obj_t mp_obj_tuple_subscr(mp_obj_t self_in, mp_obj_t index, mp_obj_t value) {
         // load
         mp_obj_tuple_t *self = MP_OBJ_TO_PTR(self_in);
         // when called with a native type (eg namedtuple) using mp_obj_tuple_subscr, get the native self
-        if (self->base.type->subscr != &mp_obj_tuple_subscr) {
+        if (mp_type_subscr(self->base.type) != &mp_obj_tuple_subscr) {
             self = mp_obj_cast_to_native_base(self_in, &mp_type_tuple);
         }
 
py/objtype.c (53 changed lines)
@@ -59,17 +59,20 @@ STATIC int instance_count_native_bases(const mp_obj_type_t *type, const mp_obj_t
         if (type == &mp_type_object) {
             // Not a "real" type, end search here.
             return count;
-        } else if (mp_obj_is_native_type(type)) {
+        }
+        if (mp_obj_is_native_type(type)) {
             // Native types don't have parents (at least not from our perspective) so end.
             *last_native_base = type;
             return count + 1;
-        } else if (type->parent == NULL) {
+        }
+        const void *parent = mp_type_parent(type);
+        if (parent == NULL) {
             // No parents so end search here.
             return count;
         #if MICROPY_MULTIPLE_INHERITANCE
-        } else if (((mp_obj_base_t *)type->parent)->type == &mp_type_tuple) {
+        } else if (((mp_obj_base_t *)parent)->type == &mp_type_tuple) {
             // Multiple parents, search through them all recursively.
-            const mp_obj_tuple_t *parent_tuple = type->parent;
+            const mp_obj_tuple_t *parent_tuple = parent;
             const mp_obj_t *item = parent_tuple->items;
             const mp_obj_t *top = item + parent_tuple->len;
             for (; item < top; ++item) {
@@ -81,7 +84,7 @@ STATIC int instance_count_native_bases(const mp_obj_type_t *type, const mp_obj_t
         #endif
         } else {
             // A single parent, use iteration to continue the search.
-            type = type->parent;
+            type = parent;
         }
     }
 }
@@ -159,7 +162,8 @@ STATIC void mp_obj_class_lookup(struct class_lookup_data *lookup, const mp_obj_t
         if (lookup->meth_offset != 0 && mp_obj_is_native_type(type)) {
             #pragma GCC diagnostic push
             #pragma GCC diagnostic ignored "-Wcast-align"
-            if (*(void **)((char *)type + lookup->meth_offset) != NULL) {
+            size_t sz = mp_type_size(type);
+            if (lookup->meth_offset < sz && *(void **)((char *)type + lookup->meth_offset) != NULL) {
             #pragma GCC diagnostic pop
                 DEBUG_printf("mp_obj_class_lookup: Matched special meth slot (off=%d) for %s\n",
                     lookup->meth_offset, qstr_str(lookup->attr));
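The guard added here only dereferences a special-method slot when its byte offset lies within the type structure as reported by mp_type_size(), which keeps the lookup safe if type structs later stop carrying every slot. The same check in isolation, as a sketch with a hypothetical helper name:

    // Read a function-pointer slot by offset, or return NULL when the
    // offset is past the end of this type's struct (or the slot is unset).
    static void *type_slot_or_null(const mp_obj_type_t *type, size_t offset) {
        if (offset >= mp_type_size(type)) {
            return NULL;
        }
        return *(void **)((char *)type + offset);
    }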
@@ -211,12 +215,14 @@ STATIC void mp_obj_class_lookup(struct class_lookup_data *lookup, const mp_obj_t
 
         // attribute not found, keep searching base classes
 
-        if (type->parent == NULL) {
+        const void *parent = mp_type_parent(type);
+        if (parent == NULL) {
             DEBUG_printf("mp_obj_class_lookup: No more parents\n");
             return;
+        }
         #if MICROPY_MULTIPLE_INHERITANCE
-        } else if (((mp_obj_base_t *)type->parent)->type == &mp_type_tuple) {
-            const mp_obj_tuple_t *parent_tuple = type->parent;
+        if (((mp_obj_base_t *)parent)->type == &mp_type_tuple) {
+            const mp_obj_tuple_t *parent_tuple = parent;
             const mp_obj_t *item = parent_tuple->items;
             const mp_obj_t *top = item + parent_tuple->len - 1;
             for (; item < top; ++item) {
@@ -235,10 +241,10 @@ STATIC void mp_obj_class_lookup(struct class_lookup_data *lookup, const mp_obj_t
             // search last base (simple tail recursion elimination)
             assert(mp_obj_is_type(*item, &mp_type_type));
             type = (mp_obj_type_t *)MP_OBJ_TO_PTR(*item);
-        #endif
-        } else {
-            type = type->parent;
+            continue;
         }
+        #endif
+        type = parent;
         if (type == &mp_type_object) {
             // Not a "real" type
             return;
@@ -1079,7 +1085,8 @@ STATIC void type_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
                 dest[0] = mp_const_empty_tuple;
                 return;
             }
-            mp_obj_t parent_obj = self->parent ? MP_OBJ_FROM_PTR(self->parent) : MP_OBJ_FROM_PTR(&mp_type_object);
+            const void *parent = mp_type_parent(self);
+            mp_obj_t parent_obj = parent ? MP_OBJ_FROM_PTR(parent) : MP_OBJ_FROM_PTR(&mp_type_object);
             #if MICROPY_MULTIPLE_INHERITANCE
             if (mp_obj_is_type(parent_obj, &mp_type_tuple)) {
                 dest[0] = parent_obj;
@@ -1324,11 +1331,12 @@ STATIC void super_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
         lookup.meth_offset = offsetof(mp_obj_type_t, make_new);
     }
 
-    if (type->parent == NULL) {
+    const void *parent = mp_type_parent(type);
+    if (parent == NULL) {
         // no parents, do nothing
     #if MICROPY_MULTIPLE_INHERITANCE
-    } else if (((mp_obj_base_t *)type->parent)->type == &mp_type_tuple) {
-        const mp_obj_tuple_t *parent_tuple = type->parent;
+    } else if (((mp_obj_base_t *)parent)->type == &mp_type_tuple) {
+        const mp_obj_tuple_t *parent_tuple = parent;
         size_t len = parent_tuple->len;
         const mp_obj_t *items = parent_tuple->items;
         for (size_t i = 0; i < len; i++) {
@@ -1344,8 +1352,8 @@ STATIC void super_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
             }
         }
     #endif
-    } else if (type->parent != &mp_type_object) {
-        mp_obj_class_lookup(&lookup, type->parent);
+    } else if (parent != &mp_type_object) {
+        mp_obj_class_lookup(&lookup, parent);
     }
 
     if (dest[0] != MP_OBJ_NULL) {
@@ -1419,14 +1427,15 @@ bool mp_obj_is_subclass_fast(mp_const_obj_t object, mp_const_obj_t classinfo) {
         }
 
         const mp_obj_type_t *self = MP_OBJ_TO_PTR(object);
+        const void *parent = mp_type_parent(self);
 
-        if (self->parent == NULL) {
+        if (parent == NULL) {
             // type has no parents
             return false;
         #if MICROPY_MULTIPLE_INHERITANCE
-        } else if (((mp_obj_base_t *)self->parent)->type == &mp_type_tuple) {
+        } else if (((mp_obj_base_t *)parent)->type == &mp_type_tuple) {
             // get the base objects (they should be type objects)
-            const mp_obj_tuple_t *parent_tuple = self->parent;
+            const mp_obj_tuple_t *parent_tuple = parent;
             const mp_obj_t *item = parent_tuple->items;
             const mp_obj_t *top = item + parent_tuple->len - 1;
 
@@ -1442,7 +1451,7 @@ bool mp_obj_is_subclass_fast(mp_const_obj_t object, mp_const_obj_t classinfo) {
         #endif
         } else {
             // type has 1 parent
-            object = MP_OBJ_FROM_PTR(self->parent);
+            object = MP_OBJ_FROM_PTR(parent);
         }
     }
 }
@@ -31,12 +31,13 @@
 #ifndef MICROPY_UNSAFE_PROTO
 const void *mp_proto_get(uint16_t name, mp_const_obj_t obj) {
     const mp_obj_type_t *type = mp_obj_get_type(obj);
-    if (!type->protocol) {
+    const void *protocol = mp_type_protocol(type);
+    if (!protocol) {
         return NULL;
     }
-    uint16_t proto_name = *(const uint16_t *)type->protocol;
+    uint16_t proto_name = *(const uint16_t *)protocol;
     if (proto_name == name) {
-        return type->protocol;
+        return protocol;
     }
     return NULL;
 }
@@ -31,7 +31,7 @@
 #define MP_PROTOCOL_HEAD /* NOTHING */
 #define MP_PROTO_IMPLEMENT(name) /* NOTHING */
 static inline void *mp_proto_get(uint16_t name, mp_const_obj_type_t obj) {
-    return mp_obj_get_type(obj)->protocol;
+    return mp_type_protocol(mp_obj_get_type(obj));
 }
 #else
 #define MP_PROTOCOL_HEAD \
py/runtime.c (60 changed lines)
@@ -282,8 +282,9 @@ mp_obj_t mp_unary_op(mp_unary_op_t op, mp_obj_t arg) {
         return MP_OBJ_NEW_SMALL_INT(h);
     } else {
         const mp_obj_type_t *type = mp_obj_get_type(arg);
-        if (type->unary_op != NULL) {
-            mp_obj_t result = type->unary_op(op, arg);
+        mp_unary_op_fun_t unary_op = mp_type_unary_op(type);
+        if (unary_op != NULL) {
+            mp_obj_t result = unary_op(op, arg);
             if (result != MP_OBJ_NULL) {
                 return result;
             }
@@ -571,8 +572,9 @@ mp_obj_t PLACE_IN_ITCM(mp_binary_op)(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t r
     const mp_obj_type_t *type;
 generic_binary_op:
     type = mp_obj_get_type(lhs);
-    if (type->binary_op != NULL) {
-        mp_obj_t result = type->binary_op(op, lhs, rhs);
+    mp_binary_op_fun_t binary_op = mp_type_binary_op(type);
+    if (binary_op != NULL) {
+        mp_obj_t result = binary_op(op, lhs, rhs);
         if (result != MP_OBJ_NULL) {
             return result;
         }
@@ -647,8 +649,9 @@ mp_obj_t mp_call_function_n_kw(mp_obj_t fun_in, size_t n_args, size_t n_kw, cons
     const mp_obj_type_t *type = mp_obj_get_type(fun_in);
 
     // do the call
-    if (type->call != NULL) {
-        return type->call(fun_in, n_args, n_kw, args);
+    mp_call_fun_t call = mp_type_call(type);
+    if (call) {
+        return call(fun_in, n_args, n_kw, args);
     }
 
     #if MICROPY_ERROR_REPORTING <= MICROPY_ERROR_REPORTING_TERSE
@@ -1108,15 +1111,18 @@ void mp_load_method_maybe(mp_obj_t obj, qstr attr, mp_obj_t *dest) {
     }
     #endif
 
-    if (attr == MP_QSTR___next__ && type->iternext != NULL) {
+    if (attr == MP_QSTR___next__ && mp_type_iternext(type) != NULL) {
         dest[0] = MP_OBJ_FROM_PTR(&mp_builtin_next_obj);
         dest[1] = obj;
-
-    } else if (type->attr != NULL) {
+        return;
+    }
+    mp_attr_fun_t attr_fun = mp_type_attr(type);
+    if (attr_fun != NULL) {
         // this type can do its own load, so call it
-        type->attr(obj, attr, dest);
-
-    } else if (type->locals_dict != NULL) {
+        attr_fun(obj, attr, dest);
+        return;
+    }
+    if (type->locals_dict != NULL) {
         // generic method lookup
         // this is a lookup in the object (ie not class or type)
         assert(type->locals_dict->base.type == &mp_type_dict); // MicroPython restriction, for now
@@ -1171,9 +1177,10 @@ void mp_load_method_protected(mp_obj_t obj, qstr attr, mp_obj_t *dest, bool catc
 void mp_store_attr(mp_obj_t base, qstr attr, mp_obj_t value) {
     DEBUG_OP_printf("store attr %p.%s <- %p\n", base, qstr_str(attr), value);
     const mp_obj_type_t *type = mp_obj_get_type(base);
-    if (type->attr != NULL) {
+    mp_attr_fun_t attr_fun = mp_type_attr(type);
+    if (attr_fun != NULL) {
         mp_obj_t dest[2] = {MP_OBJ_SENTINEL, value};
-        type->attr(base, attr, dest);
+        attr_fun(base, attr, dest);
         if (dest[0] == MP_OBJ_NULL) {
             // success
             return;
@@ -1221,21 +1228,21 @@ void mp_store_attr(mp_obj_t base, qstr attr, mp_obj_t value) {
 mp_obj_t mp_getiter(mp_obj_t o_in, mp_obj_iter_buf_t *iter_buf) {
     assert(o_in);
     const mp_obj_type_t *type = mp_obj_get_type(o_in);
-
+    mp_getiter_fun_t getiter = mp_type_getiter(type);
     // Check for native getiter which is the identity. We handle this case explicitly
     // so we don't unnecessarily allocate any RAM for the iter_buf, which won't be used.
-    if (type->getiter == mp_identity_getiter) {
+    if (getiter == mp_identity_getiter) {
        return o_in;
     }
 
     // check for native getiter (corresponds to __iter__)
-    if (type->getiter != NULL) {
-        if (iter_buf == NULL && type->getiter != mp_obj_instance_getiter) {
+    if (getiter != NULL) {
+        if (iter_buf == NULL && getiter != mp_obj_instance_getiter) {
             // if caller did not provide a buffer then allocate one on the heap
             // mp_obj_instance_getiter is special, it will allocate only if needed
             iter_buf = m_new_obj(mp_obj_iter_buf_t);
         }
-        mp_obj_t iter = type->getiter(o_in, iter_buf);
+        mp_obj_t iter = getiter(o_in, iter_buf);
         if (iter != MP_OBJ_NULL) {
             return iter;
         }
@@ -1266,8 +1273,9 @@ mp_obj_t mp_getiter(mp_obj_t o_in, mp_obj_iter_buf_t *iter_buf) {
 // may also raise StopIteration()
 mp_obj_t mp_iternext_allow_raise(mp_obj_t o_in) {
     const mp_obj_type_t *type = mp_obj_get_type(o_in);
-    if (type->iternext != NULL) {
-        return type->iternext(o_in);
+    mp_fun_1_t iternext = mp_type_iternext(type);
+    if (iternext != NULL) {
+        return iternext(o_in);
     } else {
         // check for __next__ method
         mp_obj_t dest[2];
@@ -1291,8 +1299,9 @@ mp_obj_t mp_iternext_allow_raise(mp_obj_t o_in) {
 mp_obj_t mp_iternext(mp_obj_t o_in) {
     MP_STACK_CHECK(); // enumerate, filter, map and zip can recursively call mp_iternext
     const mp_obj_type_t *type = mp_obj_get_type(o_in);
-    if (type->iternext != NULL) {
-        return type->iternext(o_in);
+    mp_fun_1_t iternext = mp_type_iternext(type);
+    if (iternext != NULL) {
+        return iternext(o_in);
     } else {
         // check for __next__ method
         mp_obj_t dest[2];
@@ -1331,8 +1340,9 @@ mp_vm_return_kind_t mp_resume(mp_obj_t self_in, mp_obj_t send_value, mp_obj_t th
         return mp_obj_gen_resume(self_in, send_value, throw_value, ret_val);
     }
 
-    if (type->iternext != NULL && send_value == mp_const_none) {
-        mp_obj_t ret = type->iternext(self_in);
+    mp_fun_1_t iternext = mp_type_iternext(type);
+    if (iternext != NULL && send_value == mp_const_none) {
+        mp_obj_t ret = iternext(self_in);
         *ret_val = ret;
         if (ret != MP_OBJ_STOP_ITERATION) {
             return MP_VM_RETURN_YIELD;