author | Jemma Issroff <[email protected]> | 2022-11-08 15:35:31 -0500 |
---|---|---|
committer | Peter Zhu <[email protected]> | 2022-11-10 10:11:34 -0500 |
commit | 5246f4027ec574e77809845e1b1f7822cc2a5cef | |
tree | a29c972df6a589c7ab8c2541ea2eea1f7caf5f70 /shape.c | |
parent | 9986697b621e5345177a1c395489dcc9fab8602b | |
Transition shape when object's capacity changes
This commit adds a `capacity` field to shapes, and adds shape transitions whenever an object's capacity changes. Objects which are allocated out of a bigger size pool will also make a transition from the root shape to the shape with the correct capacity for their size pool when they are allocated. This commit will allow us to remove numiv from objects completely, and it also means we can guarantee that if two objects share a shape, their IVs are in the same positions (an embedded object and an extended object cannot share shapes). This will enable us to implement ivar sets in YJIT using object shapes.

Co-Authored-By: Aaron Patterson <[email protected]>
Notes: Merged: https://github.com/ruby/ruby/pull/6699
-rw-r--r-- | shape.c | 203 |
1 file changed, 158 insertions, 45 deletions
```diff
@@ -1,15 +1,19 @@
 #include "vm_core.h"
 #include "vm_sync.h"
 #include "shape.h"
 #include "internal/class.h"
 #include "internal/symbol.h"
 #include "internal/variable.h"
 #include <stdbool.h>
 
 /*
  * Shape getters
  */
-static rb_shape_t*
 rb_shape_get_root_shape(void)
 {
     return GET_VM()->root_shape;
@@ -21,12 +25,6 @@ rb_shape_id(rb_shape_t * shape)
     return (shape_id_t)(shape - GET_VM()->shape_list);
 }
 
-static rb_shape_t*
-rb_shape_get_frozen_root_shape(void)
-{
-    return GET_VM()->frozen_root_shape;
-}
-
 bool
 rb_shape_root_shape_p(rb_shape_t* shape)
 {
@@ -68,7 +66,7 @@ shape_id_t
 rb_shape_get_shape_id(VALUE obj)
 {
     if (RB_SPECIAL_CONST_P(obj)) {
-        return FROZEN_ROOT_SHAPE_ID;
     }
 #if SHAPE_IN_BASIC_FLAGS
@@ -113,12 +111,9 @@ rb_shape_lookup_id(rb_shape_t* shape, ID id, enum shape_type shape_type)
 }
 
 static rb_shape_t*
-get_next_shape_internal(rb_shape_t* shape, ID id, VALUE obj, enum shape_type shape_type)
 {
     rb_shape_t *res = NULL;
-
-    RUBY_ASSERT(SHAPE_FROZEN != (enum shape_type)shape->type || RB_TYPE_P(obj, T_MODULE) || RB_TYPE_P(obj, T_CLASS));
-
     RB_VM_LOCK_ENTER();
     {
         if (rb_shape_lookup_id(shape, id, shape_type)) {
@@ -142,23 +137,18 @@ get_next_shape_internal(rb_shape_t* shape, ID id, VALUE obj, enum shape_type sha
             rb_shape_t * new_shape = rb_shape_alloc(id, shape);
 
             new_shape->type = (uint8_t)shape_type;
 
             switch (shape_type) {
               case SHAPE_IVAR:
-                new_shape->next_iv_index = rb_shape_get_shape_by_id(new_shape->parent_id)->next_iv_index + 1;
-
-                // Check if we should update next_iv_index on the object's class
-                if (BUILTIN_TYPE(obj) == T_OBJECT) {
-                    VALUE klass = rb_obj_class(obj);
-                    if (new_shape->next_iv_index > RCLASS_EXT(klass)->max_iv_count) {
-                        RCLASS_EXT(klass)->max_iv_count = new_shape->next_iv_index;
-                    }
-                }
                 break;
               case SHAPE_IVAR_UNDEF:
               case SHAPE_FROZEN:
-                new_shape->next_iv_index = rb_shape_get_shape_by_id(new_shape->parent_id)->next_iv_index;
                 break;
               case SHAPE_ROOT:
                 rb_bug("Unreachable");
                 break;
@@ -183,7 +173,7 @@ rb_shape_frozen_shape_p(rb_shape_t* shape)
 void
 rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape)
 {
-    rb_shape_t* next_shape = get_next_shape_internal(shape, id, obj, SHAPE_IVAR_UNDEF);
 
     if (shape == next_shape) {
         return;
@@ -206,16 +196,11 @@ rb_shape_transition_shape_frozen(VALUE obj)
     rb_shape_t* next_shape;
 
     if (shape == rb_shape_get_root_shape()) {
-        next_shape = rb_shape_get_frozen_root_shape();
     }
-    else {
-        static ID id_frozen;
-        if (!id_frozen) {
-            id_frozen = rb_make_internal_id();
-        }
-        next_shape = get_next_shape_internal(shape, (ID)id_frozen, obj, SHAPE_FROZEN);
-    }
 
     RUBY_ASSERT(next_shape);
     rb_shape_set_shape(obj, next_shape);
@@ -231,10 +216,39 @@ rb_shape_transition_shape(VALUE obj, ID id, rb_shape_t *shape)
     rb_shape_set_shape(obj, next_shape);
 }
 
-rb_shape_t*
 rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id)
 {
-    return get_next_shape_internal(shape, id, obj, SHAPE_IVAR);
 }
 
 bool
@@ -250,11 +264,13 @@ rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t *value)
                 RUBY_ASSERT(shape->next_iv_index > 0);
                 *value = shape->next_iv_index - 1;
                 return true;
               case SHAPE_IVAR_UNDEF:
               case SHAPE_ROOT:
                 return false;
               case SHAPE_FROZEN:
-                rb_bug("Ivar should not exist on frozen transition\n");
             }
         }
         shape = rb_shape_get_shape_by_id(shape->parent_id);
@@ -290,9 +306,18 @@ rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id)
 }
 
 rb_shape_t *
 rb_shape_alloc(ID edge_name, rb_shape_t * parent)
 {
-    return rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
 }
 
 MJIT_FUNC_EXPORTED void
@@ -307,6 +332,39 @@ rb_shape_flags_mask(void)
     return SHAPE_FLAG_MASK;
 }
 
 #if VM_CHECK_MODE > 0
 VALUE rb_cShape;
 
@@ -336,6 +394,14 @@ rb_shape_type(VALUE self)
 }
 
 static VALUE
 rb_shape_parent_id(VALUE self)
 {
     rb_shape_t * shape;
@@ -398,11 +464,16 @@ rb_shape_edge_name(VALUE self)
     rb_shape_t* shape;
     TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
 
-    if (shape->edge_name) {
-        return ID2SYM(shape->edge_name);
     }
     else {
-        return Qnil;
     }
 }
@@ -416,6 +487,15 @@ rb_shape_next_iv_index(VALUE self)
 }
 
 static VALUE
 rb_shape_export_depth(VALUE self)
 {
     rb_shape_t* shape;
@@ -454,12 +534,6 @@ rb_shape_root_shape(VALUE self)
     return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
 }
 
-static VALUE
-rb_shape_frozen_root_shape(VALUE self)
-{
-    return rb_shape_t_to_rb_cShape(rb_shape_get_frozen_root_shape());
-}
-
 VALUE rb_obj_shape(rb_shape_t* shape);
 
 static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
@@ -519,6 +593,43 @@ rb_shape_find_by_id(VALUE mod, VALUE id)
 #endif
 
 void
 Init_shape(void)
 {
 #if VM_CHECK_MODE > 0
@@ -530,21 +641,23 @@ Init_shape(void)
     rb_define_method(rb_cShape, "edges", rb_shape_edges, 0);
     rb_define_method(rb_cShape, "edge_name", rb_shape_edge_name, 0);
     rb_define_method(rb_cShape, "next_iv_index", rb_shape_next_iv_index, 0);
     rb_define_method(rb_cShape, "depth", rb_shape_export_depth, 0);
     rb_define_method(rb_cShape, "id", rb_wrapped_shape_id, 0);
     rb_define_method(rb_cShape, "type", rb_shape_type, 0);
 
     rb_define_const(rb_cShape, "SHAPE_ROOT", INT2NUM(SHAPE_ROOT));
     rb_define_const(rb_cShape, "SHAPE_IVAR", INT2NUM(SHAPE_IVAR));
     rb_define_const(rb_cShape, "SHAPE_IVAR_UNDEF", INT2NUM(SHAPE_IVAR_UNDEF));
     rb_define_const(rb_cShape, "SHAPE_FROZEN", INT2NUM(SHAPE_FROZEN));
     rb_define_const(rb_cShape, "SHAPE_BITS", INT2NUM(SHAPE_BITS));
     rb_define_const(rb_cShape, "SHAPE_FLAG_SHIFT", INT2NUM(SHAPE_FLAG_SHIFT));
     rb_define_singleton_method(rb_cShape, "transition_tree", shape_transition_tree, 0);
     rb_define_singleton_method(rb_cShape, "find_by_id", rb_shape_find_by_id, 1);
     rb_define_singleton_method(rb_cShape, "next_shape_id", next_shape_id, 0);
     rb_define_singleton_method(rb_cShape, "of", rb_shape_debug_shape, 1);
     rb_define_singleton_method(rb_cShape, "root_shape", rb_shape_root_shape, 0);
-    rb_define_singleton_method(rb_cShape, "frozen_root_shape", rb_shape_frozen_root_shape, 0);
 #endif
 }
```
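To make the idea in the commit message concrete, here is a minimal standalone sketch (not the actual shape.c code) of a shape tree where each shape records a capacity and an object transitions to a new shape whenever its capacity grows. The struct `example_shape` and the function `transition_capacity` are hypothetical names invented for illustration; the assumption that only the capacity changes on such a transition (not the ivar count) is also an assumption of this sketch.

```c
/*
 * Standalone sketch of capacity transitions on shapes.
 * Hypothetical names; not the shape.c implementation.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct example_shape {
    uint32_t capacity;              /* how many ivar slots the object can hold */
    uint32_t next_iv_index;         /* how many ivars have been set so far */
    struct example_shape *parent;   /* previous shape in the transition tree */
} example_shape;

/* Allocate a child shape that records a larger capacity.
 * Assumption: a capacity-only transition keeps the ivar layout unchanged. */
static example_shape *
transition_capacity(example_shape *current, uint32_t new_capacity)
{
    example_shape *child = malloc(sizeof(example_shape));
    child->capacity = new_capacity;
    child->next_iv_index = current->next_iv_index;
    child->parent = current;
    return child;
}

int
main(void)
{
    example_shape root = { .capacity = 3, .next_iv_index = 0, .parent = NULL };

    /* An object allocated from a bigger size pool starts at the root shape
     * and immediately transitions to a shape with that pool's capacity. */
    example_shape *embedded = transition_capacity(&root, 6);

    /* When the object outgrows its embedded storage, it transitions again. */
    example_shape *extended = transition_capacity(embedded, 12);

    printf("root capacity: %" PRIu32 "\n", root.capacity);
    printf("embedded capacity: %" PRIu32 "\n", embedded->capacity);
    printf("extended capacity: %" PRIu32 "\n", extended->capacity);

    free(extended);
    free(embedded);
    return 0;
}
```

In the patch itself the transition goes through the VM-wide shape tree (`get_next_shape_internal` looks up an existing edge via `rb_shape_lookup_id` before allocating a new shape), so objects that take the same sequence of transitions share a shape; the sketch above skips that sharing and simply allocates a fresh node per transition.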