path: root/prism_compile.c
author    eileencodes <[email protected]>        2024-08-22 12:04:16 -0400
committer Kevin Newton <[email protected]>        2024-08-27 16:01:46 -0400
commit    7462cc77431b5e7f9a98a59c7e3996dd9c23a540
tree      44d9e17543c57c089a3bb36bde30d89362253ed7 /prism_compile.c
parent    8f700db4edb2ae335c3c340a8bf5a6cc0f527a4b
[PRISM] Fix allocations for keyword splat params
Fixes the following allocation tests:

* `test_keyword_and_keyword_splat_parameter`
* `test_keyword_parameter`
* `test_keyword_splat_parameter`
* `test_no_array_allocation_with_splat_and_nonstatic_keywords`
* `test_no_parameters`
* `test_positional_splat_and_keyword_splat_parameter`
* `test_ruby2_keywords`

Changes:

* Checks for `first_chunk` and whether `stack_length == 0` to match the upstream parser. Otherwise, this optimization is skipped.
* Subtracts the index; otherwise the hash allocation would be skipped for calls like `keyword(*empty_array, a: 2, **empty_hash)`.
* Sets `dup_rest` in order to determine when to set the correct flags.
* Doesn't set the `VM_CALL_KW_SPLAT_MUT` flag unless `dup_rest` no longer matches `initial_dup_rest`.

Given the following code:

```ruby
keyword(*empty_array, a: 2)
```

Instructions before:

```
== disasm: #<ISeq:[email protected]:4 (4,0)-(8,3)>
local table (size: 2, argc: 1 [opts: 0, rest: -1, post: 0, block: -1, kw: -1@-1, kwrest: -1])
[ 2] empty_hash@0<Arg>  [ 1] empty_array@1
0000 newarray                     0                         (   5)[LiCa]
0002 setlocal_WC_0                empty_array@1
0004 putself                                                (   7)[Li]
0005 getlocal_WC_0                empty_array@1
0007 splatarray                   true
0009 putobject                    :a
0011 putobject                    2
0013 newhash                      2
0015 opt_send_without_block       <calldata!mid:keyword, argc:2, ARGS_SPLAT|ARGS_SPLAT_MUT|FCALL|KW_SPLAT>
0017 leave                                                  (   8)[Re]
```

Instructions after:

```
== disasm: #<ISeq:[email protected]:4 (4,0)-(8,3)>
local table (size: 2, argc: 1 [opts: 0, rest: -1, post: 0, block: -1, kw: -1@-1, kwrest: -1])
[ 2] empty_hash@0<Arg>  [ 1] empty_array@1
0000 newarray                     0                         (   5)[LiCa]
0002 setlocal_WC_0                empty_array@1
0004 putself                                                (   7)[Li]
0005 getlocal_WC_0                empty_array@1
0007 splatarray                   false
0009 putobject                    {:a=>2}
0011 opt_send_without_block       <calldata!mid:keyword, argc:2, ARGS_SPLAT|FCALL|KW_SPLAT>
0013 leave                                                  (   8)[Re]
```

Differences:

* `splatarray` is `false`, not `true`
* `putobject`, `putobject`, `newhash` becomes a single `putobject` with optimizations on
* The `ARGS_SPLAT_MUT` flag is no longer set

Related: ruby/prism#2994

Co-authored-by: Kevin Newton <[email protected]>
Notes: Merged: https://github.com/ruby/ruby/pull/11438
-rw-r--r--  prism_compile.c | 72
1 file changed, 64 insertions(+), 8 deletions(-)
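A disassembly like the one in the commit message can be reproduced directly from Ruby. This is a minimal sketch, assuming a CRuby build where Prism is the active parser; the surrounding method definition is an assumption added only to make the snippet self-contained:

```ruby
# Dump the instruction sequence for the call from the commit message and
# inspect the call site. On a build with this fix, the splat should compile
# to `splatarray false`, the static keywords to a single `putobject`, and
# the calldata should not carry ARGS_SPLAT_MUT. Exact operand formatting
# varies between Ruby versions.
code = <<~RUBY
  def keyword(*args, **kwargs); end

  empty_array = []
  keyword(*empty_array, a: 2)
RUBY

puts RubyVM::InstructionSequence.compile(code).disasm
```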
@@ -1395,15 +1395,19 @@ pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_l
for (size_t index = 0; index < elements->size; index++) {
const pm_node_t *element = elements->nodes[index];
switch (PM_NODE_TYPE(element)) {
case PM_ASSOC_NODE: {
// Pre-allocation check (this branch can be omitted).
- if (PM_NODE_FLAG_P(element, PM_NODE_FLAG_STATIC_LITERAL) && !static_literal && ((index + min_tmp_hash_length) < elements->size)) {
// Count the elements that are statically-known.
size_t count = 1;
while (index + count < elements->size && PM_NODE_FLAG_P(elements->nodes[index + count], PM_NODE_FLAG_STATIC_LITERAL)) count++;
- if (count >= min_tmp_hash_length) {
// The subsequence of elements in this hash is long enough
// to merit its own hash.
VALUE ary = rb_ary_hidden_new(count);
@@ -1419,6 +1423,7 @@ pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_l
rb_ary_cat(ary, elem, 2);
}
VALUE hash = rb_hash_new_with_size(RARRAY_LEN(ary) / 2);
rb_hash_bulk_insert(RARRAY_LEN(ary), RARRAY_CONST_PTR(ary), hash);
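The hunk above pre-builds a hash once it finds a long enough run of statically-known key/value pairs. Below is a hedged Ruby-level sketch of the kind of call this affects; the method name is invented for illustration, and the exact cutoff is whatever `min_tmp_hash_length` is in this build:

```ruby
def takes_keywords(**opts) = opts

kw      = { x: 0 }
dynamic = 42

# The literal pairs a:..d: form a statically-known run. With the chunking
# above, such a run can be merged into one pre-built hash instead of being
# pushed key by key, provided it meets the compiler's length threshold.
takes_keywords(**kw, a: 1, b: 2, c: 3, d: 4, e: dynamic)
```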
@@ -1530,7 +1535,7 @@ pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_l
// These are implementation details. Users should call pm_setup_args() instead.
static int
-pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, const bool has_regular_blockarg, struct rb_callinfo_kwarg **kw_arg, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_node_location_t *node_location)
{
const pm_node_location_t location = *node_location;
@@ -1563,7 +1568,7 @@ pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *b
*flags |= VM_CALL_KW_SPLAT;
has_keyword_splat = true;
- if (elements->size > 1) {
// A new hash will be created for the keyword arguments
// in this case, so mark the method as passing mutable
// keyword splat.
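The `elements->size > 1` check removed above decided when a fresh hash has to be built for the keyword arguments, which is what the mutable keyword-splat marking refers to. A hedged Ruby sketch of the two call shapes involved, with an invented method name:

```ruby
def takes_kwargs(**opts) = opts

opts  = { a: 1 }
extra = { b: 2 }

# A lone keyword splat can be passed along as-is (KW_SPLAT without the
# mutable flag), while combining it with other keyword elements forces a
# new hash to be built for the call, which is what the mutable
# keyword-splat marking above is about.
takes_kwargs(**opts)
takes_kwargs(**opts, **extra)
```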
@@ -1676,8 +1681,8 @@ pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *b
// foo(a, *b, c)
// ^^
if (index + 1 < arguments->size || has_regular_blockarg) {
- PUSH_INSN1(ret, location, splatarray, Qtrue);
- *flags |= VM_CALL_ARGS_SPLAT_MUT;
}
// If this is the first splat array seen and it's the last
// parameter, we don't want splatarray to dup it.
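The `foo(a, *b, c)` comment above marks the case where the splatted array still has to be copied. A hedged Ruby sketch of the two shapes, again with an invented method name:

```ruby
def variadic(*args) = args

a = 1
b = [2, 3]
c = 4

# With a positional argument after the splat, the splatted array must be
# copied (`splatarray true`) so the trailing value can be appended without
# mutating `b`. With the splat as the final argument, the copy can be
# skipped (`splatarray false`).
variadic(a, *b, c)
variadic(a, *b)
```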
@@ -1796,10 +1801,49 @@ pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *b
return orig_argc;
}
// Compile the argument parts of a call
static int
pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, struct rb_callinfo_kwarg **kw_arg, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_node_location_t *node_location)
{
if (block && PM_NODE_TYPE_P(block, PM_BLOCK_ARGUMENT_NODE)) {
// We compile the `&block_arg` expression first and stitch it later
// since the nature of the expression influences whether splat should
@@ -1825,12 +1869,24 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
}
}
- int argc = pm_setup_args_core(arguments_node, block, flags, regular_block_arg, kw_arg, iseq, ret, scope_node, node_location);
PUSH_SEQ(ret, block_arg);
return argc;
}
- return pm_setup_args_core(arguments_node, block, flags, false, kw_arg, iseq, ret, scope_node, node_location);
}
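`pm_setup_args` compiles the `&block_arg` expression first because evaluating it can affect whether the splatted array must be copied. A hedged sketch of why that ordering matters; the mutating block expression is an assumption chosen to make the point, not something taken from the diff:

```ruby
def call_with_block(*args, &blk) = [args, blk]

args = [1, 2, proc { :block }]

# Evaluating the block expression mutates `args`, so the splat cannot simply
# alias the array; the compiler has to see the block argument before it can
# decide whether the `splatarray` copy is skippable.
call_with_block(*args, &args.pop)
```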
/**