Skip to content

Commit 3a49e76

Browse files
committed
get_next_shape_internal: Skip VM lock for single child case
If the shape has only one child, we check it lock-free without compromising thread safety. I haven't collected hard data as to how often that is the case, but we can assume that it's not too rare for shapes to have a single child that is often requested, typically when freezing an object.
1 parent 6052b12 commit 3a49e76

File tree

2 files changed

+54
-10
lines changed

2 files changed

+54
-10
lines changed

include/ruby/atomic.h

+26
Original file line numberDiff line numberDiff line change
@@ -301,6 +301,19 @@ typedef unsigned int rb_atomic_t;
301301
#define RUBY_ATOMIC_PTR_LOAD(var) \
302302
RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&var))
303303

304+
/**
305+
* Identical to #RUBY_ATOMIC_SET, except it expects its arguments are
306+
* `void*`. There are cases where ::rb_atomic_t is 32bit while ::VALUE is
307+
* 64bit. This should be used for pointer related operations to support such
308+
* platforms.
309+
*
310+
* @param var A variable of `void*`.
311+
* @param val Value to set.
312+
* @post `var` holds `val`.
313+
*/
314+
#define RUBY_ATOMIC_PTR_SET(var, val) \
315+
rbimpl_atomic_ptr_set((volatile void **)&(var), (val))
316+
304317
/**
305318
* Identical to #RUBY_ATOMIC_CAS, except it expects its arguments are `void*`.
306319
* There are cases where ::rb_atomic_t is 32bit while `void*` is 64bit. This
@@ -788,6 +801,19 @@ rbimpl_atomic_ptr_exchange(void *volatile *ptr, const void *val)
788801
#endif
789802
}
790803

804+
RBIMPL_ATTR_ARTIFICIAL()
805+
RBIMPL_ATTR_NOALIAS()
806+
RBIMPL_ATTR_NONNULL((1))
807+
static inline void
808+
rbimpl_atomic_ptr_set(volatile void **ptr, void *val)
809+
{
810+
RBIMPL_STATIC_ASSERT(sizeof_value, sizeof *ptr == sizeof(size_t));
811+
812+
const size_t sval = RBIMPL_CAST((size_t)val);
813+
volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
814+
rbimpl_atomic_size_set(sptr, sval);
815+
}
816+
791817
RBIMPL_ATTR_ARTIFICIAL()
792818
RBIMPL_ATTR_NOALIAS()
793819
RBIMPL_ATTR_NONNULL((1))

shape.c

+28-10
Original file line numberDiff line numberDiff line change
@@ -499,13 +499,27 @@ get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type, b
499499

500500
*variation_created = false;
501501

502+
// Fast path: if the shape has a single child, we can check it without a lock
503+
struct rb_id_table *edges = RUBY_ATOMIC_PTR_LOAD(shape->edges);
504+
if (edges && SINGLE_CHILD_P(edges)) {
505+
rb_shape_t *child = SINGLE_CHILD(edges);
506+
if (child->edge_name == id) {
507+
return child;
508+
}
509+
}
510+
502511
RB_VM_LOCK_ENTER();
503512
{
513+
// The situation may have changed while we waited for the lock.
514+
// So we load the edge again.
515+
edges = RUBY_ATOMIC_PTR_LOAD(shape->edges);
516+
504517
// If the current shape has children
505-
if (shape->edges) {
518+
if (edges) {
506519
// Check if it only has one child
507-
if (SINGLE_CHILD_P(shape->edges)) {
508-
rb_shape_t * child = SINGLE_CHILD(shape->edges);
520+
if (SINGLE_CHILD_P(edges)) {
521+
rb_shape_t *child = SINGLE_CHILD(edges);
522+
509523
// If the one child has a matching edge name, then great,
510524
// we found what we want.
511525
if (child->edge_name == id) {
@@ -515,7 +529,7 @@ get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type, b
515529
else {
516530
// If it has more than one child, do a hash lookup to find it.
517531
VALUE lookup_result;
518-
if (rb_id_table_lookup(shape->edges, id, &lookup_result)) {
532+
if (rb_id_table_lookup(edges, id, &lookup_result)) {
519533
res = (rb_shape_t *)lookup_result;
520534
}
521535
}
@@ -531,22 +545,26 @@ get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type, b
531545
else {
532546
rb_shape_t * new_shape = rb_shape_alloc_new_child(id, shape, shape_type);
533547

534-
if (!shape->edges) {
548+
if (!edges) {
535549
// If the shape had no edge yet, we can directly set the new child
536-
shape->edges = TAG_SINGLE_CHILD(new_shape);
550+
edges = TAG_SINGLE_CHILD(new_shape);
537551
}
538552
else {
539553
// If the edge was single child we need to allocate a table.
540554
if (SINGLE_CHILD_P(shape->edges)) {
541-
rb_shape_t * old_child = SINGLE_CHILD(shape->edges);
542-
shape->edges = rb_id_table_create(2);
543-
rb_id_table_insert(shape->edges, old_child->edge_name, (VALUE)old_child);
555+
rb_shape_t *old_child = SINGLE_CHILD(edges);
556+
edges = rb_id_table_create(2);
557+
rb_id_table_insert(edges, old_child->edge_name, (VALUE)old_child);
544558
}
545559

546-
rb_id_table_insert(shape->edges, new_shape->edge_name, (VALUE)new_shape);
560+
rb_id_table_insert(edges, new_shape->edge_name, (VALUE)new_shape);
547561
*variation_created = true;
548562
}
549563

564+
// We must use an atomic when setting the edges to ensure the writes
565+
// from rb_shape_alloc_new_child are committed.
566+
RUBY_ATOMIC_PTR_SET(shape->edges, edges);
567+
550568
res = new_shape;
551569
}
552570
}

0 commit comments

Comments
 (0)