 * This file is included by vm.c
 */
44
5- #ifndef GLOBAL_METHOD_CACHE_SIZE
6- #define GLOBAL_METHOD_CACHE_SIZE 0x800
7- #endif
8- #ifndef GLOBAL_METHOD_CACHE_MASK
9- #define GLOBAL_METHOD_CACHE_MASK 0x7ff
10- #endif
11-
12- #define GLOBAL_METHOD_CACHE_KEY (c ,m ) ((((c)>>3)^(m))&GLOBAL_METHOD_CACHE_MASK)
13- #define GLOBAL_METHOD_CACHE (c ,m ) (global_method_cache + GLOBAL_METHOD_CACHE_KEY(c,m))
145#include "method.h"
156
167#define NOEX_NOREDEF 0
@@ -30,14 +21,97 @@ static void rb_vm_check_redefinition_opt_method(const rb_method_entry_t *me, VAL
3021#define attached id__attached__
3122
struct cache_entry {
    ID mid;                 /* method id; 0 marks an empty slot */
    /* Tagged pointer: the low bit is a hash-collision marker (see
     * COLLISION below); the remaining bits are the rb_method_entry_t
     * pointer (see METHOD_ENTRY).  Relies on method entries having at
     * least 2-byte alignment -- TODO(review): confirm for all builds. */
    uintptr_t me;
    VALUE defined_class;
};

/* Strip the collision tag bit and recover the method-entry pointer. */
#define METHOD_ENTRY(entry) ((rb_method_entry_t*)((entry)->me & ~1))
/* Low bit of `me` set => some other id's probe sequence passed through
 * this slot, so a lookup miss here must keep probing. */
#define COLLISION(entry) ((entry).me & 1)
31+
static void rb_mcache_resize(struct rb_meth_cache *cache);

/*
 * Insert (id -> me, defined_class) into the per-class method cache.
 * Open addressing with double hashing: the initial slot and the probe
 * step are derived from `id` and must stay exactly in sync with
 * rb_mcache_find().  `me` is the tagged pointer value (low bit is
 * reserved for the COLLISION marker and must be clear on entry).
 * Assumes `id` is not already present -- callers insert only after a
 * cache miss.
 */
static void
rb_mcache_insert(struct rb_meth_cache *cache, ID id, uintptr_t me, VALUE defined_class)
{
    int mask, pos, dlt;
    struct cache_entry *ent;
    /* Grow before the load factor exceeds 3/4 so that empty slots
     * always exist and probing is guaranteed to terminate. */
    if (cache->capa / 4 * 3 <= cache->size) {
	rb_mcache_resize(cache);
    }
    mask = cache->capa - 1;     /* capa is a power of two */
    pos = (id >> 3) & mask;     /* drop low ID bits (tag/alignment noise) */

    ent = cache->entries;
    if (ent[pos].mid == 0) {    /* mid == 0 marks an empty slot */
	goto found;
    }
    ent[pos].me |= 1;           /* mark: a probe for some id passed through */
    dlt = (id % mask) | 1;      /* odd step => full cycle over a 2^n table */
    for (;;) {
	pos = (pos + dlt) & mask;
	if (ent[pos].mid == 0) {
	    goto found;
	}
	ent[pos].me |= 1;
    }
  found:
    ent += pos;
    ent->defined_class = defined_class;
    ent->mid = id;
    ent->me = me;
    cache->size++;
}
64+
65+ static void
66+ rb_mcache_resize (struct rb_meth_cache * cache )
67+ {
68+ struct rb_meth_cache tmp ;
69+ int i ;
70+
71+ MEMZERO (& tmp , struct rb_meth_cache , 1 );
72+ tmp .capa = cache -> capa * 2 ;
73+ tmp .entries = xcalloc (tmp .capa , sizeof (struct cache_entry ));
74+ for (i = 0 ; i < cache -> capa ; i ++ ) {
75+ if (cache -> entries [i ].mid ) {
76+ struct cache_entry * ent = & cache -> entries [i ];
77+ rb_mcache_insert (& tmp , ent -> mid , ent -> me & ~1 , ent -> defined_class );
78+ }
79+ }
80+ xfree (cache -> entries );
81+ * cache = tmp ;
82+ }
83+
84+ static inline void
85+ rb_mcache_reset (struct rb_meth_cache * cache , rb_serial_t class_serial )
86+ {
87+ cache -> method_state = GET_GLOBAL_METHOD_STATE ();
88+ cache -> class_serial = class_serial ;
89+ cache -> size = 0 ;
90+ if (cache -> entries != NULL ) {
91+ MEMZERO (cache -> entries , struct cache_entry , cache -> capa );
92+ } else {
93+ cache -> entries = xcalloc (8 , sizeof (struct cache_entry ));
94+ cache -> capa = 8 ;
95+ }
96+ }
97+
/*
 * Look up `id` in the per-class cache.  Returns the slot that holds it,
 * or NULL on a definitive miss.  The probe sequence (initial slot and
 * odd step) must mirror rb_mcache_insert() exactly.  A probed slot
 * without the COLLISION tag terminates the search: no insertion ever
 * stepped through it, so `id` cannot live further along the sequence.
 */
static inline struct cache_entry *
rb_mcache_find(struct rb_meth_cache *cache, ID id)
{
    struct cache_entry *ent = cache->entries;
    int mask = cache->capa - 1;     /* capa is a power of two */
    int pos = (id >> 3) & mask;
    int dlt;
    if (ent[pos].mid == id) return ent + pos;
    if (!COLLISION(ent[pos])) return NULL;
    dlt = (id % mask) | 1;          /* same odd step as rb_mcache_insert() */
    for (;;) {
	pos = (pos + dlt) & mask;
	if (ent[pos].mid == id) return ent + pos;
	if (!COLLISION(ent[pos])) return NULL;
    }
}
114+
41115#define ruby_running (GET_VM()->running)
42116/* int ruby_running = 0; */
43117
@@ -569,20 +643,11 @@ rb_method_entry_get_without_cache(VALUE klass, ID id,
569643 defined_class = me -> klass ;
570644
571645 if (ruby_running ) {
572- struct cache_entry * ent ;
573- ent = GLOBAL_METHOD_CACHE (klass , id );
574- ent -> class_serial = RCLASS_SERIAL (klass );
575- ent -> method_state = GET_GLOBAL_METHOD_STATE ();
576- ent -> defined_class = defined_class ;
577- ent -> mid = id ;
578-
646+ struct rb_classext_struct * ext = RCLASS_EXT (klass );
579647 if (UNDEFINED_METHOD_ENTRY_P (me )) {
580- ent -> me = 0 ;
581648 me = 0 ;
582649 }
583- else {
584- ent -> me = me ;
585- }
650+ rb_mcache_insert (& ext -> cache , id , (uintptr_t )me , defined_class );
586651 }
587652
588653 if (defined_class_ptr )
@@ -608,18 +673,22 @@ rb_method_entry_t *
608673rb_method_entry (VALUE klass , ID id , VALUE * defined_class_ptr )
609674{
610675#if OPT_GLOBAL_METHOD_CACHE
611- struct cache_entry * ent ;
612- ent = GLOBAL_METHOD_CACHE (klass , id );
613- if (ent -> method_state == GET_GLOBAL_METHOD_STATE () &&
614- ent -> class_serial == RCLASS_SERIAL (klass ) &&
615- ent -> mid == id ) {
676+ struct rb_classext_struct * ext = RCLASS_EXT (klass );
677+ if (ext -> cache .method_state != GET_GLOBAL_METHOD_STATE () ||
678+ ext -> cache .class_serial != ext -> class_serial ) {
679+ rb_mcache_reset (& ext -> cache , ext -> class_serial );
680+ } else {
681+ struct cache_entry * ent ;
682+ ent = rb_mcache_find (& ext -> cache , id );
683+ if (ent == NULL ) goto not_found ;
616684 if (defined_class_ptr )
617685 * defined_class_ptr = ent -> defined_class ;
618686#if VM_DEBUG_VERIFY_METHOD_CACHE
619687 verify_method_cache (klass , id , ent -> defined_class , ent -> me );
620688#endif
621- return ent -> me ;
689+ return METHOD_ENTRY ( ent ) ;
622690 }
691+ not_found :
623692#endif
624693
625694 return rb_method_entry_get_without_cache (klass , id , defined_class_ptr );