@@ -32,13 +32,15 @@ pub struct CallCache {
3232 map : HashMap < SmolStr , ( usize , usize ) > ,
3333 memoizable : HashSet < ( usize , usize ) > ,
3434 memo : HashMap < ( usize , usize ) , HashMap < Vec < MemoKey > , Value > > ,
35+ const_map : Vec < Vec < Option < ( usize , usize ) > > > ,
3536}
3637
3738impl CallCache {
// Constructs the CallCache from the loaded metadata units.
// Builds three/four lookup structures:
//   - `map`: function path string -> (unit_index, func_index)
//   - `memoizable`: functions eligible for result memoization
//   - `memo`: per-function memoization tables (starts empty)
//   - `const_map`: per-unit pre-resolution of constant-pool `Value::Ref`
//     entries, so `CallConst` dispatch can skip the string lookup.
// NOTE(review): the statements that populate `map` and `memoizable` fall in
// a diff-elided region below (the `@@` gap) and are not visible here.
3839 pub fn new ( units : & [ MetadataUnit ] ) -> Self {
3940 let mut map = HashMap :: new ( ) ;
4041 let mut memoizable = HashSet :: new ( ) ;
4142 let mut memo = HashMap :: new ( ) ;
// Pre-sized: exactly one row of constant-pool resolutions per unit.
43+ let mut const_map: Vec < Vec < Option < ( usize , usize ) > > > = Vec :: with_capacity ( units. len ( ) ) ;
4244 for ( unit_index, unit) in units. iter ( ) . enumerate ( ) {
4345 for ( func_index, func) in unit. methods . iter ( ) . enumerate ( ) {
// Fully-qualified function path, formatted as "<unit names>/<func name>".
4446 let self_path = format_smolstr ! ( "{}/{}" , unit. names, func. name) ;
@@ -50,17 +52,40 @@ impl CallCache {
5052 }
5153 }
5254 }
// Second pass — must run after `map` is fully populated: pre-resolve every
// constant-pool entry that is a `Value::Ref` to its (unit_index, func_index)
// target. Non-ref constants and unresolvable paths stay `None`.
55+ for unit in units {
56+ let mut vec = vec ! [ None ; unit. constant_table. len( ) ] ;
57+ for ( idx, value) in unit. constant_table . iter ( ) . enumerate ( ) {
58+ if let Value :: Ref ( path) = value
59+ && let Some ( target) = map. get ( path)
60+ {
61+ vec[ idx] = Some ( * target) ;
62+ }
63+ }
64+ const_map. push ( vec) ;
65+ }
5366 Self {
5467 map,
5568 memoizable,
5669 memo,
70+ const_map,
5771 }
5872 }
5973
6074 pub fn resolve ( & self , path : & SmolStr ) -> Option < ( usize , usize ) > {
6175 self . map . get ( path) . copied ( )
6276 }
6377
78+ pub fn resolve_const (
79+ & self ,
80+ unit_index : usize ,
81+ const_index : usize ,
82+ ) -> Option < ( usize , usize ) > {
83+ self . const_map
84+ . get ( unit_index)
85+ . and_then ( |vec| vec. get ( const_index) . copied ( ) )
86+ . flatten ( )
87+ }
88+
6489 pub fn is_memoizable ( & self , unit_index : usize , func_index : usize ) -> bool {
6590 self . memoizable . contains ( & ( unit_index, func_index) )
6691 }
@@ -124,6 +149,12 @@ fn is_pure_self_recursive(
124149 _ => return false ,
125150 }
126151 }
152+ ByteCode :: CallConst ( const_index) => {
153+ match unit. constant_table . get ( * const_index) {
154+ Some ( Value :: Ref ( path) ) if path == self_path => { }
155+ _ => return false ,
156+ }
157+ }
127158 _ => { }
128159 }
129160 }
@@ -259,6 +290,112 @@ pub fn call_func<'a>(
259290 Ok ( RunState :: CallRequest ( frame) )
260291}
261292
293+ pub fn call_const < ' a > (
294+ stack_frame : & mut StackFrame ,
295+ units : & ' a [ MetadataUnit ] ,
296+ call_cache : & CallCache ,
297+ sync_table : & SyncTable ,
298+ const_index : usize ,
299+ ) -> Result < RunState < ' a > , RuntimeError > {
300+ let current_unit = stack_frame. get_unit_index ( ) ;
301+ let mut path_ref: Option < SmolStr > = None ;
302+
303+ let ( unit_index, func_index) = if let Some ( target) =
304+ call_cache. resolve_const ( current_unit, const_index)
305+ {
306+ target
307+ } else {
308+ let Some ( Value :: Ref ( path) ) = stack_frame. get_const ( const_index) else {
309+ return Err ( RuntimeError :: VMError ) ;
310+ } ;
311+ path_ref = Some ( path. clone ( ) ) ;
312+ let Some ( target) = call_cache. resolve ( path) else {
313+ return Err ( RuntimeError :: NoSuchFunctionException ( path. clone ( ) ) ) ;
314+ } ;
315+ target
316+ } ;
317+
318+ let unit = & units[ unit_index] ;
319+ let func = & unit. methods [ func_index] ;
320+ let codes = func. get_codes ( ) ;
321+ let sync_locked = sync_table. lock_if_sync ( unit_index, func_index) ;
322+
323+ if call_cache. is_memoizable ( unit_index, func_index)
324+ && let Some ( args) = stack_frame. peek_args ( func. args )
325+ && let Some ( key) = CallCache :: make_key ( args)
326+ {
327+ if let Some ( value) = call_cache. get_memo ( unit_index, func_index, & key) {
328+ for _ in 0 ..func. args {
329+ let _ = stack_frame. pop_op_stack ( ) ;
330+ }
331+ stack_frame. push_op_stack ( value) ;
332+ stack_frame. next_pc ( ) ;
333+ if sync_locked {
334+ sync_table. unlock ( unit_index, func_index) ;
335+ }
336+ return Ok ( RunState :: Continue ) ;
337+ }
338+ let native = if func. is_native {
339+ let path = if let Some ( path) = path_ref {
340+ path
341+ } else {
342+ let Some ( Value :: Ref ( path) ) = stack_frame. get_const ( const_index) else {
343+ return Err ( RuntimeError :: VMError ) ;
344+ } ;
345+ path. clone ( )
346+ } ;
347+ Some ( path)
348+ } else {
349+ None
350+ } ;
351+ let mut frame = StackFrame :: new (
352+ unit_index,
353+ func. locals ,
354+ codes,
355+ unit. constant_table ,
356+ func. name . as_str ( ) ,
357+ func. r_name . as_str ( ) ,
358+ native,
359+ func. args ,
360+ ) ;
361+ frame. set_memo ( ( unit_index, func_index) , key) ;
362+ if sync_locked {
363+ frame. set_sync_lock ( ( unit_index, func_index) ) ;
364+ }
365+ stack_frame. next_pc ( ) ;
366+ return Ok ( RunState :: CallRequest ( frame) ) ;
367+ }
368+
369+ stack_frame. next_pc ( ) ;
370+ let native = if func. is_native {
371+ let path = if let Some ( path) = path_ref {
372+ path
373+ } else {
374+ let Some ( Value :: Ref ( path) ) = stack_frame. get_const ( const_index) else {
375+ return Err ( RuntimeError :: VMError ) ;
376+ } ;
377+ path. clone ( )
378+ } ;
379+ Some ( path)
380+ } else {
381+ None
382+ } ;
383+ let mut frame = StackFrame :: new (
384+ unit_index,
385+ func. locals ,
386+ codes,
387+ unit. constant_table ,
388+ func. name . as_str ( ) ,
389+ func. r_name . as_str ( ) ,
390+ native,
391+ func. args ,
392+ ) ;
393+ if sync_locked {
394+ frame. set_sync_lock ( ( unit_index, func_index) ) ;
395+ }
396+ Ok ( RunState :: CallRequest ( frame) )
397+ }
398+
262399pub fn load_array_local ( stack_frame : & mut StackFrame , len : usize , index : usize ) {
263400 let mut elements: Vec < Value > = Vec :: new ( ) ;
264401 for _ in 0 ..len {
0 commit comments