@@ -2761,33 +2761,73 @@ class LiftoffCompiler {
     return index;
   }
 
+  bool IndexStaticallyInBounds(const LiftoffAssembler::VarState& index_slot,
+                               int access_size, uintptr_t* offset) {
+    if (!index_slot.is_const()) return false;
+
+    // Potentially zero extend index (which is a 32-bit constant).
+    const uintptr_t index = static_cast<uint32_t>(index_slot.i32_const());
+    const uintptr_t effective_offset = index + *offset;
+
+    if (effective_offset < index  // overflow
+        || !base::IsInBounds<uintptr_t>(effective_offset, access_size,
+                                        env_->min_memory_size)) {
+      return false;
+    }
+
+    *offset = effective_offset;
+    return true;
+  }
+
   void LoadMem(FullDecoder* decoder, LoadType type,
                const MemoryAccessImmediate<validate>& imm,
                const Value& index_val, Value* result) {
     ValueKind kind = type.value_type().kind();
+    RegClass rc = reg_class_for(kind);
     if (!CheckSupportedType(decoder, kind, "load")) return;
-    LiftoffRegister full_index = __ PopToRegister();
-    Register index = BoundsCheckMem(decoder, type.size(), imm.offset,
-                                    full_index, {}, kDontForceCheck);
-    if (index == no_reg) return;
 
     uintptr_t offset = imm.offset;
-    LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
-    index = AddMemoryMasking(index, &offset, &pinned);
-    DEBUG_CODE_COMMENT("load from memory");
-    Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
-    LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
-    RegClass rc = reg_class_for(kind);
-    LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
-    uint32_t protected_load_pc = 0;
-    __ Load(value, addr, index, offset, type, pinned, &protected_load_pc, true);
-    if (env_->use_trap_handler) {
-      AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
-                       protected_load_pc);
+    Register index = no_reg;
+
+    // Only look at the slot, do not pop it yet (will happen in PopToRegister
+    // below, if this is not a statically-in-bounds index).
+    auto& index_slot = __ cache_state()->stack_state.back();
+    if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) {
+      __ cache_state()->stack_state.pop_back();
+      DEBUG_CODE_COMMENT("load from memory (constant offset)");
+      LiftoffRegList pinned;
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
+      __ Load(value, mem, no_reg, offset, type, pinned, nullptr, true);
+      __ PushRegister(kind, value);
+    } else {
+      LiftoffRegister full_index = __ PopToRegister();
+      index = BoundsCheckMem(decoder, type.size(), offset, full_index, {},
+                             kDontForceCheck);
+      if (index == no_reg) return;
+
+      DEBUG_CODE_COMMENT("load from memory");
+      LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
+      index = AddMemoryMasking(index, &offset, &pinned);
+
+      // Load the memory start address only now to reduce register pressure
+      // (important on ia32).
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
+
+      uint32_t protected_load_pc = 0;
+      __ Load(value, mem, index, offset, type, pinned, &protected_load_pc,
+              true);
+      if (env_->use_trap_handler) {
+        AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
+                         protected_load_pc);
+      }
+      __ PushRegister(kind, value);
     }
-    __ PushRegister(kind, value);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(false, type.mem_type().representation(), index,
                            offset, decoder->position());
     }
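The new helper folds a constant index into the static offset at compile time: if the whole access fits below the module's minimum memory size, the load needs neither a runtime bounds check nor an index register. Below is a standalone sketch of that arithmetic; IsInBounds mirrors the assumed semantics of base::IsInBounds from src/base/bounds.h ("does [index, index + length) fit into [0, max) without overflow?"), and StaticallyInBounds and the harness are illustrative, not V8 code.

#include <cstdint>
#include <cstdio>

// Assumed semantics of base::IsInBounds, written overflow-free.
template <typename T>
constexpr bool IsInBounds(T index, T length, T max) {
  return length <= max && index <= max - length;
}

// Mirrors IndexStaticallyInBounds: fold a constant i32 index into the
// static offset if the whole access fits in the minimum memory size.
bool StaticallyInBounds(uint32_t index, uintptr_t access_size,
                        uintptr_t* offset, uintptr_t min_memory_size) {
  const uintptr_t effective_offset = static_cast<uintptr_t>(index) + *offset;
  if (effective_offset < index ||  // the addition overflowed
      !IsInBounds<uintptr_t>(effective_offset, access_size, min_memory_size)) {
    return false;
  }
  *offset = effective_offset;
  return true;
}

int main() {
  // (i32.const 4) (i32.load offset=8): a 4-byte access at effective offset 12.
  // With a minimum memory of one wasm page (64 KiB), the check folds away.
  uintptr_t offset = 8;
  printf("%d\n", StaticallyInBounds(4, 4, &offset, 64 * 1024));  // prints 1
  printf("%zu\n", static_cast<size_t>(offset));                  // prints 12
}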
@@ -2830,7 +2870,7 @@ class LiftoffCompiler {
     }
     __ PushRegister(kS128, value);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       // Again load extend is different.
       MachineRepresentation mem_rep =
           transform == LoadTransformationKind::kExtend
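The tracing checks here and in the hunks below are wrapped in V8_UNLIKELY, which on GCC/Clang lowers to __builtin_expect so the compiler keeps the rarely-taken tracing branch off the hot path. A minimal stand-in for the macro, assuming it matches the feature-detected definition in include/v8config.h:

// Minimal stand-in; the real macro in include/v8config.h is feature-detected.
#if defined(__GNUC__) || defined(__clang__)
#define V8_UNLIKELY(condition) (__builtin_expect(!!(condition), 0))
#else
#define V8_UNLIKELY(condition) (condition)
#endif

bool FLAG_trace_wasm_memory = false;  // stand-in for the real flag

void MaybeTrace() {
  if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
    // Slow tracing path; laid out as the cold branch.
  }
}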
@@ -2872,7 +2912,7 @@ class LiftoffCompiler {
 
     __ PushRegister(kS128, result);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(false, type.mem_type().representation(), index,
                            offset, decoder->position());
     }
@@ -2883,29 +2923,45 @@ class LiftoffCompiler {
                 const Value& index_val, const Value& value_val) {
     ValueKind kind = type.value_type().kind();
     if (!CheckSupportedType(decoder, kind, "store")) return;
+
     LiftoffRegList pinned;
     LiftoffRegister value = pinned.set(__ PopToRegister());
-    LiftoffRegister full_index = __ PopToRegister(pinned);
-    Register index = BoundsCheckMem(decoder, type.size(), imm.offset,
-                                    full_index, pinned, kDontForceCheck);
-    if (index == no_reg) return;
 
     uintptr_t offset = imm.offset;
-    pinned.set(index);
-    index = AddMemoryMasking(index, &offset, &pinned);
-    DEBUG_CODE_COMMENT("store to memory");
-    Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
-    LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
-    uint32_t protected_store_pc = 0;
-    LiftoffRegList outer_pinned;
-    if (FLAG_trace_wasm_memory) outer_pinned.set(index);
-    __ Store(addr, index, offset, value, type, outer_pinned,
-             &protected_store_pc, true);
-    if (env_->use_trap_handler) {
-      AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
-                       protected_store_pc);
+    Register index = no_reg;
+
+    auto& index_slot = __ cache_state()->stack_state.back();
+    if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) {
+      __ cache_state()->stack_state.pop_back();
+      DEBUG_CODE_COMMENT("store to memory (constant offset)");
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      __ Store(mem, no_reg, offset, value, type, pinned, nullptr, true);
+    } else {
+      LiftoffRegister full_index = __ PopToRegister(pinned);
+      index = BoundsCheckMem(decoder, type.size(), imm.offset, full_index,
+                             pinned, kDontForceCheck);
+      if (index == no_reg) return;
+
+      pinned.set(index);
+      index = AddMemoryMasking(index, &offset, &pinned);
+      DEBUG_CODE_COMMENT("store to memory");
+      uint32_t protected_store_pc = 0;
+      // Load the memory start address only now to reduce register pressure
+      // (important on ia32).
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      LiftoffRegList outer_pinned;
+      if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index);
+      __ Store(mem, index, offset, value, type, outer_pinned,
+               &protected_store_pc, true);
+      if (env_->use_trap_handler) {
+        AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
+                         protected_store_pc);
+      }
     }
-    if (FLAG_trace_wasm_memory) {
+
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(true, type.mem_rep(), index, offset,
                            decoder->position());
     }
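StoreMem pops the stored value eagerly but only peeks at the index slot: on the constant path the slot is popped without ever touching a register, while on the dynamic path PopToRegister() consumes it and yields the register for the bounds check. A hypothetical mini-model of that peek-then-pop dispatch; Slot, EmitStore, and the constants are illustrative stand-ins, not the real Liftoff types:

#include <cstdint>
#include <cstdio>
#include <optional>
#include <vector>

// Illustrative stand-in for a Liftoff value-stack slot: either a known i32
// constant or a dynamic value that must live in a register.
struct Slot {
  std::optional<int32_t> i32_const;
  bool is_const() const { return i32_const.has_value(); }
};

void EmitStore(std::vector<Slot>& stack, uintptr_t imm_offset,
               uintptr_t min_memory_size) {
  stack.pop_back();  // pop the stored value first (dropped in this model)
  const Slot& index_slot = stack.back();  // peek, do not pop yet
  constexpr uintptr_t kAccessSize = 4;
  if (index_slot.is_const()) {
    const uint32_t index = static_cast<uint32_t>(*index_slot.i32_const);
    const uintptr_t effective = index + imm_offset;
    if (effective >= index &&  // no overflow
        kAccessSize <= min_memory_size &&
        effective <= min_memory_size - kAccessSize) {
      stack.pop_back();  // constant consumed: no index register, no check
      printf("store at constant offset %zu\n", static_cast<size_t>(effective));
      return;
    }
  }
  stack.pop_back();  // stands in for PopToRegister(): slot -> index register
  printf("store via index register + runtime bounds check\n");
}

int main() {
  std::vector<Slot> stack = {Slot{4}, Slot{std::nullopt}};  // index, value
  EmitStore(stack, 8, 64 * 1024);  // prints: store at constant offset 12
}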
@@ -2934,7 +2990,7 @@ class LiftoffCompiler {
       AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
                        protected_store_pc);
     }
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(true, type.mem_rep(), index, offset,
                            decoder->position());
     }
@@ -4179,9 +4235,9 @@ class LiftoffCompiler {
     Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
     LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
     LiftoffRegList outer_pinned;
-    if (FLAG_trace_wasm_memory) outer_pinned.set(index);
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index);
     __ AtomicStore(addr, index, offset, value, type, outer_pinned);
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(true, type.mem_rep(), index, offset,
                            decoder->position());
     }
@@ -4207,7 +4263,7 @@ class LiftoffCompiler {
     __ AtomicLoad(value, addr, index, offset, type, pinned);
     __ PushRegister(kind, value);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(false, type.mem_type().representation(), index,
                            offset, decoder->position());
     }