@@ -62,10 +62,21 @@ impl GlobalStateInner {
    }
}

-impl<'mir, 'tcx> GlobalStateInner {
+/// Shifts `addr` to make it aligned with `align` by rounding `addr` to the smallest multiple
+/// of `align` that is larger or equal to `addr`
+fn align_addr(addr: u64, align: u64) -> u64 {
+    match addr % align {
+        0 => addr,
+        rem => addr.checked_add(align).unwrap() - rem,
+    }
+}
+
+impl<'mir, 'tcx: 'mir> EvalContextExtPriv<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
+trait EvalContextExtPriv<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
    // Returns the exposed `AllocId` that corresponds to the specified addr,
    // or `None` if the addr is out of bounds
-    fn alloc_id_from_addr(ecx: &MiriInterpCx<'mir, 'tcx>, addr: u64) -> Option<AllocId> {
+    fn alloc_id_from_addr(&self, addr: u64) -> Option<AllocId> {
+        let ecx = self.eval_context_ref();
        let global_state = ecx.machine.intptrcast.borrow();
        assert!(global_state.provenance_mode != ProvenanceMode::Strict);

@@ -105,70 +116,8 @@ impl<'mir, 'tcx> GlobalStateInner {
        None
    }

-    pub fn expose_ptr(
-        ecx: &mut MiriInterpCx<'mir, 'tcx>,
-        alloc_id: AllocId,
-        tag: BorTag,
-    ) -> InterpResult<'tcx> {
-        let global_state = ecx.machine.intptrcast.get_mut();
-        // In strict mode, we don't need this, so we can save some cycles by not tracking it.
-        if global_state.provenance_mode != ProvenanceMode::Strict {
-            trace!("Exposing allocation id {alloc_id:?}");
-            global_state.exposed.insert(alloc_id);
-            if ecx.machine.borrow_tracker.is_some() {
-                ecx.expose_tag(alloc_id, tag)?;
-            }
-        }
-        Ok(())
-    }
-
-    pub fn ptr_from_addr_transmute(
-        _ecx: &MiriInterpCx<'mir, 'tcx>,
-        addr: u64,
-    ) -> Pointer<Option<Provenance>> {
-        trace!("Transmuting {:#x} to a pointer", addr);
-
-        // We consider transmuted pointers to be "invalid" (`None` provenance).
-        Pointer::new(None, Size::from_bytes(addr))
-    }
-
-    pub fn ptr_from_addr_cast(
-        ecx: &MiriInterpCx<'mir, 'tcx>,
-        addr: u64,
-    ) -> InterpResult<'tcx, Pointer<Option<Provenance>>> {
-        trace!("Casting {:#x} to a pointer", addr);
-
-        let global_state = ecx.machine.intptrcast.borrow();
-
-        match global_state.provenance_mode {
-            ProvenanceMode::Default => {
-                // The first time this happens at a particular location, print a warning.
-                thread_local! {
-                    // `Span` is non-`Send`, so we use a thread-local instead.
-                    static PAST_WARNINGS: RefCell<FxHashSet<Span>> = RefCell::default();
-                }
-                PAST_WARNINGS.with_borrow_mut(|past_warnings| {
-                    let first = past_warnings.is_empty();
-                    if past_warnings.insert(ecx.cur_span()) {
-                        // Newly inserted, so first time we see this span.
-                        ecx.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
-                    }
-                });
-            }
-            ProvenanceMode::Strict => {
-                throw_machine_stop!(TerminationInfo::Int2PtrWithStrictProvenance);
-            }
-            ProvenanceMode::Permissive => {}
-        }
-
-        // This is how wildcard pointers are born.
-        Ok(Pointer::new(Some(Provenance::Wildcard), Size::from_bytes(addr)))
-    }
-
-    fn alloc_base_addr(
-        ecx: &MiriInterpCx<'mir, 'tcx>,
-        alloc_id: AllocId,
-    ) -> InterpResult<'tcx, u64> {
+    fn addr_from_alloc_id(&self, alloc_id: AllocId) -> InterpResult<'tcx, u64> {
+        let ecx = self.eval_context_ref();
        let mut global_state = ecx.machine.intptrcast.borrow_mut();
        let global_state = &mut *global_state;

@@ -191,7 +140,7 @@ impl<'mir, 'tcx> GlobalStateInner {
                    .next_base_addr
                    .checked_add(slack)
                    .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
-                let base_addr = Self::align_addr(base_addr, align.bytes());
+                let base_addr = align_addr(base_addr, align.bytes());
                entry.insert(base_addr);
                trace!(
                    "Assigning base address {:#x} to allocation {:?} (size: {}, align: {}, slack: {})",
@@ -221,14 +170,61 @@ impl<'mir, 'tcx> GlobalStateInner {
            }
        })
    }
+}
+
+impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
+pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
+    fn expose_ptr(&mut self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
+        let ecx = self.eval_context_mut();
+        let global_state = ecx.machine.intptrcast.get_mut();
+        // In strict mode, we don't need this, so we can save some cycles by not tracking it.
+        if global_state.provenance_mode != ProvenanceMode::Strict {
+            trace!("Exposing allocation id {alloc_id:?}");
+            global_state.exposed.insert(alloc_id);
+            if ecx.machine.borrow_tracker.is_some() {
+                ecx.expose_tag(alloc_id, tag)?;
+            }
+        }
+        Ok(())
+    }
+
+    fn ptr_from_addr_cast(&self, addr: u64) -> InterpResult<'tcx, Pointer<Option<Provenance>>> {
+        trace!("Casting {:#x} to a pointer", addr);
+
+        let ecx = self.eval_context_ref();
+        let global_state = ecx.machine.intptrcast.borrow();
+
+        match global_state.provenance_mode {
+            ProvenanceMode::Default => {
+                // The first time this happens at a particular location, print a warning.
+                thread_local! {
+                    // `Span` is non-`Send`, so we use a thread-local instead.
+                    static PAST_WARNINGS: RefCell<FxHashSet<Span>> = RefCell::default();
+                }
+                PAST_WARNINGS.with_borrow_mut(|past_warnings| {
+                    let first = past_warnings.is_empty();
+                    if past_warnings.insert(ecx.cur_span()) {
+                        // Newly inserted, so first time we see this span.
+                        ecx.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
+                    }
+                });
+            }
+            ProvenanceMode::Strict => {
+                throw_machine_stop!(TerminationInfo::Int2PtrWithStrictProvenance);
+            }
+            ProvenanceMode::Permissive => {}
+        }
+
+        // This is how wildcard pointers are born.
+        Ok(Pointer::new(Some(Provenance::Wildcard), Size::from_bytes(addr)))
+    }

    /// Convert a relative (tcx) pointer to an absolute address.
-    pub fn rel_ptr_to_addr(
-        ecx: &MiriInterpCx<'mir, 'tcx>,
-        ptr: Pointer<AllocId>,
-    ) -> InterpResult<'tcx, u64> {
+    fn rel_ptr_to_addr(&self, ptr: Pointer<AllocId>) -> InterpResult<'tcx, u64> {
+        let ecx = self.eval_context_ref();
+
        let (alloc_id, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
-        let base_addr = GlobalStateInner::alloc_base_addr(ecx, alloc_id)?;
+        let base_addr = ecx.addr_from_alloc_id(alloc_id)?;

        // Add offset with the right kind of pointer-overflowing arithmetic.
        let dl = ecx.data_layout();
@@ -237,22 +233,21 @@ impl<'mir, 'tcx> GlobalStateInner {

    /// When a pointer is used for a memory access, this computes where in which allocation the
    /// access is going.
-    pub fn abs_ptr_to_rel(
-        ecx: &MiriInterpCx<'mir, 'tcx>,
-        ptr: Pointer<Provenance>,
-    ) -> Option<(AllocId, Size)> {
+    fn abs_ptr_to_rel(&self, ptr: Pointer<Provenance>) -> Option<(AllocId, Size)> {
+        let ecx = self.eval_context_ref();
+
        let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)

        let alloc_id = if let Provenance::Concrete { alloc_id, .. } = tag {
            alloc_id
        } else {
            // A wildcard pointer.
-            GlobalStateInner::alloc_id_from_addr(ecx, addr.bytes())?
+            ecx.alloc_id_from_addr(addr.bytes())?
        };

        // This cannot fail: since we already have a pointer with that provenance, rel_ptr_to_addr
        // must have been called in the past.
-        let base_addr = GlobalStateInner::alloc_base_addr(ecx, alloc_id).unwrap();
+        let base_addr = ecx.addr_from_alloc_id(alloc_id).unwrap();

        // Wrapping "addr - base_addr"
        let dl = ecx.data_layout();
@@ -263,15 +258,6 @@ impl<'mir, 'tcx> GlobalStateInner {
            Size::from_bytes(dl.overflowing_signed_offset(addr.bytes(), neg_base_addr).0),
        ))
    }
-
-    /// Shifts `addr` to make it aligned with `align` by rounding `addr` to the smallest multiple
-    /// of `align` that is larger or equal to `addr`
-    fn align_addr(addr: u64, align: u64) -> u64 {
-        match addr % align {
-            0 => addr,
-            rem => addr.checked_add(align).unwrap() - rem,
-        }
-    }
}

#[cfg(test)]
@@ -280,7 +266,7 @@ mod tests {

    #[test]
    fn test_align_addr() {
-        assert_eq!(GlobalStateInner::align_addr(37, 4), 40);
-        assert_eq!(GlobalStateInner::align_addr(44, 4), 44);
+        assert_eq!(align_addr(37, 4), 40);
+        assert_eq!(align_addr(44, 4), 44);
    }
}
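
Note on the pattern: the diff replaces associated functions on `GlobalStateInner` that took an explicit `ecx` argument with extension traits (`EvalContextExtPriv` for the private helpers, `EvalContextExt` for the public ones), implemented for `crate::MiriInterpCx`, so call sites become `ecx.addr_from_alloc_id(..)` instead of `GlobalStateInner::alloc_base_addr(ecx, ..)`. For readers unfamiliar with that extension-trait idiom, here is a minimal, self-contained sketch of the same shape using hypothetical names (`InterpCx`, `InterpCxExt`, `AddrMathExt`); it is an illustration only, not Miri's actual API.

// Sketch of the extension-trait idiom used above; all names here are hypothetical.

/// Stand-in for the interpreter context (the role `MiriInterpCx` plays in the diff).
struct InterpCx {
    next_base_addr: u64,
}

/// Counterpart of `MiriInterpCxExt`: lets trait methods reach the underlying context.
trait InterpCxExt {
    fn eval_context_ref(&self) -> &InterpCx;
    fn eval_context_mut(&mut self) -> &mut InterpCx;
}

impl InterpCxExt for InterpCx {
    fn eval_context_ref(&self) -> &InterpCx {
        self
    }
    fn eval_context_mut(&mut self) -> &mut InterpCx {
        self
    }
}

/// Free helper, like the `align_addr` the diff hoists out of the impl block:
/// rounds `addr` up to the next multiple of `align`.
fn align_addr(addr: u64, align: u64) -> u64 {
    match addr % align {
        0 => addr,
        rem => addr.checked_add(align).unwrap() - rem,
    }
}

/// Extension trait with default method bodies: former associated functions that took
/// an explicit context argument become `&self` methods that fetch the context themselves.
trait AddrMathExt: InterpCxExt {
    fn next_aligned_addr(&self, align: u64) -> u64 {
        let ecx = self.eval_context_ref();
        align_addr(ecx.next_base_addr, align)
    }
}

/// Empty impl: the context type picks up every default method, mirroring
/// `impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}`.
impl AddrMathExt for InterpCx {}

fn main() {
    let ecx = InterpCx { next_base_addr: 37 };
    // Method-call syntax replaces the old `SomeType::helper(ecx, ...)` call shape.
    assert_eq!(ecx.next_aligned_addr(4), 40);
}

The payoff in the real diff is the same as in this sketch: callers go through the interpreter context directly, and the lifetime plumbing lives on the trait rather than on every helper's signature.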