Skip to content

Commit 00d316f

Browse files
authored
Resolve issues for unit tests on 32 bits (#1095)
This PR resolves a few issues we recently saw when running unit tests in our CI. * Fix an issue in the tests for the byte map mmapper. In the cleanup step, we called munmap on a larger area than what we actually mmapped in the test. This caused an issue where we might munmap regions that are used by other code. #1092 is caused by this -- the munmap after the tests unmaps memory used by Rust, causing Rust to segfault after the tests. * Change the test address for the mmap tests to reduce the chance that mmap fails in the tests. * ~~Make raw_memory_freelist compiled only on 64 bits. This mitigates the issue we saw in #1091 for the raw_memory_freelist tests.~~
1 parent 86a94ca commit 00d316f

File tree

2 files changed

+35
-26
lines changed

2 files changed

+35
-26
lines changed

src/util/heap/layout/byte_map_mmapper.rs

Lines changed: 33 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -229,16 +229,16 @@ mod tests {
229229
#[test]
230230
fn ensure_mapped_1page() {
231231
serial_test(|| {
232+
let pages = 1;
233+
let start_chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
234+
let end_chunk =
235+
ByteMapMmapper::address_to_mmap_chunks_up(FIXED_ADDRESS + pages_to_bytes(pages));
236+
let test_memory_bytes = (end_chunk - start_chunk) * MMAP_CHUNK_BYTES;
232237
with_cleanup(
233238
|| {
234239
let mmapper = ByteMapMmapper::new();
235-
let pages = 1;
236240
mmapper.ensure_mapped(FIXED_ADDRESS, pages).unwrap();
237241

238-
let start_chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
239-
let end_chunk = ByteMapMmapper::address_to_mmap_chunks_up(
240-
FIXED_ADDRESS + pages_to_bytes(pages),
241-
);
242242
for chunk in start_chunk..end_chunk {
243243
assert_eq!(
244244
mmapper.mapped[chunk].load(Ordering::Relaxed),
@@ -247,7 +247,7 @@ mod tests {
247247
}
248248
},
249249
|| {
250-
memory::munmap(FIXED_ADDRESS, MAX_SIZE).unwrap();
250+
memory::munmap(FIXED_ADDRESS, test_memory_bytes).unwrap();
251251
},
252252
)
253253
})
@@ -256,16 +256,16 @@ mod tests {
256256
#[test]
257257
fn ensure_mapped_1chunk() {
258258
serial_test(|| {
259+
let pages = MMAP_CHUNK_BYTES >> LOG_BYTES_IN_PAGE as usize;
260+
let start_chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
261+
let end_chunk =
262+
ByteMapMmapper::address_to_mmap_chunks_up(FIXED_ADDRESS + pages_to_bytes(pages));
263+
let test_memory_bytes = (end_chunk - start_chunk) * MMAP_CHUNK_BYTES;
259264
with_cleanup(
260265
|| {
261266
let mmapper = ByteMapMmapper::new();
262-
let pages = MMAP_CHUNK_BYTES >> LOG_BYTES_IN_PAGE as usize;
263267
mmapper.ensure_mapped(FIXED_ADDRESS, pages).unwrap();
264268

265-
let start_chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
266-
let end_chunk = ByteMapMmapper::address_to_mmap_chunks_up(
267-
FIXED_ADDRESS + pages_to_bytes(pages),
268-
);
269269
for chunk in start_chunk..end_chunk {
270270
assert_eq!(
271271
mmapper.mapped[chunk].load(Ordering::Relaxed),
@@ -274,7 +274,7 @@ mod tests {
274274
}
275275
},
276276
|| {
277-
memory::munmap(FIXED_ADDRESS, MAX_SIZE).unwrap();
277+
memory::munmap(FIXED_ADDRESS, test_memory_bytes).unwrap();
278278
},
279279
)
280280
})
@@ -283,11 +283,14 @@ mod tests {
283283
#[test]
284284
fn ensure_mapped_more_than_1chunk() {
285285
serial_test(|| {
286+
let pages = (MMAP_CHUNK_BYTES + MMAP_CHUNK_BYTES / 2) >> LOG_BYTES_IN_PAGE as usize;
287+
let start_chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
288+
let end_chunk =
289+
ByteMapMmapper::address_to_mmap_chunks_up(FIXED_ADDRESS + pages_to_bytes(pages));
290+
let test_memory_bytes = (end_chunk - start_chunk) * MMAP_CHUNK_BYTES;
286291
with_cleanup(
287292
|| {
288293
let mmapper = ByteMapMmapper::new();
289-
let pages =
290-
(MMAP_CHUNK_BYTES + MMAP_CHUNK_BYTES / 2) >> LOG_BYTES_IN_PAGE as usize;
291294
mmapper.ensure_mapped(FIXED_ADDRESS, pages).unwrap();
292295

293296
let start_chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
@@ -303,7 +306,7 @@ mod tests {
303306
}
304307
},
305308
|| {
306-
memory::munmap(FIXED_ADDRESS, MAX_SIZE).unwrap();
309+
memory::munmap(FIXED_ADDRESS, test_memory_bytes).unwrap();
307310
},
308311
)
309312
})
@@ -312,17 +315,20 @@ mod tests {
312315
#[test]
313316
fn protect() {
314317
serial_test(|| {
318+
let test_memory_bytes = MMAP_CHUNK_BYTES * 2;
319+
let test_memory_pages = test_memory_bytes >> LOG_BYTES_IN_PAGE;
320+
let protect_memory_bytes = MMAP_CHUNK_BYTES;
321+
let protect_memory_pages = protect_memory_bytes >> LOG_BYTES_IN_PAGE;
315322
with_cleanup(
316323
|| {
317324
// map 2 chunks
318325
let mmapper = ByteMapMmapper::new();
319-
let pages_per_chunk = MMAP_CHUNK_BYTES >> LOG_BYTES_IN_PAGE as usize;
320326
mmapper
321-
.ensure_mapped(FIXED_ADDRESS, pages_per_chunk * 2)
327+
.ensure_mapped(FIXED_ADDRESS, test_memory_pages)
322328
.unwrap();
323329

324330
// protect 1 chunk
325-
mmapper.protect(FIXED_ADDRESS, pages_per_chunk);
331+
mmapper.protect(FIXED_ADDRESS, protect_memory_pages);
326332

327333
let chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
328334
assert_eq!(
@@ -335,7 +341,7 @@ mod tests {
335341
);
336342
},
337343
|| {
338-
memory::munmap(FIXED_ADDRESS, MAX_SIZE).unwrap();
344+
memory::munmap(FIXED_ADDRESS, test_memory_bytes).unwrap();
339345
},
340346
)
341347
})
@@ -344,17 +350,20 @@ mod tests {
344350
#[test]
345351
fn ensure_mapped_on_protected_chunks() {
346352
serial_test(|| {
353+
let test_memory_bytes = MMAP_CHUNK_BYTES * 2;
354+
let test_memory_pages = test_memory_bytes >> LOG_BYTES_IN_PAGE;
355+
let protect_memory_pages_1 = MMAP_CHUNK_BYTES >> LOG_BYTES_IN_PAGE; // protect one chunk in the first protect
356+
let protect_memory_pages_2 = test_memory_pages; // protect both chunks in the second protect
347357
with_cleanup(
348358
|| {
349359
// map 2 chunks
350360
let mmapper = ByteMapMmapper::new();
351-
let pages_per_chunk = MMAP_CHUNK_BYTES >> LOG_BYTES_IN_PAGE as usize;
352361
mmapper
353-
.ensure_mapped(FIXED_ADDRESS, pages_per_chunk * 2)
362+
.ensure_mapped(FIXED_ADDRESS, test_memory_pages)
354363
.unwrap();
355364

356365
// protect 1 chunk
357-
mmapper.protect(FIXED_ADDRESS, pages_per_chunk);
366+
mmapper.protect(FIXED_ADDRESS, protect_memory_pages_1);
358367

359368
let chunk = ByteMapMmapper::address_to_mmap_chunks_down(FIXED_ADDRESS);
360369
assert_eq!(
@@ -368,7 +377,7 @@ mod tests {
368377

369378
// ensure mapped - this will unprotect the previously protected chunk
370379
mmapper
371-
.ensure_mapped(FIXED_ADDRESS, pages_per_chunk * 2)
380+
.ensure_mapped(FIXED_ADDRESS, protect_memory_pages_2)
372381
.unwrap();
373382
assert_eq!(
374383
mmapper.mapped[chunk].load(Ordering::Relaxed),
@@ -380,7 +389,7 @@ mod tests {
380389
);
381390
},
382391
|| {
383-
memory::munmap(FIXED_ADDRESS, MAX_SIZE).unwrap();
392+
memory::munmap(FIXED_ADDRESS, test_memory_bytes).unwrap();
384393
},
385394
)
386395
})

src/util/test_util/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,10 +46,10 @@ mod test {
4646
}
4747
}
4848

49-
// Test with an address that works for 32bits.
49+
// Test with an address that works for 32bits. The address is chosen empirically.
5050
#[cfg(target_os = "linux")]
5151
const TEST_ADDRESS: Address =
52-
crate::util::conversions::chunk_align_down(unsafe { Address::from_usize(0x6000_0000) });
52+
crate::util::conversions::chunk_align_down(unsafe { Address::from_usize(0x7000_0000) });
5353
#[cfg(target_os = "macos")]
5454
const TEST_ADDRESS: Address =
5555
crate::util::conversions::chunk_align_down(unsafe { Address::from_usize(0x2_0000_0000) });

0 commit comments

Comments
 (0)