[ruby/mmtk] Fix clippy warnings and formatting.

We also enable `#![warn(unsafe_op_in_unsafe_fn)]` in the whole mmtk_ruby
crate.

https://github.com/ruby/mmtk/commit/8b8025f71a
This commit is contained in:
Kunshan Wang 2025-05-28 14:28:19 +08:00 committed by git
parent d8774ec98f
commit 94688bdc7d
7 changed files with 100 additions and 74 deletions

View File

@@ -230,7 +230,7 @@ impl GCThreadTLS {
/// Has undefined behavior if `ptr` is invalid.
pub unsafe fn check_cast(ptr: *mut GCThreadTLS) -> &'static mut GCThreadTLS {
assert!(!ptr.is_null());
let result = &mut *ptr;
let result = unsafe { &mut *ptr };
debug_assert!({
let kind = result.kind;
kind == GC_THREAD_KIND_WORKER
@@ -245,7 +245,7 @@ impl GCThreadTLS {
/// Has undefined behavior if `ptr` is invalid.
pub unsafe fn from_vwt_check(vwt: VMWorkerThread) -> &'static mut GCThreadTLS {
let ptr = Self::from_vwt(vwt);
Self::check_cast(ptr)
unsafe { Self::check_cast(ptr) }
}
#[allow(clippy::not_unsafe_ptr_arg_deref)] // `transmute` does not dereference pointer
@@ -281,7 +281,7 @@ impl RawVecOfObjRef {
///
/// This function turns raw pointer into a Vec without check.
pub unsafe fn into_vec(self) -> Vec<ObjectReference> {
Vec::from_raw_parts(self.ptr, self.len, self.capa)
unsafe { Vec::from_raw_parts(self.ptr, self.len, self.capa) }
}
}

View File

@@ -1,5 +1,9 @@
use std::sync::atomic::Ordering;
// Functions in this module are unsafe for one reason:
// They are called by C functions and they need to pass raw pointers to Rust.
#![allow(clippy::missing_safety_doc)]
use mmtk::util::options::PlanSelector;
use std::sync::atomic::Ordering;
use crate::abi::RawVecOfObjRef;
use crate::abi::RubyBindingOptions;
@@ -7,10 +11,10 @@ use crate::abi::RubyUpcalls;
use crate::binding;
use crate::binding::RubyBinding;
use crate::mmtk;
use crate::Ruby;
use crate::RubySlot;
use crate::utils::default_heap_max;
use crate::utils::parse_capacity;
use crate::Ruby;
use crate::RubySlot;
use mmtk::memory_manager;
use mmtk::memory_manager::mmtk_init;
use mmtk::util::constants::MIN_OBJECT_SIZE;
@@ -38,22 +42,18 @@ pub extern "C" fn mmtk_is_reachable(object: ObjectReference) -> bool {
// =============== Bootup ===============
fn mmtk_builder_default_parse_threads() -> usize {
let threads_str = std::env::var("MMTK_THREADS")
.unwrap_or("0".to_string());
let threads_str = std::env::var("MMTK_THREADS").unwrap_or("0".to_string());
threads_str
.parse::<usize>()
.unwrap_or_else(|_err| {
eprintln!("[FATAL] Invalid MMTK_THREADS {}", threads_str);
std::process::exit(1);
})
threads_str.parse::<usize>().unwrap_or_else(|_err| {
eprintln!("[FATAL] Invalid MMTK_THREADS {}", threads_str);
std::process::exit(1);
})
}
fn mmtk_builder_default_parse_heap_min() -> usize {
const DEFAULT_HEAP_MIN: usize = 1 << 20;
let heap_min_str = std::env::var("MMTK_HEAP_MIN")
.unwrap_or(DEFAULT_HEAP_MIN.to_string());
let heap_min_str = std::env::var("MMTK_HEAP_MIN").unwrap_or(DEFAULT_HEAP_MIN.to_string());
let size = parse_capacity(&heap_min_str, 0);
if size == 0 {
@@ -65,8 +65,7 @@ fn mmtk_builder_default_parse_heap_min() -> usize {
}
fn mmtk_builder_default_parse_heap_max() -> usize {
let heap_max_str = std::env::var("MMTK_HEAP_MAX")
.unwrap_or(default_heap_max().to_string());
let heap_max_str = std::env::var("MMTK_HEAP_MAX").unwrap_or(default_heap_max().to_string());
let size = parse_capacity(&heap_max_str, 0);
if size == 0 {
@@ -78,8 +77,7 @@ fn mmtk_builder_default_parse_heap_max() -> usize {
}
fn mmtk_builder_default_parse_heap_mode(heap_min: usize, heap_max: usize) -> GCTriggerSelector {
let heap_mode_str = std::env::var("MMTK_HEAP_MODE")
.unwrap_or("dynamic".to_string());
let heap_mode_str = std::env::var("MMTK_HEAP_MODE").unwrap_or("dynamic".to_string());
match heap_mode_str.as_str() {
"fixed" => GCTriggerSelector::FixedHeapSize(heap_max),
@@ -92,8 +90,7 @@ fn mmtk_builder_default_parse_heap_mode(heap_min: usize, heap_max: usize) -> GCT
}
fn mmtk_builder_default_parse_plan() -> PlanSelector {
let plan_str = std::env::var("MMTK_PLAN")
.unwrap_or("Immix".to_string());
let plan_str = std::env::var("MMTK_PLAN").unwrap_or("Immix".to_string());
match plan_str.as_str() {
"NoGC" => PlanSelector::NoGC,
@@ -121,11 +118,17 @@ pub extern "C" fn mmtk_builder_default() -> *mut MMTKBuilder {
let heap_max = mmtk_builder_default_parse_heap_max();
if heap_min >= heap_max {
eprintln!("[FATAL] MMTK_HEAP_MIN({}) >= MMTK_HEAP_MAX({})", heap_min, heap_max);
eprintln!(
"[FATAL] MMTK_HEAP_MIN({}) >= MMTK_HEAP_MAX({})",
heap_min, heap_max
);
std::process::exit(1);
}
builder.options.gc_trigger.set(mmtk_builder_default_parse_heap_mode(heap_min, heap_max));
builder
.options
.gc_trigger
.set(mmtk_builder_default_parse_heap_mode(heap_min, heap_max));
builder.options.plan.set(mmtk_builder_default_parse_plan());
@@ -133,7 +136,7 @@ pub extern "C" fn mmtk_builder_default() -> *mut MMTKBuilder {
}
#[no_mangle]
pub extern "C" fn mmtk_init_binding(
pub unsafe extern "C" fn mmtk_init_binding(
builder: *mut MMTKBuilder,
_binding_options: *const RubyBindingOptions,
upcalls: *const RubyUpcalls,
@@ -142,11 +145,19 @@ pub extern "C" fn mmtk_init_binding(
crate::set_panic_hook();
let builder = unsafe { Box::from_raw(builder) };
let binding_options = RubyBindingOptions {ractor_check_mode: false, suffix_size: 0};
let binding_options = RubyBindingOptions {
ractor_check_mode: false,
suffix_size: 0,
};
let mmtk_boxed = mmtk_init(&builder);
let mmtk_static = Box::leak(Box::new(mmtk_boxed));
let binding = RubyBinding::new(mmtk_static, &binding_options, upcalls, weak_reference_dead_value);
let binding = RubyBinding::new(
mmtk_static,
&binding_options,
upcalls,
weak_reference_dead_value,
);
crate::BINDING
.set(binding)
@@ -164,7 +175,7 @@ pub extern "C" fn mmtk_bind_mutator(tls: VMMutatorThread) -> *mut RubyMutator {
}
#[no_mangle]
pub extern "C" fn mmtk_destroy_mutator(mutator: *mut RubyMutator) {
pub unsafe extern "C" fn mmtk_destroy_mutator(mutator: *mut RubyMutator) {
// notify mmtk-core about destroyed mutator
memory_manager::destroy_mutator(unsafe { &mut *mutator });
// turn the ptr back to a box, and let Rust properly reclaim it
@@ -184,7 +195,9 @@ pub extern "C" fn mmtk_handle_user_collection_request(
#[no_mangle]
pub extern "C" fn mmtk_set_gc_enabled(enable: bool) {
crate::CONFIGURATION.gc_enabled.store(enable, Ordering::Relaxed);
crate::CONFIGURATION
.gc_enabled
.store(enable, Ordering::Relaxed);
}
#[no_mangle]
@@ -195,7 +208,7 @@ pub extern "C" fn mmtk_gc_enabled_p() -> bool {
// =============== Object allocation ===============
#[no_mangle]
pub extern "C" fn mmtk_alloc(
pub unsafe extern "C" fn mmtk_alloc(
mutator: *mut RubyMutator,
size: usize,
align: usize,
@@ -213,7 +226,7 @@ pub extern "C" fn mmtk_alloc(
}
#[no_mangle]
pub extern "C" fn mmtk_post_alloc(
pub unsafe extern "C" fn mmtk_post_alloc(
mutator: *mut RubyMutator,
refer: ObjectReference,
bytes: usize,
@@ -243,7 +256,7 @@ pub extern "C" fn mmtk_remove_weak(ptr: &ObjectReference) {
// =============== Write barriers ===============
#[no_mangle]
pub extern "C" fn mmtk_object_reference_write_post(
pub unsafe extern "C" fn mmtk_object_reference_write_post(
mutator: *mut RubyMutator,
object: ObjectReference,
) {
@@ -347,7 +360,7 @@ pub extern "C" fn mmtk_plan() -> *const u8 {
PlanSelector::NoGC => NO_GC.as_ptr(),
PlanSelector::MarkSweep => MARK_SWEEP.as_ptr(),
PlanSelector::Immix => IMMIX.as_ptr(),
_ => panic!("Unknown plan")
_ => panic!("Unknown plan"),
}
}
@@ -359,7 +372,7 @@ pub extern "C" fn mmtk_heap_mode() -> *const u8 {
match *crate::BINDING.get().unwrap().mmtk.get_options().gc_trigger {
GCTriggerSelector::FixedHeapSize(_) => FIXED_HEAP.as_ptr(),
GCTriggerSelector::DynamicHeapSize(_, _) => DYNAMIC_HEAP.as_ptr(),
_ => panic!("Unknown heap mode")
_ => panic!("Unknown heap mode"),
}
}
@@ -368,7 +381,7 @@ pub extern "C" fn mmtk_heap_min() -> usize {
match *crate::BINDING.get().unwrap().mmtk.get_options().gc_trigger {
GCTriggerSelector::FixedHeapSize(_) => 0,
GCTriggerSelector::DynamicHeapSize(min_size, _) => min_size,
_ => panic!("Unknown heap mode")
_ => panic!("Unknown heap mode"),
}
}
@@ -377,7 +390,7 @@ pub extern "C" fn mmtk_heap_max() -> usize {
match *crate::BINDING.get().unwrap().mmtk.get_options().gc_trigger {
GCTriggerSelector::FixedHeapSize(max_size) => max_size,
GCTriggerSelector::DynamicHeapSize(_, max_size) => max_size,
_ => panic!("Unknown heap mode")
_ => panic!("Unknown heap mode"),
}
}

View File

@@ -83,7 +83,7 @@ impl RubyBinding {
gc_thread_join_handles: Default::default(),
wb_unprotected_objects: Default::default(),
weak_reference_dead_value
weak_reference_dead_value,
}
}

View File

@@ -1,3 +1,7 @@
// Warn about unsafe operations in functions that are already marked as unsafe.
// This will become default in Rust 2024 edition.
#![warn(unsafe_op_in_unsafe_fn)]
extern crate libc;
extern crate mmtk;
#[macro_use]
@@ -141,4 +145,4 @@ macro_rules! extra_assert {
std::assert!($($arg)*);
}
};
}
}

View File

@@ -40,9 +40,7 @@ impl ObjectModel<Ruby> for VMObjectModel {
_semantics: CopySemantics,
_copy_context: &mut GCWorkerCopyContext<Ruby>,
) -> ObjectReference {
unimplemented!(
"Copying GC not currently supported"
)
unimplemented!("Copying GC not currently supported")
}
fn copy_to(_from: ObjectReference, _to: ObjectReference, _region: Address) -> Address {

View File

@@ -3,8 +3,8 @@ use std::sync::atomic::{AtomicUsize, Ordering};
use atomic_refcell::AtomicRefCell;
use mmtk::scheduler::{GCWork, GCWorker, WorkBucketStage};
use sysinfo::System;
use crate::Ruby;
use sysinfo::System;
pub struct ChunkedVecCollector<T> {
vecs: Vec<Vec<T>>,
@@ -97,7 +97,7 @@ pub fn default_heap_max() -> usize {
.expect("Invalid Memory size") as usize
}
pub fn parse_capacity(input: &String, default: usize) -> usize {
pub fn parse_capacity(input: &str, default: usize) -> usize {
let trimmed = input.trim();
const KIBIBYTE: usize = 1024;
@@ -112,17 +112,20 @@ pub fn parse_capacity(input: &String, default: usize) -> usize {
// 1MiB is the default heap size
match (val, suffix) {
(number, "GiB") => number.parse::<usize>()
.and_then(|v| Ok(v * GIBIBYTE))
(number, "GiB") => number
.parse::<usize>()
.map(|v| v * GIBIBYTE)
.unwrap_or(default),
(number, "MiB") => number.parse::<usize>()
.and_then(|v| Ok(v * MEBIBYTE))
(number, "MiB") => number
.parse::<usize>()
.map(|v| v * MEBIBYTE)
.unwrap_or(default),
(number, "KiB") => number.parse::<usize>()
.and_then(|v| Ok(v * KIBIBYTE))
(number, "KiB") => number
.parse::<usize>()
.map(|v| v * KIBIBYTE)
.unwrap_or(default),
(number, suffix) if suffix.is_empty() => number.parse::<usize>().unwrap_or(default),
(_, _) => default
(number, "") => number.parse::<usize>().unwrap_or(default),
(_, _) => default,
}
}
@@ -154,10 +157,25 @@ mod tests {
fn test_parses_nonsense_value_as_default_max() {
let default = 100;
assert_eq!(default, parse_capacity(&String::from("notanumber"), default));
assert_eq!(default, parse_capacity(&String::from("5tartswithanumber"), default));
assert_eq!(default, parse_capacity(&String::from("number1nthemiddle"), default));
assert_eq!(default, parse_capacity(&String::from("numberattheend111"), default));
assert_eq!(default, parse_capacity(&String::from("mult1pl3numb3r5"), default));
assert_eq!(
default,
parse_capacity(&String::from("notanumber"), default)
);
assert_eq!(
default,
parse_capacity(&String::from("5tartswithanumber"), default)
);
assert_eq!(
default,
parse_capacity(&String::from("number1nthemiddle"), default)
);
assert_eq!(
default,
parse_capacity(&String::from("numberattheend111"), default)
);
assert_eq!(
default,
parse_capacity(&String::from("mult1pl3numb3r5"), default)
);
}
}

View File

@@ -6,11 +6,7 @@ use mmtk::{
vm::ObjectTracerContext,
};
use crate::{
abi::GCThreadTLS,
upcalls,
Ruby,
};
use crate::{abi::GCThreadTLS, upcalls, Ruby};
pub struct WeakProcessor {
/// Objects that needs `obj_free` called when dying.
@@ -84,16 +80,13 @@ impl WeakProcessor {
let global_tables_count = (crate::upcalls().global_tables_count)();
let work_packets = (0..global_tables_count)
.map(|i| {
Box::new(UpdateGlobalTables { idx: i }) as _
})
.collect();
.map(|i| Box::new(UpdateGlobalTables { idx: i }) as _)
.collect();
worker.scheduler().work_buckets[WorkBucketStage::VMRefClosure].bulk_add(work_packets);
worker.scheduler().work_buckets[WorkBucketStage::VMRefClosure].bulk_add(vec![
Box::new(UpdateWbUnprotectedObjectsList) as _,
]);
worker.scheduler().work_buckets[WorkBucketStage::VMRefClosure]
.bulk_add(vec![Box::new(UpdateWbUnprotectedObjectsList) as _]);
}
}
@@ -144,13 +137,13 @@ impl GCWork<Ruby> for ProcessWeakReferences {
.try_lock()
.expect("Mutators should not be holding the lock.");
for ptr_ptr in weak_references.iter_mut() {
if !(**ptr_ptr).is_reachable() {
**ptr_ptr = crate::binding().weak_reference_dead_value;
}
for ptr_ptr in weak_references.iter_mut() {
if !(**ptr_ptr).is_reachable() {
**ptr_ptr = crate::binding().weak_reference_dead_value;
}
}
weak_references.clear();
weak_references.clear();
}
}
@@ -194,7 +187,7 @@ impl GCWork<Ruby> for UpdateFinalizerObjIdTables {
}
struct UpdateGlobalTables {
idx: i32
idx: i32,
}
impl GlobalTableProcessingWork for UpdateGlobalTables {
fn process_table(&mut self) {