1use std::borrow::Borrow;
18use std::ffi::c_void;
19use std::fmt::{Debug, Formatter};
20use std::hash::{Hash, Hasher};
21use std::marker::PhantomData;
22
23use binaryninjacore_sys::*;
24
25use crate::architecture::{
26 Architecture, ArchitectureExt, CoreArchitecture, CoreRegister, Register, RegisterId,
27};
28use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Guard, Ref, RefCountable};
29use crate::string::*;
30use crate::types::FunctionParameter;
31use crate::variable::Variable;
/// The interface a custom calling convention must implement before it can be
/// registered with an architecture via [`register_calling_convention`].
///
/// All registers are identified by architecture-specific [`RegisterId`]s.
pub trait CallingConvention: Sync {
    /// Registers the callee is free to clobber (caller must save if live).
    fn caller_saved_registers(&self) -> Vec<RegisterId>;
    /// Registers the callee must preserve across the call.
    fn callee_saved_registers(&self) -> Vec<RegisterId>;
    /// Registers used to pass integer/pointer arguments, in order.
    fn int_arg_registers(&self) -> Vec<RegisterId>;
    /// Registers used to pass floating-point arguments, in order.
    fn float_arg_registers(&self) -> Vec<RegisterId>;
    /// Registers that must always be treated as arguments (none by default).
    fn required_argument_registers(&self) -> Vec<RegisterId> {
        Vec::new()
    }
    /// Registers that must always be treated as clobbered (none by default).
    fn required_clobbered_registers(&self) -> Vec<RegisterId> {
        Vec::new()
    }

    /// Whether integer and float argument registers consume a shared
    /// argument-index sequence.
    fn arg_registers_shared_index(&self) -> bool;
    /// Whether the caller reserves stack space for register arguments.
    fn reserved_stack_space_for_arg_registers(&self) -> bool;
    /// Whether the callee adjusts the stack pointer on return.
    fn stack_adjusted_on_return(&self) -> bool;
    /// Whether analysis heuristics may select this convention.
    fn is_eligible_for_heuristics(&self) -> bool;

    /// Register holding the primary integer return value, if any.
    fn return_int_reg(&self) -> Option<RegisterId>;
    /// Register holding the high half of a wide integer return value, if any.
    fn return_hi_int_reg(&self) -> Option<RegisterId>;
    /// Register holding the floating-point return value, if any.
    fn return_float_reg(&self) -> Option<RegisterId>;

    /// Global pointer register, if the convention uses one.
    fn global_pointer_reg(&self) -> Option<RegisterId>;

    /// Registers assumed to hold defined values at function entry.
    fn implicitly_defined_registers(&self) -> Vec<RegisterId>;
    /// Whether variadic arguments are also passed in argument registers.
    fn are_argument_registers_used_for_var_args(&self) -> bool;
}
63
64pub fn register_calling_convention<A, C>(arch: &A, name: &str, cc: C) -> Ref<CoreCallingConvention>
65where
66 A: Architecture,
67 C: 'static + CallingConvention,
68{
69 struct CustomCallingConventionContext<C>
70 where
71 C: CallingConvention,
72 {
73 raw_handle: *mut BNCallingConvention,
74 cc: C,
75 }
76
77 extern "C" fn cb_free<C>(ctxt: *mut c_void)
79 where
80 C: CallingConvention,
81 {
82 ffi_wrap!("CallingConvention::free", unsafe {
83 let _ctxt = Box::from_raw(ctxt as *mut CustomCallingConventionContext<C>);
84 })
85 }
86
87 extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32, count: usize) {
88 ffi_wrap!("CallingConvention::free_register_list", unsafe {
89 if regs.is_null() {
90 return;
91 }
92
93 let regs_ptr = std::ptr::slice_from_raw_parts_mut(regs, count);
94 let _regs = Box::from_raw(regs_ptr);
95 })
96 }
97
98 extern "C" fn cb_caller_saved<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
99 where
100 C: CallingConvention,
101 {
102 ffi_wrap!("CallingConvention::caller_saved_registers", unsafe {
103 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
104 let mut regs: Vec<_> = ctxt
105 .cc
106 .caller_saved_registers()
107 .iter()
108 .map(|r| r.0)
109 .collect();
110
111 *count = regs.len();
113 let regs_ptr = regs.as_mut_ptr();
114 std::mem::forget(regs);
115 regs_ptr
116 })
117 }
118
119 extern "C" fn cb_callee_saved<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
120 where
121 C: CallingConvention,
122 {
123 ffi_wrap!("CallingConvention::callee_saved_registers", unsafe {
124 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
125 let mut regs: Vec<_> = ctxt
126 .cc
127 .callee_saved_registers()
128 .iter()
129 .map(|r| r.0)
130 .collect();
131
132 *count = regs.len();
134 let regs_ptr = regs.as_mut_ptr();
135 std::mem::forget(regs);
136 regs_ptr
137 })
138 }
139
140 extern "C" fn cb_int_args<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
141 where
142 C: CallingConvention,
143 {
144 ffi_wrap!("CallingConvention::int_arg_registers", unsafe {
145 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
146 let mut regs: Vec<_> = ctxt.cc.int_arg_registers().iter().map(|r| r.0).collect();
147
148 *count = regs.len();
150 let regs_ptr = regs.as_mut_ptr();
151 std::mem::forget(regs);
152 regs_ptr
153 })
154 }
155
156 extern "C" fn cb_float_args<C>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
157 where
158 C: CallingConvention,
159 {
160 ffi_wrap!("CallingConvention::float_arg_registers", unsafe {
161 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
162 let mut regs: Vec<_> = ctxt.cc.float_arg_registers().iter().map(|r| r.0).collect();
163
164 *count = regs.len();
166 let regs_ptr = regs.as_mut_ptr();
167 std::mem::forget(regs);
168 regs_ptr
169 })
170 }
171
172 extern "C" fn cb_required_argument_registers<C>(
173 ctxt: *mut c_void,
174 count: *mut usize,
175 ) -> *mut u32
176 where
177 C: CallingConvention,
178 {
179 ffi_wrap!("CallingConvention::required_argument_registers", unsafe {
180 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
181 let mut regs: Vec<_> = ctxt
182 .cc
183 .required_argument_registers()
184 .iter()
185 .map(|r| r.0)
186 .collect();
187
188 *count = regs.len();
190 let regs_ptr = regs.as_mut_ptr();
191 std::mem::forget(regs);
192 regs_ptr
193 })
194 }
195
196 extern "C" fn cb_required_clobbered_registers<C>(
197 ctxt: *mut c_void,
198 count: *mut usize,
199 ) -> *mut u32
200 where
201 C: CallingConvention,
202 {
203 ffi_wrap!("CallingConvention::required_clobbered_registers", unsafe {
204 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
205 let mut regs: Vec<_> = ctxt
206 .cc
207 .required_clobbered_registers()
208 .iter()
209 .map(|r| r.0)
210 .collect();
211
212 *count = regs.len();
214 let regs_ptr = regs.as_mut_ptr();
215 std::mem::forget(regs);
216 regs_ptr
217 })
218 }
219
220 extern "C" fn cb_arg_shared_index<C>(ctxt: *mut c_void) -> bool
221 where
222 C: CallingConvention,
223 {
224 ffi_wrap!("CallingConvention::arg_registers_shared_index", unsafe {
225 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
226
227 ctxt.cc.arg_registers_shared_index()
228 })
229 }
230
231 extern "C" fn cb_stack_reserved_arg_regs<C>(ctxt: *mut c_void) -> bool
232 where
233 C: CallingConvention,
234 {
235 ffi_wrap!(
236 "CallingConvention::reserved_stack_space_for_arg_registers",
237 unsafe {
238 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
239
240 ctxt.cc.reserved_stack_space_for_arg_registers()
241 }
242 )
243 }
244
245 extern "C" fn cb_stack_adjusted_on_return<C>(ctxt: *mut c_void) -> bool
246 where
247 C: CallingConvention,
248 {
249 ffi_wrap!("CallingConvention::stack_adjusted_on_return", unsafe {
250 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
251
252 ctxt.cc.stack_adjusted_on_return()
253 })
254 }
255
256 extern "C" fn cb_is_eligible_for_heuristics<C>(ctxt: *mut c_void) -> bool
257 where
258 C: CallingConvention,
259 {
260 ffi_wrap!("CallingConvention::is_eligible_for_heuristics", unsafe {
261 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
262
263 ctxt.cc.is_eligible_for_heuristics()
264 })
265 }
266
267 extern "C" fn cb_return_int_reg<C>(ctxt: *mut c_void) -> u32
268 where
269 C: CallingConvention,
270 {
271 ffi_wrap!("CallingConvention::return_int_reg", unsafe {
272 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
273
274 match ctxt.cc.return_int_reg() {
275 Some(r) => r.0,
276 _ => 0xffff_ffff,
277 }
278 })
279 }
280
281 extern "C" fn cb_return_hi_int_reg<C>(ctxt: *mut c_void) -> u32
282 where
283 C: CallingConvention,
284 {
285 ffi_wrap!("CallingConvention::return_hi_int_reg", unsafe {
286 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
287
288 match ctxt.cc.return_hi_int_reg() {
289 Some(r) => r.0,
290 _ => 0xffff_ffff,
291 }
292 })
293 }
294
295 extern "C" fn cb_return_float_reg<C>(ctxt: *mut c_void) -> u32
296 where
297 C: CallingConvention,
298 {
299 ffi_wrap!("CallingConvention::return_float_reg", unsafe {
300 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
301
302 match ctxt.cc.return_float_reg() {
303 Some(r) => r.0,
304 _ => 0xffff_ffff,
305 }
306 })
307 }
308
309 extern "C" fn cb_global_pointer_reg<C>(ctxt: *mut c_void) -> u32
310 where
311 C: CallingConvention,
312 {
313 ffi_wrap!("CallingConvention::global_pointer_reg", unsafe {
314 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
315
316 match ctxt.cc.global_pointer_reg() {
317 Some(r) => r.0,
318 _ => 0xffff_ffff,
319 }
320 })
321 }
322
323 extern "C" fn cb_implicitly_defined_registers<C>(
324 ctxt: *mut c_void,
325 count: *mut usize,
326 ) -> *mut u32
327 where
328 C: CallingConvention,
329 {
330 ffi_wrap!("CallingConvention::implicitly_defined_registers", unsafe {
331 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
332 let mut regs: Vec<_> = ctxt
333 .cc
334 .implicitly_defined_registers()
335 .iter()
336 .map(|r| r.0)
337 .collect();
338
339 *count = regs.len();
341 let regs_ptr = regs.as_mut_ptr();
342 std::mem::forget(regs);
343 regs_ptr
344 })
345 }
346
347 #[allow(clippy::extra_unused_type_parameters)]
348 extern "C" fn cb_incoming_reg_value<C>(
349 _ctxt: *mut c_void,
350 _reg: u32,
351 _func: *mut BNFunction,
352 val: *mut BNRegisterValue,
353 ) where
354 C: CallingConvention,
355 {
356 ffi_wrap!("CallingConvention::incoming_reg_value", unsafe {
358 let val = &mut *val;
360
361 val.state = BNRegisterValueType::EntryValue;
362 val.value = _reg as i64;
363 })
364 }
365
366 #[allow(clippy::extra_unused_type_parameters)]
367 extern "C" fn cb_incoming_flag_value<C>(
368 _ctxt: *mut c_void,
369 _flag: u32,
370 _func: *mut BNFunction,
371 val: *mut BNRegisterValue,
372 ) where
373 C: CallingConvention,
374 {
375 ffi_wrap!("CallingConvention::incoming_flag_value", unsafe {
377 let val = &mut *val;
379
380 val.state = BNRegisterValueType::EntryValue;
381 val.value = _flag as i64;
382 })
383 }
384
385 extern "C" fn cb_incoming_var_for_param<C>(
386 ctxt: *mut c_void,
387 var: *const BNVariable,
388 _func: *mut BNFunction,
389 param: *mut BNVariable,
390 ) where
391 C: CallingConvention,
392 {
393 ffi_wrap!("CallingConvention::incoming_var_for_param", unsafe {
394 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
395 std::ptr::write(
396 param,
397 BNGetDefaultIncomingVariableForParameterVariable(ctxt.raw_handle, var),
398 );
399 })
400 }
401
402 extern "C" fn cb_incoming_param_for_var<C>(
403 ctxt: *mut c_void,
404 var: *const BNVariable,
405 _func: *mut BNFunction,
406 param: *mut BNVariable,
407 ) where
408 C: CallingConvention,
409 {
410 ffi_wrap!("CallingConvention::incoming_param_for_var", unsafe {
411 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
412 std::ptr::write(
413 param,
414 BNGetDefaultParameterVariableForIncomingVariable(ctxt.raw_handle, var),
415 );
416 })
417 }
418
419 extern "C" fn cb_are_argument_registers_used_for_var_args<C>(ctxt: *mut c_void) -> bool
420 where
421 C: CallingConvention,
422 {
423 ffi_wrap!(
424 "CallingConvention::are_argument_registers_used_for_var_args",
425 unsafe {
426 let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
427
428 ctxt.cc.are_argument_registers_used_for_var_args()
429 }
430 )
431 }
432
433 let name = name.to_cstr();
434 let raw = Box::into_raw(Box::new(CustomCallingConventionContext {
435 raw_handle: std::ptr::null_mut(),
436 cc,
437 }));
438 let mut cc = BNCustomCallingConvention {
439 context: raw as *mut _,
440
441 freeObject: Some(cb_free::<C>),
442
443 getCallerSavedRegisters: Some(cb_caller_saved::<C>),
444 getCalleeSavedRegisters: Some(cb_callee_saved::<C>),
445 getIntegerArgumentRegisters: Some(cb_int_args::<C>),
446 getFloatArgumentRegisters: Some(cb_float_args::<C>),
447 getRequiredArgumentRegisters: Some(cb_required_argument_registers::<C>),
448 getRequiredClobberedRegisters: Some(cb_required_clobbered_registers::<C>),
449 freeRegisterList: Some(cb_free_register_list),
450
451 areArgumentRegistersSharedIndex: Some(cb_arg_shared_index::<C>),
452 isStackReservedForArgumentRegisters: Some(cb_stack_reserved_arg_regs::<C>),
453 isStackAdjustedOnReturn: Some(cb_stack_adjusted_on_return::<C>),
454 isEligibleForHeuristics: Some(cb_is_eligible_for_heuristics::<C>),
455
456 getIntegerReturnValueRegister: Some(cb_return_int_reg::<C>),
457 getHighIntegerReturnValueRegister: Some(cb_return_hi_int_reg::<C>),
458 getFloatReturnValueRegister: Some(cb_return_float_reg::<C>),
459 getGlobalPointerRegister: Some(cb_global_pointer_reg::<C>),
460
461 getImplicitlyDefinedRegisters: Some(cb_implicitly_defined_registers::<C>),
462 getIncomingRegisterValue: Some(cb_incoming_reg_value::<C>),
463 getIncomingFlagValue: Some(cb_incoming_flag_value::<C>),
464 getIncomingVariableForParameterVariable: Some(cb_incoming_var_for_param::<C>),
465 getParameterVariableForIncomingVariable: Some(cb_incoming_param_for_var::<C>),
466
467 areArgumentRegistersUsedForVarArgs: Some(cb_are_argument_registers_used_for_var_args::<C>),
468 };
469
470 unsafe {
471 let cc_name = name.as_ptr();
472 let result = BNCreateCallingConvention(arch.as_ref().handle, cc_name, &mut cc);
473
474 assert!(!result.is_null());
475
476 (*raw).raw_handle = result;
477
478 BNRegisterCallingConvention(arch.as_ref().handle, result);
479
480 Ref::new(CoreCallingConvention {
481 handle: result,
482 arch_handle: arch.as_ref().handle(),
483 })
484 }
485}
486
/// A calling convention owned by the core, together with the architecture it
/// belongs to.
pub struct CoreCallingConvention {
    // Core handle; released via `RefCountable::dec_ref`.
    pub(crate) handle: *mut BNCallingConvention,
    // Architecture used to resolve register ids returned by the core.
    pub(crate) arch_handle: CoreArchitecture,
}
491
impl CoreCallingConvention {
    /// Wraps a raw core handle without acquiring an additional reference.
    ///
    /// # Safety
    /// `handle` must be a valid `BNCallingConvention` pointer belonging to
    /// `arch`, and it must remain valid for the lifetime of the wrapper.
    pub(crate) unsafe fn from_raw(
        handle: *mut BNCallingConvention,
        arch: CoreArchitecture,
    ) -> Self {
        CoreCallingConvention {
            handle,
            arch_handle: arch,
        }
    }

    /// Wraps a raw core handle, taking ownership of one core reference
    /// (released when the returned `Ref` drops).
    ///
    /// # Safety
    /// `handle` must be a valid `BNCallingConvention` pointer belonging to
    /// `arch`, and the caller must own the reference being transferred.
    pub(crate) unsafe fn ref_from_raw(
        handle: *mut BNCallingConvention,
        arch: CoreArchitecture,
    ) -> Ref<Self> {
        Ref::new(CoreCallingConvention {
            handle,
            arch_handle: arch,
        })
    }

    /// Returns the convention's name as reported by the core.
    pub fn name(&self) -> String {
        unsafe { BnString::into_string(BNGetCallingConventionName(self.handle)) }
    }

    /// Computes the variables this convention assigns to `params`.
    ///
    /// When `permitted_registers` is `Some`, argument passing is restricted
    /// to those registers; otherwise the core's default permitted set is
    /// used.
    pub fn variables_for_parameters(
        &self,
        params: &[FunctionParameter],
        permitted_registers: Option<&[CoreRegister]>,
    ) -> Vec<Variable> {
        // Out-parameter filled by the core with the result length.
        let mut count: usize = 0;
        // Raw parameter structs must be freed after the core call (below).
        let raw_params: Vec<BNFunctionParameter> = params
            .iter()
            .cloned()
            .map(FunctionParameter::into_raw)
            .collect();
        let raw_vars_ptr: *mut BNVariable = if let Some(permitted_args) = permitted_registers {
            let permitted_regs = permitted_args.iter().map(|r| r.id().0).collect::<Vec<_>>();

            unsafe {
                BNGetVariablesForParameters(
                    self.handle,
                    raw_params.as_ptr(),
                    raw_params.len(),
                    permitted_regs.as_ptr(),
                    permitted_regs.len(),
                    &mut count,
                )
            }
        } else {
            unsafe {
                BNGetVariablesForParametersDefaultPermittedArgs(
                    self.handle,
                    raw_params.as_ptr(),
                    raw_params.len(),
                    &mut count,
                )
            }
        };

        // Release the raw parameter structs created above.
        for raw_param in raw_params {
            FunctionParameter::free_raw(raw_param);
        }

        // The Array takes ownership of the core-allocated variable list.
        unsafe { Array::<Variable>::new(raw_vars_ptr, count, ()) }.to_vec()
    }
}
559
// SAFETY(review): the wrapper only holds a refcounted core handle; these
// impls assume the core's BNCallingConvention APIs are safe to call from any
// thread — TODO confirm against the core's threading guarantees.
unsafe impl Send for CoreCallingConvention {}
unsafe impl Sync for CoreCallingConvention {}
562
563impl Eq for CoreCallingConvention {}
564impl PartialEq for CoreCallingConvention {
565 fn eq(&self, rhs: &Self) -> bool {
566 self.handle == rhs.handle
567 }
568}
569
570impl Debug for CoreCallingConvention {
571 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
572 f.debug_struct("CoreCallingConvention")
573 .field("name", &self.name())
574 .field("caller_saved_registers", &self.caller_saved_registers())
575 .field("callee_saved_registers", &self.callee_saved_registers())
576 .field("int_arg_registers", &self.int_arg_registers())
577 .field("float_arg_registers", &self.float_arg_registers())
578 .field(
579 "required_argument_registers",
580 &self.required_argument_registers(),
581 )
582 .field(
583 "required_clobbered_registers",
584 &self.required_clobbered_registers(),
585 )
586 .field(
587 "arg_registers_shared_index",
588 &self.arg_registers_shared_index(),
589 )
590 .field(
591 "reserved_stack_space_for_arg_registers",
592 &self.reserved_stack_space_for_arg_registers(),
593 )
594 .field("stack_adjusted_on_return", &self.stack_adjusted_on_return())
595 .field(
596 "is_eligible_for_heuristics",
597 &self.is_eligible_for_heuristics(),
598 )
599 .field("return_int_reg", &self.return_int_reg())
600 .field("return_hi_int_reg", &self.return_hi_int_reg())
601 .field("return_float_reg", &self.return_float_reg())
602 .field("global_pointer_reg", &self.global_pointer_reg())
603 .field(
604 "implicitly_defined_registers",
605 &self.implicitly_defined_registers(),
606 )
607 .field(
608 "are_argument_registers_used_for_var_args",
609 &self.are_argument_registers_used_for_var_args(),
610 )
611 .finish()
612 }
613}
614
impl Hash for CoreCallingConvention {
    /// Hashes by core handle pointer, consistent with `PartialEq`'s
    /// identity comparison.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.handle.hash(state);
    }
}
620
// Mirrors the trait over the core's getters. Register lists are copied out
// of core-allocated arrays, which are then released with
// `BNFreeRegisterList`.
impl CallingConvention for CoreCallingConvention {
    fn caller_saved_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetCallerSavedRegisters(self.handle, &mut count);
            // NOTE(review): this (and the sibling list getters below) assumes
            // the core never returns a null pointer here, even for an empty
            // list — confirm; `from_raw_parts` requires a non-null pointer.
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn callee_saved_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetCalleeSavedRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn int_arg_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetIntegerArgumentRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn float_arg_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetFloatArgumentRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn required_argument_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetRequiredArgumentRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn required_clobbered_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetRequiredClobberedRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn arg_registers_shared_index(&self) -> bool {
        unsafe { BNAreArgumentRegistersSharedIndex(self.handle) }
    }

    fn reserved_stack_space_for_arg_registers(&self) -> bool {
        unsafe { BNIsStackReservedForArgumentRegisters(self.handle) }
    }

    fn stack_adjusted_on_return(&self) -> bool {
        unsafe { BNIsStackAdjustedOnReturn(self.handle) }
    }

    fn is_eligible_for_heuristics(&self) -> bool {
        unsafe { BNIsEligibleForHeuristics(self.handle) }
    }

    fn return_int_reg(&self) -> Option<RegisterId> {
        // Ids at or above 0x8000_0000 (including the 0xffff_ffff "no
        // register" sentinel used at registration time) are treated as
        // absent; valid ids are additionally checked against the
        // architecture before being returned.
        match unsafe { BNGetIntegerReturnValueRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn return_hi_int_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetHighIntegerReturnValueRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn return_float_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetFloatReturnValueRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn global_pointer_reg(&self) -> Option<RegisterId> {
        match unsafe { BNGetGlobalPointerRegister(self.handle) } {
            id if id < 0x8000_0000 => self
                .arch_handle
                .borrow()
                .register_from_id(RegisterId(id))
                .map(|r| r.id()),
            _ => None,
        }
    }

    fn implicitly_defined_registers(&self) -> Vec<RegisterId> {
        unsafe {
            let mut count = 0;
            let regs_ptr = BNGetImplicitlyDefinedRegisters(self.handle, &mut count);
            let regs: Vec<RegisterId> = std::slice::from_raw_parts(regs_ptr, count)
                .iter()
                .copied()
                .map(RegisterId::from)
                .collect();
            BNFreeRegisterList(regs_ptr);
            regs
        }
    }

    fn are_argument_registers_used_for_var_args(&self) -> bool {
        unsafe { BNAreArgumentRegistersUsedForVarArgs(self.handle) }
    }
}
784
785impl ToOwned for CoreCallingConvention {
786 type Owned = Ref<Self>;
787
788 fn to_owned(&self) -> Self::Owned {
789 unsafe { RefCountable::inc_ref(self) }
790 }
791}
792
unsafe impl RefCountable for CoreCallingConvention {
    /// Takes an additional core reference to the same calling convention.
    unsafe fn inc_ref(handle: &Self) -> Ref<Self> {
        Ref::new(Self {
            handle: BNNewCallingConventionReference(handle.handle),
            arch_handle: handle.arch_handle,
        })
    }

    /// Releases one core reference.
    unsafe fn dec_ref(handle: &Self) {
        BNFreeCallingConvention(handle.handle);
    }
}
805
// Enables `Array<CoreCallingConvention>` over core-allocated lists; the
// owning architecture is threaded through as the array context, and each
// element is wrapped in a `Guard` tied to the list's lifetime.
impl CoreArrayProvider for CoreCallingConvention {
    type Raw = *mut BNCallingConvention;
    type Context = CoreArchitecture;
    type Wrapped<'a> = Guard<'a, CoreCallingConvention>;
}
811
unsafe impl CoreArrayProviderInner for CoreCallingConvention {
    /// Releases a core-allocated calling convention list.
    unsafe fn free(raw: *mut *mut BNCallingConvention, count: usize, _content: &Self::Context) {
        BNFreeCallingConventionList(raw, count);
    }

    /// Borrows one list element as a guarded wrapper (no reference is
    /// acquired; the guard ties the element to the list's lifetime).
    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
        Guard::new(
            CoreCallingConvention {
                handle: *raw,
                arch_handle: *context,
            },
            context,
        )
    }
}
827
/// Fluent builder for registering a calling convention without
/// hand-implementing [`CallingConvention`]: the setters resolve register
/// names through the architecture, and `register` performs the actual
/// registration.
pub struct ConventionBuilder<A: Architecture> {
    // Register lists, resolved from names via `Architecture::register_by_name`.
    caller_saved_registers: Vec<RegisterId>,
    callee_saved_registers: Vec<RegisterId>,
    int_arg_registers: Vec<RegisterId>,
    float_arg_registers: Vec<RegisterId>,
    required_argument_registers: Vec<RegisterId>,
    required_clobbered_registers: Vec<RegisterId>,

    // Boolean convention properties (all default to false).
    arg_registers_shared_index: bool,
    reserved_stack_space_for_arg_registers: bool,
    stack_adjusted_on_return: bool,
    is_eligible_for_heuristics: bool,

    // Optional single-register properties (all default to None).
    return_int_reg: Option<RegisterId>,
    return_hi_int_reg: Option<RegisterId>,
    return_float_reg: Option<RegisterId>,

    global_pointer_reg: Option<RegisterId>,

    implicitly_defined_registers: Vec<RegisterId>,

    are_argument_registers_used_for_var_args: bool,

    // Handle used to resolve register names and to register the result.
    arch_handle: A::Handle,
    // `*const A` keeps the builder !Send/!Sync by default (see the explicit
    // unsafe impls at the bottom of the file).
    _arch: PhantomData<*const A>,
}
854
/// Generates a chainable builder setter for a boolean flag field.
macro_rules! bool_arg {
    ($name:ident) => {
        pub fn $name(self, val: bool) -> Self {
            Self { $name: val, ..self }
        }
    };
}
863
/// Generates a chainable builder setter that resolves a list of register
/// names through the architecture; names that do not resolve are silently
/// skipped.
macro_rules! reg_list {
    ($name:ident) => {
        pub fn $name(mut self, regs: &[&str]) -> Self {
            let resolved = {
                // Scope the borrow of the architecture handle so `self` can
                // be moved out afterwards.
                let arch = self.arch_handle.borrow();
                regs.iter()
                    .filter_map(|&name| arch.register_by_name(name))
                    .map(|reg| reg.id())
                    .collect()
            };
            self.$name = resolved;
            self
        }
    };
}
882
/// Generates a chainable builder setter that resolves a single register name
/// through the architecture; an unknown name clears the field to `None`.
macro_rules! reg {
    ($name:ident) => {
        pub fn $name(mut self, reg: &str) -> Self {
            let resolved = {
                // Scope the borrow of the architecture handle so `self` can
                // be moved out afterwards.
                let arch = self.arch_handle.borrow();
                arch.register_by_name(reg).map(|r| r.id())
            };
            self.$name = resolved;
            self
        }
    };
}
896
897impl<A: Architecture> ConventionBuilder<A> {
898 pub fn new(arch: &A) -> Self {
899 Self {
900 caller_saved_registers: Vec::new(),
901 callee_saved_registers: Vec::new(),
902 int_arg_registers: Vec::new(),
903 float_arg_registers: Vec::new(),
904 required_argument_registers: Vec::new(),
905 required_clobbered_registers: Vec::new(),
906
907 arg_registers_shared_index: false,
908 reserved_stack_space_for_arg_registers: false,
909 stack_adjusted_on_return: false,
910 is_eligible_for_heuristics: false,
911
912 return_int_reg: None,
913 return_hi_int_reg: None,
914 return_float_reg: None,
915
916 global_pointer_reg: None,
917
918 implicitly_defined_registers: Vec::new(),
919
920 are_argument_registers_used_for_var_args: false,
921
922 arch_handle: arch.handle(),
923 _arch: PhantomData,
924 }
925 }
926
927 reg_list!(caller_saved_registers);
928 reg_list!(callee_saved_registers);
929 reg_list!(int_arg_registers);
930 reg_list!(float_arg_registers);
931 reg_list!(required_argument_registers);
932 reg_list!(required_clobbered_registers);
933
934 bool_arg!(arg_registers_shared_index);
935 bool_arg!(reserved_stack_space_for_arg_registers);
936 bool_arg!(stack_adjusted_on_return);
937 bool_arg!(is_eligible_for_heuristics);
938
939 reg!(return_int_reg);
940 reg!(return_hi_int_reg);
941 reg!(return_float_reg);
942
943 reg!(global_pointer_reg);
944
945 reg_list!(implicitly_defined_registers);
946
947 bool_arg!(are_argument_registers_used_for_var_args);
948
949 pub fn register(self, name: &str) -> Ref<CoreCallingConvention> {
950 let arch = self.arch_handle.clone();
951 register_calling_convention(arch.borrow(), name, self)
952 }
953}
954
955impl<A: Architecture> CallingConvention for ConventionBuilder<A> {
956 fn caller_saved_registers(&self) -> Vec<RegisterId> {
957 self.caller_saved_registers.clone()
958 }
959
960 fn callee_saved_registers(&self) -> Vec<RegisterId> {
961 self.callee_saved_registers.clone()
962 }
963
964 fn int_arg_registers(&self) -> Vec<RegisterId> {
965 self.int_arg_registers.clone()
966 }
967
968 fn float_arg_registers(&self) -> Vec<RegisterId> {
969 self.float_arg_registers.clone()
970 }
971
972 fn required_argument_registers(&self) -> Vec<RegisterId> {
973 self.required_argument_registers.clone()
974 }
975
976 fn required_clobbered_registers(&self) -> Vec<RegisterId> {
977 self.required_clobbered_registers.clone()
978 }
979
980 fn arg_registers_shared_index(&self) -> bool {
981 self.arg_registers_shared_index
982 }
983
984 fn reserved_stack_space_for_arg_registers(&self) -> bool {
985 self.reserved_stack_space_for_arg_registers
986 }
987
988 fn stack_adjusted_on_return(&self) -> bool {
989 self.stack_adjusted_on_return
990 }
991
992 fn is_eligible_for_heuristics(&self) -> bool {
993 self.is_eligible_for_heuristics
994 }
995
996 fn return_int_reg(&self) -> Option<RegisterId> {
997 self.return_int_reg
998 }
999
1000 fn return_hi_int_reg(&self) -> Option<RegisterId> {
1001 self.return_hi_int_reg
1002 }
1003
1004 fn return_float_reg(&self) -> Option<RegisterId> {
1005 self.return_float_reg
1006 }
1007
1008 fn global_pointer_reg(&self) -> Option<RegisterId> {
1009 self.global_pointer_reg
1010 }
1011
1012 fn implicitly_defined_registers(&self) -> Vec<RegisterId> {
1013 self.implicitly_defined_registers.clone()
1014 }
1015
1016 fn are_argument_registers_used_for_var_args(&self) -> bool {
1017 self.are_argument_registers_used_for_var_args
1018 }
1019}
1020
// SAFETY(review): `PhantomData<*const A>` is what suppresses the auto
// impls — the builder stores no actual raw pointer. Sendability still
// depends on `A::Handle` being safe to move/share across threads; TODO
// confirm that every `Architecture::Handle` implementation satisfies this.
unsafe impl<A: Architecture> Send for ConventionBuilder<A> {}
unsafe impl<A: Architecture> Sync for ConventionBuilder<A> {}