binaryninja/
architecture.rs

1// Copyright 2021-2026 Vector 35 Inc.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15//! Architectures provide disassembly, lifting, and associated metadata about a CPU to inform
16//! analysis and decompilation.
17//!
18//! For more information see the [`Architecture`] trait and the [`CoreArchitecture`] structure for
19//! querying already registered architectures.
20
21// RegisterInfo purge
22use binaryninjacore_sys::*;
23use std::fmt::{Debug, Formatter};
24
25use crate::{
26    calling_convention::CoreCallingConvention,
27    data_buffer::DataBuffer,
28    disassembly::InstructionTextToken,
29    function::Function,
30    platform::Platform,
31    rc::*,
32    relocation::CoreRelocationHandler,
33    string::{IntoCStr, *},
34    types::{NameAndType, Type},
35    Endianness,
36};
37use std::ops::Deref;
38use std::{
39    borrow::Borrow,
40    ffi::{c_char, c_void, CString},
41    hash::Hash,
42    mem::MaybeUninit,
43};
44
45use crate::function_recognizer::FunctionRecognizer;
46use crate::relocation::{CustomRelocationHandlerHandle, RelocationHandler};
47
48use crate::confidence::Conf;
49use crate::low_level_il::expression::ValueExpr;
50use crate::low_level_il::lifting::{
51    get_default_flag_cond_llil, get_default_flag_write_llil, LowLevelILFlagWriteOp,
52};
53use crate::low_level_il::{LowLevelILMutableExpression, LowLevelILMutableFunction};
54
55pub mod basic_block;
56pub mod branches;
57pub mod flag;
58pub mod instruction;
59pub mod intrinsic;
60pub mod register;
61
62// Re-export all the submodules to keep from breaking everyone's code.
63// We split these out just to clarify each part, not necessarily to enforce an extra namespace.
64pub use basic_block::*;
65pub use branches::*;
66pub use flag::*;
67pub use instruction::*;
68pub use intrinsic::*;
69pub use register::*;
70
/// The [`Architecture`] trait is the backbone of Binary Ninja's analysis capabilities. It tells the
/// core how to interpret the machine code into LLIL, a generic intermediate representation for
/// program analysis.
///
/// To add support for a new Instruction Set Architecture (ISA), you must implement this trait and
/// register it. The core analysis loop relies on your implementation for three critical stages:
///
/// 1.  **Disassembly ([`Architecture::instruction_text`])**: Machine code into human-readable text (e.g., `55` -> `push rbp`).
/// 2.  **Control Flow Analysis ([`Architecture::instruction_info`])**: Identifying where execution goes next (e.g., "This is a `call` instruction, it targets address `0x401000`").
/// 3.  **Lifting ([`Architecture::instruction_llil`])**: Translating machine code into **Low Level Intermediate Language (LLIL)**, which enables decompilation and automated analysis.
pub trait Architecture: 'static + Sized + AsRef<CoreArchitecture> {
    /// Cloneable owner of this architecture implementation, borrowable as `Self`.
    type Handle: Borrow<Self> + Clone;

    /// The [`RegisterInfo`] associated with this architecture.
    type RegisterInfo: RegisterInfo<RegType = Self::Register>;

    /// The [`Register`] associated with this architecture.
    type Register: Register<InfoType = Self::RegisterInfo>;

    /// The [`RegisterStackInfo`] associated with this architecture.
    ///
    /// You may only set this to [`UnusedRegisterStack`] if [`Self::RegisterStack`] is as well.
    type RegisterStackInfo: RegisterStackInfo<
        RegType = Self::Register,
        RegInfoType = Self::RegisterInfo,
        RegStackType = Self::RegisterStack,
    >;

    /// The [`RegisterStack`] associated with this architecture.
    ///
    /// If you do not override [`Architecture::register_stack_from_id`] and [`Architecture::register_stacks`],
    /// you may set this to [`UnusedRegisterStack`].
    type RegisterStack: RegisterStack<
        InfoType = Self::RegisterStackInfo,
        RegType = Self::Register,
        RegInfoType = Self::RegisterInfo,
    >;

    /// The [`Flag`] associated with this architecture.
    ///
    /// If you do not override [`Architecture::flag_from_id`] and [`Architecture::flags`], you may
    /// set this to [`UnusedFlag`].
    type Flag: Flag<FlagClass = Self::FlagClass>;

    /// The [`FlagWrite`] associated with this architecture.
    ///
    /// Can only be set to [`UnusedFlag`] if [`Self::Flag`] is as well. Otherwise, it is expected that
    /// this points to a custom [`FlagWrite`] with the following functions defined:
    ///
    /// - [`Architecture::flag_write_types`]
    /// - [`Architecture::flag_write_from_id`]
    type FlagWrite: FlagWrite<FlagType = Self::Flag, FlagClass = Self::FlagClass>;

    /// The [`FlagClass`] associated with this architecture.
    ///
    /// Can only be set to [`UnusedFlag`] if [`Self::Flag`] is as well. Otherwise, it is expected that
    /// this points to a custom [`FlagClass`] with the following functions defined:
    ///
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    type FlagClass: FlagClass;

    /// The [`FlagGroup`] associated with this architecture.
    ///
    /// Can only be set to [`UnusedFlag`] if [`Self::Flag`] is as well. Otherwise, it is expected that
    /// this points to a custom [`FlagGroup`] with the following functions defined:
    ///
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    type FlagGroup: FlagGroup<FlagType = Self::Flag, FlagClass = Self::FlagClass>;

    /// The [`Intrinsic`] associated with this architecture.
    type Intrinsic: Intrinsic;

    /// The byte order of this architecture.
    fn endianness(&self) -> Endianness;
    /// The size of an address (pointer) in bytes.
    fn address_size(&self) -> usize;
    /// The default size of an integer in bytes.
    fn default_integer_size(&self) -> usize;
    /// The required alignment of instructions in bytes.
    fn instruction_alignment(&self) -> usize;

    /// The maximum length of an instruction in bytes. This is used to determine the size of the buffer
    /// given to callbacks such as [`Architecture::instruction_info`], [`Architecture::instruction_text`]
    /// and [`Architecture::instruction_llil`].
    ///
    /// NOTE: The maximum **CANNOT** be greater than 256.
    fn max_instr_len(&self) -> usize;

    /// How many bytes to display in the opcode space before displaying a `...`, typically set to
    /// the [`Architecture::max_instr_len`], however, can be overridden to display a truncated opcode.
    fn opcode_display_len(&self) -> usize {
        self.max_instr_len()
    }

    /// In binaries with multiple architectures, you may wish to associate a specific architecture
    /// with a given virtual address. This can be seen in armv7 where odd addresses are associated
    /// with the thumb architecture.
    fn associated_arch_by_addr(&self, _addr: u64) -> CoreArchitecture {
        *self.as_ref()
    }

    /// Returns the [`InstructionInfo`] at the given virtual address with `data`.
    ///
    /// The [`InstructionInfo`] object should always fill the proper length and branches if not, the
    /// next instruction will likely be incorrect.
    fn instruction_info(&self, data: &[u8], addr: u64) -> Option<InstructionInfo>;

    /// Disassembles a raw byte sequence into a human-readable list of text tokens.
    ///
    /// This function is responsible for the visual representation of assembly instructions.
    /// It does *not* define semantics (use [`Architecture::instruction_llil`] for that);
    /// it simply tells the UI how to print the instruction.
    ///
    /// # Returns
    ///
    /// An `Option` containing a tuple:
    ///
    /// * `usize`: The size of the decoded instruction in bytes. Is used to advance to the next instruction.
    /// * `Vec<InstructionTextToken>`: A list of text tokens representing the instruction.
    ///
    /// Returns `None` if the bytes do not form a valid instruction.
    fn instruction_text(
        &self,
        data: &[u8],
        addr: u64,
    ) -> Option<(usize, Vec<InstructionTextToken>)>;

    // TODO: Why do we need to return a boolean here? Does `None` not represent the same thing?
    /// Appends arbitrary low-level il instructions to `il`.
    ///
    /// If `None` is returned, no instructions were appended and the data is invalid. If `Some` is returned,
    /// the instructions consumed length is returned (necessary for variable length instruction decoding).
    fn instruction_llil(
        &self,
        data: &[u8],
        addr: u64,
        il: &LowLevelILMutableFunction,
    ) -> Option<(usize, bool)>;

    /// Performs basic block recovery and commits the results to the function analysis.
    ///
    /// NOTE: Only implement this method if function-level analysis is required. Otherwise, do not
    /// implement to let default basic block analysis take place.
    fn analyze_basic_blocks(
        &self,
        function: &mut Function,
        context: &mut BasicBlockAnalysisContext,
    ) {
        unsafe {
            BNArchitectureDefaultAnalyzeBasicBlocks(function.handle, context.handle);
        }
    }

    /// Lifts an entire function, defaulting to the core's per-instruction lifting loop.
    fn lift_function(
        &self,
        function: LowLevelILMutableFunction,
        context: &mut FunctionLifterContext,
    ) -> bool {
        unsafe { BNArchitectureDefaultLiftFunction(function.handle, context.handle) }
    }

    /// Fallback flag value calculation path. This method is invoked when the core is unable to
    /// recover the flag using semantics and resorts to emitting instructions that explicitly set each
    /// observed flag to the value of an expression returned by this function.
    ///
    /// This function *MUST NOT* append instructions that have side effects.
    ///
    /// This function *MUST NOT* observe the values of other flags.
    ///
    /// This function *MUST* return `None` or an expression representing a boolean value.
    fn flag_write_llil<'a>(
        &self,
        flag: Self::Flag,
        flag_write_type: Self::FlagWrite,
        op: LowLevelILFlagWriteOp<Self::Register>,
        il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        // Defaults to the generic expression derived from the flag's role within the
        // flag-write type's class.
        let role = flag.role(flag_write_type.class());
        Some(get_default_flag_write_llil(self, role, op, il))
    }

    /// Determines what flags need to be examined to attempt automatic recovery of the flag uses semantics.
    ///
    /// If automatic recovery is not possible, the [`Architecture::flag_cond_llil`] method will be invoked
    /// to give this [`Architecture`] implementation arbitrary control over the expression to be evaluated.
    fn flags_required_for_flag_condition(
        &self,
        _condition: FlagCondition,
        _class: Option<Self::FlagClass>,
    ) -> Vec<Self::Flag> {
        Vec::new()
    }

    /// This function *MUST NOT* append instructions that have side effects.
    ///
    /// This function *MUST NOT* observe the values of flags not returned by
    /// `flags_required_for_flag_condition`.
    ///
    /// This function *MUST* return `None` or an expression representing a boolean value.
    fn flag_cond_llil<'a>(
        &self,
        cond: FlagCondition,
        class: Option<Self::FlagClass>,
        il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        Some(get_default_flag_cond_llil(self, cond, class, il))
    }

    /// Performs fallback resolution when the core was unable to recover the semantics of a
    /// `LLIL_FLAG_GROUP` expression. This occurs when multiple instructions may have set the flags
    /// at the flag group query, or when the `FlagGroup::flag_conditions()` map doesn't have an entry
    /// for the `FlagClass` associated with the `FlagWrite` type of the expression that last set
    /// the flags required by the `FlagGroup` `group`.
    ///
    /// In this fallback path, the `Architecture` must generate the boolean expression in terms of
    /// the values of the flags returned by `group`'s `flags_required` method.
    ///
    /// This function must return an expression representing a boolean (as in, size of `0`) value.
    /// It is not allowed to add any instructions that can cause side effects.
    ///
    /// This function must not observe the values of any flag not returned by `group`'s
    /// `flags_required` method.
    fn flag_group_llil<'a>(
        &self,
        _group: Self::FlagGroup,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }

    /// List of all registers for this architecture.
    fn registers_all(&self) -> Vec<Self::Register>;

    /// Get the [`Self::Register`] associated with the given [`RegisterId`].
    fn register_from_id(&self, id: RegisterId) -> Option<Self::Register>;

    /// List of full-width registers for this architecture.
    fn registers_full_width(&self) -> Vec<Self::Register>;

    // TODO: Document the difference between global and system registers.
    fn registers_global(&self) -> Vec<Self::Register> {
        Vec::new()
    }

    // TODO: Document the difference between global and system registers.
    fn registers_system(&self) -> Vec<Self::Register> {
        Vec::new()
    }

    /// The register used as the stack pointer, if any.
    fn stack_pointer_reg(&self) -> Option<Self::Register>;

    /// The register used as the link register, if any. Defaults to `None`.
    fn link_reg(&self) -> Option<Self::Register> {
        None
    }

    /// List of concrete register stacks for this architecture.
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::register_stack_from_id`]
    fn register_stacks(&self) -> Vec<Self::RegisterStack> {
        Vec::new()
    }

    /// Get the [`Self::RegisterStack`] associated with the given [`RegisterStackId`].
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::register_stacks`]
    fn register_stack_from_id(&self, _id: RegisterStackId) -> Option<Self::RegisterStack> {
        None
    }

    /// List of concrete flags for this architecture.
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_types`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    fn flags(&self) -> Vec<Self::Flag> {
        Vec::new()
    }

    /// Get the [`Self::Flag`] associated with the given [`FlagId`].
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_write_types`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    fn flag_from_id(&self, _id: FlagId) -> Option<Self::Flag> {
        None
    }

    /// List of concrete flag write types for this architecture.
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    fn flag_write_types(&self) -> Vec<Self::FlagWrite> {
        Vec::new()
    }

    /// Get the [`Self::FlagWrite`] associated with the given [`FlagWriteId`].
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_types`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    fn flag_write_from_id(&self, _id: FlagWriteId) -> Option<Self::FlagWrite> {
        None
    }

    /// List of concrete flag classes for this architecture.
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    fn flag_classes(&self) -> Vec<Self::FlagClass> {
        Vec::new()
    }

    /// Get the [`Self::FlagClass`] associated with the given [`FlagClassId`].
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_groups`]
    /// - [`Architecture::flag_group_from_id`]
    fn flag_class_from_id(&self, _id: FlagClassId) -> Option<Self::FlagClass> {
        None
    }

    /// List of concrete flag groups for this architecture.
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_group_from_id`]
    fn flag_groups(&self) -> Vec<Self::FlagGroup> {
        Vec::new()
    }

    /// Get the [`Self::FlagGroup`] associated with the given [`FlagGroupId`].
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::flags`]
    /// - [`Architecture::flag_from_id`]
    /// - [`Architecture::flag_write_from_id`]
    /// - [`Architecture::flag_classes`]
    /// - [`Architecture::flag_class_from_id`]
    /// - [`Architecture::flag_groups`]
    fn flag_group_from_id(&self, _id: FlagGroupId) -> Option<Self::FlagGroup> {
        None
    }

    /// List of concrete intrinsics for this architecture.
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::intrinsic_from_id`]
    fn intrinsics(&self) -> Vec<Self::Intrinsic> {
        Vec::new()
    }

    /// The [`BNIntrinsicClass`] of the given intrinsic. Defaults to the general class.
    fn intrinsic_class(&self, _id: IntrinsicId) -> BNIntrinsicClass {
        BNIntrinsicClass::GeneralIntrinsicClass
    }

    /// Get the [`Self::Intrinsic`] associated with the given [`IntrinsicId`].
    ///
    /// You **must** override the following functions as well:
    ///
    /// - [`Architecture::intrinsics`]
    fn intrinsic_from_id(&self, _id: IntrinsicId) -> Option<Self::Intrinsic> {
        None
    }

    /// Let the UI display this patch option.
    ///
    /// If set to true, you must override [`Architecture::assemble`].
    fn can_assemble(&self) -> bool {
        false
    }

    /// Assemble the code at the specified address and return the machine code in bytes.
    ///
    /// If overridden, you must set [`Architecture::can_assemble`] to `true`.
    fn assemble(&self, _code: &str, _addr: u64) -> Result<Vec<u8>, String> {
        Err("Assemble unsupported".into())
    }

    /// Let the UI display this patch option.
    ///
    /// If set to true, you must override [`Architecture::invert_branch`].
    fn is_never_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        self.is_invert_branch_patch_available(data, addr)
    }

    /// Let the UI display this patch option.
    ///
    /// If set to true, you must override [`Architecture::always_branch`].
    fn is_always_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }

    /// Let the UI display this patch option.
    ///
    /// If set to true, you must override [`Architecture::invert_branch`].
    fn is_invert_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }

    /// Let the UI display this patch option.
    ///
    /// If set to true, you must override [`Architecture::skip_and_return_value`].
    fn is_skip_and_return_zero_patch_available(&self, data: &[u8], addr: u64) -> bool {
        self.is_skip_and_return_value_patch_available(data, addr)
    }

    /// Let the UI display this patch option.
    ///
    /// If set to true, you must override [`Architecture::skip_and_return_value`].
    fn is_skip_and_return_value_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
        false
    }

    /// Patch the instruction to a no-op. The default reports the patch as unsupported.
    fn convert_to_nop(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }

    /// Patch the instruction to always branch.
    ///
    /// If overridden, you must also override [`Architecture::is_always_branch_patch_available`].
    fn always_branch(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }

    /// Patch the instruction to invert the branch condition.
    ///
    /// If overridden, you must also override [`Architecture::is_invert_branch_patch_available`].
    fn invert_branch(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }

    /// Patch the instruction to skip and return value.
    ///
    /// If overridden, you must also override [`Architecture::is_skip_and_return_value_patch_available`].
    fn skip_and_return_value(&self, _data: &mut [u8], _addr: u64, _value: u64) -> bool {
        false
    }

    /// Returns the [`Self::Handle`] owning this architecture implementation.
    fn handle(&self) -> Self::Handle;
}
552
/// Owned wrapper around a core `BNFunctionLifterContext`, passed to
/// [`Architecture::lift_function`] implementations.
pub struct FunctionLifterContext {
    pub(crate) handle: *mut BNFunctionLifterContext,
}
556
impl FunctionLifterContext {
    /// Wrap a raw core lifter context handle.
    ///
    /// # Safety
    ///
    /// `handle` must be a valid, non-null pointer to a core `BNFunctionLifterContext`.
    pub unsafe fn from_raw(handle: *mut BNFunctionLifterContext) -> Self {
        debug_assert!(!handle.is_null());

        FunctionLifterContext { handle }
    }
}
564
/// A core-allocated array of architecture handles (pointer + length) as returned by
/// `BNGetArchitectureList`. Dereferences to a `[CoreArchitecture]` slice and frees the
/// core allocation on drop.
pub struct CoreArchitectureList(*mut *mut BNArchitecture, usize);
567
568impl Deref for CoreArchitectureList {
569    type Target = [CoreArchitecture];
570
571    fn deref(&self) -> &Self::Target {
572        unsafe { std::slice::from_raw_parts_mut(self.0 as *mut CoreArchitecture, self.1) }
573    }
574}
575
impl Drop for CoreArchitectureList {
    fn drop(&mut self) {
        // Return the core-allocated list array to the core for deallocation.
        unsafe {
            BNFreeArchitectureList(self.0);
        }
    }
}
583
/// Handle to an architecture registered with the core. Copyable because the underlying
/// `BNArchitecture` is owned by the core.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreArchitecture {
    pub(crate) handle: *mut BNArchitecture,
}
588
589impl CoreArchitecture {
590    // TODO: Leave a note on architecture lifetimes. Specifically that they are never freed.
591    pub unsafe fn from_raw(handle: *mut BNArchitecture) -> Self {
592        debug_assert!(!handle.is_null());
593        CoreArchitecture { handle }
594    }
595
596    pub fn list_all() -> CoreArchitectureList {
597        let mut count: usize = 0;
598        let archs = unsafe { BNGetArchitectureList(&mut count) };
599
600        CoreArchitectureList(archs, count)
601    }
602
603    pub fn by_name(name: &str) -> Option<Self> {
604        let name = name.to_cstr();
605        let handle = unsafe { BNGetArchitectureByName(name.as_ptr()) };
606        match handle.is_null() {
607            false => Some(CoreArchitecture { handle }),
608            true => None,
609        }
610    }
611
612    pub fn name(&self) -> String {
613        unsafe { BnString::into_string(BNGetArchitectureName(self.handle)) }
614    }
615}
616
// SAFETY: `CoreArchitecture` is a copyable handle to a core-owned object.
// NOTE(review): these impls rely on the core's architecture objects being safe to use
// from multiple threads — confirm against the core API's threading guarantees.
unsafe impl Send for CoreArchitecture {}
unsafe impl Sync for CoreArchitecture {}
619
// Reflexive impl so generic code bounded on `AsRef<CoreArchitecture>` (as the
// `Architecture` trait requires) accepts the core type directly.
impl AsRef<CoreArchitecture> for CoreArchitecture {
    fn as_ref(&self) -> &Self {
        self
    }
}
625
impl Architecture for CoreArchitecture {
    type Handle = Self;

    // All associated types resolve to their `Core*` counterparts, which delegate to the
    // already-registered core architecture.
    type RegisterInfo = CoreRegisterInfo;
    type Register = CoreRegister;
    type RegisterStackInfo = CoreRegisterStackInfo;
    type RegisterStack = CoreRegisterStack;
    type Flag = CoreFlag;
    type FlagWrite = CoreFlagWrite;
    type FlagClass = CoreFlagClass;
    type FlagGroup = CoreFlagGroup;
    type Intrinsic = CoreIntrinsic;
638
    // Simple pass-throughs to the core's architecture property accessors.
    fn endianness(&self) -> Endianness {
        unsafe { BNGetArchitectureEndianness(self.handle) }
    }

    fn address_size(&self) -> usize {
        unsafe { BNGetArchitectureAddressSize(self.handle) }
    }

    fn default_integer_size(&self) -> usize {
        unsafe { BNGetArchitectureDefaultIntegerSize(self.handle) }
    }

    fn instruction_alignment(&self) -> usize {
        unsafe { BNGetArchitectureInstructionAlignment(self.handle) }
    }

    fn max_instr_len(&self) -> usize {
        unsafe { BNGetArchitectureMaxInstructionLength(self.handle) }
    }

    fn opcode_display_len(&self) -> usize {
        unsafe { BNGetArchitectureOpcodeDisplayLength(self.handle) }
    }
662
663    fn associated_arch_by_addr(&self, addr: u64) -> CoreArchitecture {
664        let handle = unsafe { BNGetAssociatedArchitectureByAddress(self.handle, addr as *mut _) };
665        CoreArchitecture { handle }
666    }
667
668    fn instruction_info(&self, data: &[u8], addr: u64) -> Option<InstructionInfo> {
669        let mut info = BNInstructionInfo::default();
670        if unsafe { BNGetInstructionInfo(self.handle, data.as_ptr(), addr, data.len(), &mut info) }
671        {
672            Some(info.into())
673        } else {
674            None
675        }
676    }
677
    fn instruction_text(
        &self,
        data: &[u8],
        addr: u64,
    ) -> Option<(usize, Vec<InstructionTextToken>)> {
        // `consumed` is in/out: initialized to the available byte count, updated by the
        // core with the length of the decoded instruction.
        let mut consumed = data.len();
        let mut count: usize = 0;
        let mut result: *mut BNInstructionTextToken = std::ptr::null_mut();

        unsafe {
            if BNGetInstructionText(
                self.handle,
                data.as_ptr(),
                addr,
                &mut consumed,
                &mut result,
                &mut count,
            ) {
                // Copy the tokens out of the core-owned array before releasing it.
                let instr_text_tokens = std::slice::from_raw_parts(result, count)
                    .iter()
                    .map(InstructionTextToken::from_raw)
                    .collect();
                BNFreeInstructionText(result, count);
                Some((consumed, instr_text_tokens))
            } else {
                None
            }
        }
    }
707
708    fn instruction_llil(
709        &self,
710        data: &[u8],
711        addr: u64,
712        il: &LowLevelILMutableFunction,
713    ) -> Option<(usize, bool)> {
714        let mut size = data.len();
715        let success = unsafe {
716            BNGetInstructionLowLevelIL(
717                self.handle,
718                data.as_ptr(),
719                addr,
720                &mut size as *mut _,
721                il.handle,
722            )
723        };
724
725        if !success {
726            None
727        } else {
728            Some((size, true))
729        }
730    }
731
    /// Performs basic block recovery and commits the results to the function analysis.
    ///
    /// NOTE: Only implement this method if function-level analysis is required. Otherwise, do not
    /// implement to let default basic block analysis take place.
    ///
    /// NOTE: The default implementation exists in C++ here: <https://github.com/Vector35/binaryninja-api/blob/dev/defaultabb.cpp>
    fn analyze_basic_blocks(
        &self,
        function: &mut Function,
        context: &mut BasicBlockAnalysisContext,
    ) {
        // Delegate to the registered core architecture's own analysis callback.
        unsafe {
            BNArchitectureAnalyzeBasicBlocks(self.handle, function.handle, context.handle);
        }
    }
747
    /// Lift the function via the registered core architecture's own lifter callback.
    fn lift_function(
        &self,
        function: LowLevelILMutableFunction,
        context: &mut FunctionLifterContext,
    ) -> bool {
        unsafe { BNArchitectureLiftFunction(self.handle, function.handle, context.handle) }
    }
755
    /// Always `None` for core architectures: the registered architecture handles its own
    /// flag-write lifting inside the core, so no Rust-side expression is produced.
    fn flag_write_llil<'a>(
        &self,
        _flag: Self::Flag,
        _flag_write: Self::FlagWrite,
        _op: LowLevelILFlagWriteOp<Self::Register>,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }
765
    fn flags_required_for_flag_condition(
        &self,
        condition: FlagCondition,
        class: Option<Self::FlagClass>,
    ) -> Vec<Self::Flag> {
        // Falls back to raw id 0 when no flag class is given.
        let class_id_raw = class.map(|c| c.id().0).unwrap_or(0);

        unsafe {
            let mut count: usize = 0;
            let flags = BNGetArchitectureFlagsRequiredForFlagCondition(
                self.handle,
                condition,
                class_id_raw,
                &mut count,
            );

            // NOTE(review): assumes the core returns a valid (possibly empty) array; a
            // null `flags` with `count == 0` would make `from_raw_parts` UB — confirm.
            let ret = std::slice::from_raw_parts(flags, count)
                .iter()
                .map(|&id| FlagId::from(id))
                .filter_map(|flag| CoreFlag::new(*self, flag))
                .collect();

            BNFreeRegisterList(flags);

            ret
        }
    }
793
    /// Always `None` for core architectures: flag-condition lifting is resolved inside
    /// the core for registered architectures.
    fn flag_cond_llil<'a>(
        &self,
        _cond: FlagCondition,
        _class: Option<Self::FlagClass>,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }
802
    /// Always `None` for core architectures: flag-group lifting is resolved inside the
    /// core for registered architectures.
    fn flag_group_llil<'a>(
        &self,
        _group: Self::FlagGroup,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }
810
    fn registers_all(&self) -> Vec<CoreRegister> {
        unsafe {
            let mut count: usize = 0;
            let registers_raw = BNGetAllArchitectureRegisters(self.handle, &mut count);

            // Convert the raw id array into typed registers, then free the core array.
            let ret = std::slice::from_raw_parts(registers_raw, count)
                .iter()
                .map(|&id| RegisterId::from(id))
                .filter_map(|reg| CoreRegister::new(*self, reg))
                .collect();

            BNFreeRegisterList(registers_raw);

            ret
        }
    }
827
    /// Get the [`CoreRegister`] for `id`, or `None` if the core has no such register.
    fn register_from_id(&self, id: RegisterId) -> Option<CoreRegister> {
        CoreRegister::new(*self, id)
    }
831
    fn registers_full_width(&self) -> Vec<CoreRegister> {
        unsafe {
            let mut count: usize = 0;
            let registers_raw = BNGetFullWidthArchitectureRegisters(self.handle, &mut count);

            // Convert the raw id array into typed registers, then free the core array.
            let ret = std::slice::from_raw_parts(registers_raw, count)
                .iter()
                .map(|&id| RegisterId::from(id))
                .filter_map(|reg| CoreRegister::new(*self, reg))
                .collect();

            BNFreeRegisterList(registers_raw);

            ret
        }
    }
848
849    fn registers_global(&self) -> Vec<CoreRegister> {
850        unsafe {
851            let mut count: usize = 0;
852            let registers_raw = BNGetArchitectureGlobalRegisters(self.handle, &mut count);
853
854            let ret = std::slice::from_raw_parts(registers_raw, count)
855                .iter()
856                .map(|&id| RegisterId::from(id))
857                .filter_map(|reg| CoreRegister::new(*self, reg))
858                .collect();
859
860            BNFreeRegisterList(registers_raw);
861
862            ret
863        }
864    }
865
866    fn registers_system(&self) -> Vec<CoreRegister> {
867        unsafe {
868            let mut count: usize = 0;
869            let registers_raw = BNGetArchitectureSystemRegisters(self.handle, &mut count);
870
871            let ret = std::slice::from_raw_parts(registers_raw, count)
872                .iter()
873                .map(|&id| RegisterId::from(id))
874                .filter_map(|reg| CoreRegister::new(*self, reg))
875                .collect();
876
877            BNFreeRegisterList(registers_raw);
878
879            ret
880        }
881    }
882
883    fn stack_pointer_reg(&self) -> Option<CoreRegister> {
884        match unsafe { BNGetArchitectureStackPointerRegister(self.handle) } {
885            0xffff_ffff => None,
886            reg => Some(CoreRegister::new(*self, reg.into())?),
887        }
888    }
889
890    fn link_reg(&self) -> Option<CoreRegister> {
891        match unsafe { BNGetArchitectureLinkRegister(self.handle) } {
892            0xffff_ffff => None,
893            reg => Some(CoreRegister::new(*self, reg.into())?),
894        }
895    }
896
897    fn register_stacks(&self) -> Vec<CoreRegisterStack> {
898        unsafe {
899            let mut count: usize = 0;
900            let reg_stacks_raw = BNGetAllArchitectureRegisterStacks(self.handle, &mut count);
901
902            let ret = std::slice::from_raw_parts(reg_stacks_raw, count)
903                .iter()
904                .map(|&id| RegisterStackId::from(id))
905                .filter_map(|reg_stack| CoreRegisterStack::new(*self, reg_stack))
906                .collect();
907
908            BNFreeRegisterList(reg_stacks_raw);
909
910            ret
911        }
912    }
913
    /// Resolve a register stack id to a [`CoreRegisterStack`]; `None` when the
    /// id is not valid for this architecture.
    fn register_stack_from_id(&self, id: RegisterStackId) -> Option<CoreRegisterStack> {
        CoreRegisterStack::new(*self, id)
    }
917
918    fn flags(&self) -> Vec<CoreFlag> {
919        unsafe {
920            let mut count: usize = 0;
921            let flags_raw = BNGetAllArchitectureFlags(self.handle, &mut count);
922
923            let ret = std::slice::from_raw_parts(flags_raw, count)
924                .iter()
925                .map(|&id| FlagId::from(id))
926                .filter_map(|flag| CoreFlag::new(*self, flag))
927                .collect();
928
929            BNFreeRegisterList(flags_raw);
930
931            ret
932        }
933    }
934
    /// Resolve a flag id to a [`CoreFlag`]; `None` when the id is not a valid
    /// flag of this architecture.
    fn flag_from_id(&self, id: FlagId) -> Option<CoreFlag> {
        CoreFlag::new(*self, id)
    }
938
939    fn flag_write_types(&self) -> Vec<CoreFlagWrite> {
940        unsafe {
941            let mut count: usize = 0;
942            let flag_writes_raw = BNGetAllArchitectureFlagWriteTypes(self.handle, &mut count);
943
944            let ret = std::slice::from_raw_parts(flag_writes_raw, count)
945                .iter()
946                .map(|&id| FlagWriteId::from(id))
947                .filter_map(|flag_write| CoreFlagWrite::new(*self, flag_write))
948                .collect();
949
950            BNFreeRegisterList(flag_writes_raw);
951
952            ret
953        }
954    }
955
    /// Resolve a flag write type id to a [`CoreFlagWrite`]; `None` when the id
    /// is not valid for this architecture.
    fn flag_write_from_id(&self, id: FlagWriteId) -> Option<CoreFlagWrite> {
        CoreFlagWrite::new(*self, id)
    }
959
960    fn flag_classes(&self) -> Vec<CoreFlagClass> {
961        unsafe {
962            let mut count: usize = 0;
963            let flag_classes_raw = BNGetAllArchitectureSemanticFlagClasses(self.handle, &mut count);
964
965            let ret = std::slice::from_raw_parts(flag_classes_raw, count)
966                .iter()
967                .map(|&id| FlagClassId::from(id))
968                .filter_map(|flag_class| CoreFlagClass::new(*self, flag_class))
969                .collect();
970
971            BNFreeRegisterList(flag_classes_raw);
972
973            ret
974        }
975    }
976
    /// Resolve a semantic flag class id to a [`CoreFlagClass`]; `None` when the
    /// id is not valid for this architecture.
    fn flag_class_from_id(&self, id: FlagClassId) -> Option<CoreFlagClass> {
        CoreFlagClass::new(*self, id)
    }
980
981    fn flag_groups(&self) -> Vec<CoreFlagGroup> {
982        unsafe {
983            let mut count: usize = 0;
984            let flag_groups_raw = BNGetAllArchitectureSemanticFlagGroups(self.handle, &mut count);
985
986            let ret = std::slice::from_raw_parts(flag_groups_raw, count)
987                .iter()
988                .map(|&id| FlagGroupId::from(id))
989                .filter_map(|flag_group| CoreFlagGroup::new(*self, flag_group))
990                .collect();
991
992            BNFreeRegisterList(flag_groups_raw);
993
994            ret
995        }
996    }
997
    /// Resolve a semantic flag group id to a [`CoreFlagGroup`]; `None` when the
    /// id is not valid for this architecture.
    fn flag_group_from_id(&self, id: FlagGroupId) -> Option<CoreFlagGroup> {
        CoreFlagGroup::new(*self, id)
    }
1001
1002    fn intrinsics(&self) -> Vec<CoreIntrinsic> {
1003        unsafe {
1004            let mut count: usize = 0;
1005            let intrinsics_raw = BNGetAllArchitectureIntrinsics(self.handle, &mut count);
1006
1007            let intrinsics = std::slice::from_raw_parts_mut(intrinsics_raw, count)
1008                .iter()
1009                .map(|&id| IntrinsicId::from(id))
1010                .filter_map(|intrinsic| CoreIntrinsic::new(*self, intrinsic))
1011                .collect();
1012
1013            BNFreeRegisterList(intrinsics_raw);
1014
1015            intrinsics
1016        }
1017    }
1018
    /// Resolve an intrinsic id to a [`CoreIntrinsic`]; `None` when the id is
    /// not valid for this architecture.
    fn intrinsic_from_id(&self, id: IntrinsicId) -> Option<CoreIntrinsic> {
        CoreIntrinsic::new(*self, id)
    }
1022
    /// Whether the core reports an assembler is available for this
    /// architecture (see [`Architecture::assemble`]).
    fn can_assemble(&self) -> bool {
        unsafe { BNCanArchitectureAssemble(self.handle) }
    }
1026
1027    fn assemble(&self, code: &str, addr: u64) -> Result<Vec<u8>, String> {
1028        let code = CString::new(code).map_err(|_| "Invalid encoding in code string".to_string())?;
1029
1030        let result = DataBuffer::new(&[]);
1031        // TODO: This is actually a list of errors.
1032        let mut error_raw: *mut c_char = std::ptr::null_mut();
1033        let res = unsafe {
1034            BNAssemble(
1035                self.handle,
1036                code.as_ptr(),
1037                addr,
1038                result.as_raw(),
1039                &mut error_raw as *mut *mut c_char,
1040            )
1041        };
1042
1043        let error = raw_to_string(error_raw);
1044        unsafe {
1045            BNFreeString(error_raw);
1046        }
1047
1048        if res {
1049            Ok(result.get_data().to_vec())
1050        } else {
1051            Err(error.unwrap_or_else(|| "Assemble failed".into()))
1052        }
1053    }
1054
    /// Asks the core whether the instruction bytes in `data` at `addr` can be
    /// patched so the branch is never taken.
    fn is_never_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureNeverBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
        }
    }
1060
    /// Asks the core whether the instruction bytes in `data` at `addr` can be
    /// patched so the branch is always taken.
    fn is_always_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureAlwaysBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
        }
    }
1066
    /// Asks the core whether the instruction bytes in `data` at `addr` can be
    /// patched to invert the branch condition.
    fn is_invert_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureInvertBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
        }
    }
1072
    /// Asks the core whether the instruction bytes in `data` at `addr` can be
    /// patched to skip the call and make it produce zero.
    fn is_skip_and_return_zero_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureSkipAndReturnZeroPatchAvailable(
                self.handle,
                data.as_ptr(),
                addr,
                data.len(),
            )
        }
    }
1083
    /// Asks the core whether the instruction bytes in `data` at `addr` can be
    /// patched to skip the call and make it produce an arbitrary value.
    fn is_skip_and_return_value_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureSkipAndReturnValuePatchAvailable(
                self.handle,
                data.as_ptr(),
                addr,
                data.len(),
            )
        }
    }
1094
    /// Patch the instruction bytes in `data` (located at `addr`) into a no-op,
    /// in place. The returned bool reports whether the core applied the patch.
    fn convert_to_nop(&self, data: &mut [u8], addr: u64) -> bool {
        unsafe { BNArchitectureConvertToNop(self.handle, data.as_mut_ptr(), addr, data.len()) }
    }
1098
    /// Patch the instruction bytes in `data` (located at `addr`) so the branch
    /// is always taken, in place. Returns whether the core applied the patch.
    fn always_branch(&self, data: &mut [u8], addr: u64) -> bool {
        unsafe { BNArchitectureAlwaysBranch(self.handle, data.as_mut_ptr(), addr, data.len()) }
    }
1102
    /// Patch the instruction bytes in `data` (located at `addr`) to invert the
    /// branch condition, in place. Returns whether the core applied the patch.
    fn invert_branch(&self, data: &mut [u8], addr: u64) -> bool {
        unsafe { BNArchitectureInvertBranch(self.handle, data.as_mut_ptr(), addr, data.len()) }
    }
1106
    /// Patch the instruction bytes in `data` (located at `addr`) to skip the
    /// call and produce `value` instead, in place. Returns whether the core
    /// applied the patch.
    fn skip_and_return_value(&self, data: &mut [u8], addr: u64, value: u64) -> bool {
        unsafe {
            BNArchitectureSkipAndReturnValue(
                self.handle,
                data.as_mut_ptr(),
                addr,
                data.len(),
                value,
            )
        }
    }
1118
    /// A [`CoreArchitecture`] is its own handle: the wrapper is a cheap copy of
    /// the underlying core pointer.
    fn handle(&self) -> CoreArchitecture {
        *self
    }
1122}
1123
1124impl Debug for CoreArchitecture {
1125    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
1126        f.debug_struct("CoreArchitecture")
1127            .field("name", &self.name())
1128            .field("endianness", &self.endianness())
1129            .field("address_size", &self.address_size())
1130            .field("instruction_alignment", &self.instruction_alignment())
1131            .finish()
1132    }
1133}
1134
/// Generates a getter/setter method pair for one of the architecture's
/// calling-convention slots (default, cdecl, stdcall, fastcall), delegating to
/// the given core FFI entry points (`$get_api` / `$set_api`).
macro_rules! cc_func {
    ($get_name:ident, $get_api:ident, $set_name:ident, $set_api:ident) => {
        // Getter: `None` when the core has no convention registered in this slot.
        fn $get_name(&self) -> Option<Ref<CoreCallingConvention>> {
            let arch = self.as_ref();

            unsafe {
                let cc = $get_api(arch.handle);

                if cc.is_null() {
                    None
                } else {
                    Some(CoreCallingConvention::ref_from_raw(
                        cc,
                        self.as_ref().handle(),
                    ))
                }
            }
        }

        // Setter: panics if `cc` was created for a different architecture, to
        // prevent registering a mismatched convention with the core.
        fn $set_name(&self, cc: &CoreCallingConvention) {
            let arch = self.as_ref();

            assert!(
                cc.arch_handle.borrow().as_ref().handle == arch.handle,
                "use of calling convention with non-matching architecture!"
            );

            unsafe {
                $set_api(arch.handle, cc.handle);
            }
        }
    };
}
1168
/// Contains helper methods for all types implementing [`Architecture`].
pub trait ArchitectureExt: Architecture {
    /// Look up a register by name. Returns `None` when the core reports no
    /// match (the 0xffff_ffff sentinel).
    fn register_by_name(&self, name: &str) -> Option<Self::Register> {
        let name = name.to_cstr();

        match unsafe { BNGetArchitectureRegisterByName(self.as_ref().handle, name.as_ptr()) } {
            0xffff_ffff => None,
            reg => self.register_from_id(reg.into()),
        }
    }

    /// All calling conventions registered for this architecture.
    fn calling_conventions(&self) -> Array<CoreCallingConvention> {
        unsafe {
            let mut count = 0;
            let calling_convs =
                BNGetArchitectureCallingConventions(self.as_ref().handle, &mut count);
            Array::new(calling_convs, count, self.as_ref().handle())
        }
    }

    // Getter/setter pairs for the architecture's calling-convention slots,
    // generated by the `cc_func!` macro above.
    cc_func!(
        get_default_calling_convention,
        BNGetArchitectureDefaultCallingConvention,
        set_default_calling_convention,
        BNSetArchitectureDefaultCallingConvention
    );

    cc_func!(
        get_cdecl_calling_convention,
        BNGetArchitectureCdeclCallingConvention,
        set_cdecl_calling_convention,
        BNSetArchitectureCdeclCallingConvention
    );

    cc_func!(
        get_stdcall_calling_convention,
        BNGetArchitectureStdcallCallingConvention,
        set_stdcall_calling_convention,
        BNSetArchitectureStdcallCallingConvention
    );

    cc_func!(
        get_fastcall_calling_convention,
        BNGetArchitectureFastcallCallingConvention,
        set_fastcall_calling_convention,
        BNSetArchitectureFastcallCallingConvention
    );

    /// The standalone (OS-independent) platform for this architecture, if the
    /// core provides one.
    fn standalone_platform(&self) -> Option<Ref<Platform>> {
        unsafe {
            let handle = BNGetArchitectureStandalonePlatform(self.as_ref().handle);

            if handle.is_null() {
                return None;
            }

            Some(Platform::ref_from_raw(handle))
        }
    }

    /// The relocation handler registered for `view_name`, if any. Also returns
    /// `None` when `view_name` contains an interior NUL byte.
    fn relocation_handler(&self, view_name: &str) -> Option<Ref<CoreRelocationHandler>> {
        let view_name = match CString::new(view_name) {
            Ok(view_name) => view_name,
            Err(_) => return None,
        };

        unsafe {
            let handle =
                BNArchitectureGetRelocationHandler(self.as_ref().handle, view_name.as_ptr());

            if handle.is_null() {
                return None;
            }

            Some(CoreRelocationHandler::ref_from_raw(handle))
        }
    }

    /// Register a custom [`RelocationHandler`] for this architecture under
    /// `name`; `func` constructs the handler from its handles.
    fn register_relocation_handler<R, F>(&self, name: &str, func: F)
    where
        R: 'static
            + RelocationHandler<Handle = CustomRelocationHandlerHandle<R>>
            + Send
            + Sync
            + Sized,
        F: FnOnce(CustomRelocationHandlerHandle<R>, CoreRelocationHandler) -> R,
    {
        crate::relocation::register_relocation_handler(self.as_ref(), name, func);
    }

    /// Register a [`FunctionRecognizer`] scoped to this architecture.
    fn register_function_recognizer<R>(&self, recognizer: R)
    where
        R: 'static + FunctionRecognizer + Send + Sync + Sized,
    {
        crate::function_recognizer::register_arch_function_recognizer(self.as_ref(), recognizer);
    }
}
1266
1267impl<T: Architecture> ArchitectureExt for T {}
1268
1269/// Registers a new architecture with the given name.
1270///
1271/// NOTE: This function should only be called within `CorePluginInit`.
1272pub fn register_architecture<A, F>(name: &str, func: F) -> &'static A
1273where
1274    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync + Sized,
1275    F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
1276{
1277    #[repr(C)]
1278    struct ArchitectureBuilder<A, F>
1279    where
1280        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1281        F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
1282    {
1283        arch: MaybeUninit<A>,
1284        func: Option<F>,
1285    }
1286
1287    extern "C" fn cb_init<A, F>(ctxt: *mut c_void, obj: *mut BNArchitecture)
1288    where
1289        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1290        F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
1291    {
1292        unsafe {
1293            let custom_arch = &mut *(ctxt as *mut ArchitectureBuilder<A, F>);
1294            let custom_arch_handle = CustomArchitectureHandle {
1295                handle: ctxt as *mut A,
1296            };
1297
1298            let create = custom_arch.func.take().unwrap();
1299            custom_arch
1300                .arch
1301                .write(create(custom_arch_handle, CoreArchitecture::from_raw(obj)));
1302        }
1303    }
1304
1305    extern "C" fn cb_endianness<A>(ctxt: *mut c_void) -> BNEndianness
1306    where
1307        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1308    {
1309        let custom_arch = unsafe { &*(ctxt as *mut A) };
1310        custom_arch.endianness()
1311    }
1312
1313    extern "C" fn cb_address_size<A>(ctxt: *mut c_void) -> usize
1314    where
1315        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1316    {
1317        let custom_arch = unsafe { &*(ctxt as *mut A) };
1318        custom_arch.address_size()
1319    }
1320
1321    extern "C" fn cb_default_integer_size<A>(ctxt: *mut c_void) -> usize
1322    where
1323        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1324    {
1325        let custom_arch = unsafe { &*(ctxt as *mut A) };
1326        custom_arch.default_integer_size()
1327    }
1328
1329    extern "C" fn cb_instruction_alignment<A>(ctxt: *mut c_void) -> usize
1330    where
1331        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1332    {
1333        let custom_arch = unsafe { &*(ctxt as *mut A) };
1334        custom_arch.instruction_alignment()
1335    }
1336
1337    extern "C" fn cb_max_instr_len<A>(ctxt: *mut c_void) -> usize
1338    where
1339        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1340    {
1341        let custom_arch = unsafe { &*(ctxt as *mut A) };
1342        custom_arch.max_instr_len()
1343    }
1344
1345    extern "C" fn cb_opcode_display_len<A>(ctxt: *mut c_void) -> usize
1346    where
1347        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1348    {
1349        let custom_arch = unsafe { &*(ctxt as *mut A) };
1350        custom_arch.opcode_display_len()
1351    }
1352
1353    extern "C" fn cb_associated_arch_by_addr<A>(
1354        ctxt: *mut c_void,
1355        addr: *mut u64,
1356    ) -> *mut BNArchitecture
1357    where
1358        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1359    {
1360        let custom_arch = unsafe { &*(ctxt as *mut A) };
1361        let addr = unsafe { *(addr) };
1362
1363        custom_arch.associated_arch_by_addr(addr).handle
1364    }
1365
1366    extern "C" fn cb_instruction_info<A>(
1367        ctxt: *mut c_void,
1368        data: *const u8,
1369        addr: u64,
1370        len: usize,
1371        result: *mut BNInstructionInfo,
1372    ) -> bool
1373    where
1374        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1375    {
1376        let custom_arch = unsafe { &*(ctxt as *mut A) };
1377        let data = unsafe { std::slice::from_raw_parts(data, len) };
1378
1379        match custom_arch.instruction_info(data, addr) {
1380            Some(info) => {
1381                // SAFETY: Passed in to be written to
1382                unsafe { *result = info.into() };
1383                true
1384            }
1385            None => false,
1386        }
1387    }
1388
1389    extern "C" fn cb_get_instruction_text<A>(
1390        ctxt: *mut c_void,
1391        data: *const u8,
1392        addr: u64,
1393        len: *mut usize,
1394        result: *mut *mut BNInstructionTextToken,
1395        count: *mut usize,
1396    ) -> bool
1397    where
1398        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1399    {
1400        let custom_arch = unsafe { &*(ctxt as *mut A) };
1401        let data = unsafe { std::slice::from_raw_parts(data, *len) };
1402        let result = unsafe { &mut *result };
1403
1404        let Some((res_size, res_tokens)) = custom_arch.instruction_text(data, addr) else {
1405            return false;
1406        };
1407
1408        let res_tokens: Box<[BNInstructionTextToken]> = res_tokens
1409            .into_iter()
1410            .map(InstructionTextToken::into_raw)
1411            .collect();
1412        unsafe {
1413            // NOTE: Freed with `cb_free_instruction_text`
1414            let res_tokens = Box::leak(res_tokens);
1415            *result = res_tokens.as_mut_ptr();
1416            *count = res_tokens.len();
1417            *len = res_size;
1418        }
1419        true
1420    }
1421
1422    extern "C" fn cb_free_instruction_text(tokens: *mut BNInstructionTextToken, count: usize) {
1423        unsafe {
1424            let raw_tokens = std::slice::from_raw_parts_mut(tokens, count);
1425            let boxed_tokens = Box::from_raw(raw_tokens);
1426            for token in boxed_tokens {
1427                InstructionTextToken::free_raw(token);
1428            }
1429        }
1430    }
1431
1432    extern "C" fn cb_instruction_llil<A>(
1433        ctxt: *mut c_void,
1434        data: *const u8,
1435        addr: u64,
1436        len: *mut usize,
1437        il: *mut BNLowLevelILFunction,
1438    ) -> bool
1439    where
1440        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1441    {
1442        let custom_arch = unsafe { &*(ctxt as *mut A) };
1443        let data = unsafe { std::slice::from_raw_parts(data, *len) };
1444        let lifter = unsafe {
1445            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
1446        };
1447
1448        match custom_arch.instruction_llil(data, addr, &lifter) {
1449            Some((res_len, res_value)) => {
1450                unsafe { *len = res_len };
1451                res_value
1452            }
1453            None => false,
1454        }
1455    }
1456
1457    extern "C" fn cb_analyze_basic_blocks<A>(
1458        ctxt: *mut c_void,
1459        function: *mut BNFunction,
1460        context: *mut BNBasicBlockAnalysisContext,
1461    ) where
1462        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1463    {
1464        let custom_arch = unsafe { &*(ctxt as *mut A) };
1465        let mut function = unsafe { Function::from_raw(function) };
1466        let mut context: BasicBlockAnalysisContext =
1467            unsafe { BasicBlockAnalysisContext::from_raw(context) };
1468        custom_arch.analyze_basic_blocks(&mut function, &mut context);
1469    }
1470
1471    extern "C" fn cb_lift_function<A>(
1472        ctxt: *mut c_void,
1473        function: *mut BNLowLevelILFunction,
1474        context: *mut BNFunctionLifterContext,
1475    ) -> bool
1476    where
1477        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1478    {
1479        let custom_arch = unsafe { &*(ctxt as *mut A) };
1480        let function = unsafe {
1481            LowLevelILMutableFunction::from_raw_with_arch(function, Some(*custom_arch.as_ref()))
1482        };
1483        let mut context: FunctionLifterContext =
1484            unsafe { FunctionLifterContext::from_raw(context) };
1485        custom_arch.lift_function(function, &mut context)
1486    }
1487
1488    extern "C" fn cb_reg_name<A>(ctxt: *mut c_void, reg: u32) -> *mut c_char
1489    where
1490        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1491    {
1492        let custom_arch = unsafe { &*(ctxt as *mut A) };
1493
1494        match custom_arch.register_from_id(reg.into()) {
1495            Some(reg) => BnString::into_raw(BnString::new(reg.name().as_ref())),
1496            None => BnString::into_raw(BnString::new("invalid_reg")),
1497        }
1498    }
1499
1500    extern "C" fn cb_flag_name<A>(ctxt: *mut c_void, flag: u32) -> *mut c_char
1501    where
1502        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1503    {
1504        let custom_arch = unsafe { &*(ctxt as *mut A) };
1505
1506        match custom_arch.flag_from_id(flag.into()) {
1507            Some(flag) => BnString::into_raw(BnString::new(flag.name().as_ref())),
1508            None => BnString::into_raw(BnString::new("invalid_flag")),
1509        }
1510    }
1511
1512    extern "C" fn cb_flag_write_name<A>(ctxt: *mut c_void, flag_write: u32) -> *mut c_char
1513    where
1514        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1515    {
1516        let custom_arch = unsafe { &*(ctxt as *mut A) };
1517
1518        match custom_arch.flag_write_from_id(flag_write.into()) {
1519            Some(flag_write) => BnString::into_raw(BnString::new(flag_write.name().as_ref())),
1520            None => BnString::into_raw(BnString::new("invalid_flag_write")),
1521        }
1522    }
1523
1524    extern "C" fn cb_semantic_flag_class_name<A>(ctxt: *mut c_void, class: u32) -> *mut c_char
1525    where
1526        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1527    {
1528        let custom_arch = unsafe { &*(ctxt as *mut A) };
1529
1530        match custom_arch.flag_class_from_id(class.into()) {
1531            Some(class) => BnString::into_raw(BnString::new(class.name().as_ref())),
1532            None => BnString::into_raw(BnString::new("invalid_flag_class")),
1533        }
1534    }
1535
1536    extern "C" fn cb_semantic_flag_group_name<A>(ctxt: *mut c_void, group: u32) -> *mut c_char
1537    where
1538        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1539    {
1540        let custom_arch = unsafe { &*(ctxt as *mut A) };
1541
1542        match custom_arch.flag_group_from_id(group.into()) {
1543            Some(group) => BnString::into_raw(BnString::new(group.name().as_ref())),
1544            None => BnString::into_raw(BnString::new("invalid_flag_group")),
1545        }
1546    }
1547
1548    extern "C" fn cb_registers_full_width<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1549    where
1550        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1551    {
1552        let custom_arch = unsafe { &*(ctxt as *mut A) };
1553        let mut regs: Box<[_]> = custom_arch
1554            .registers_full_width()
1555            .iter()
1556            .map(|r| r.id().0)
1557            .collect();
1558
1559        // SAFETY: `count` is an out parameter
1560        unsafe { *count = regs.len() };
1561        let regs_ptr = regs.as_mut_ptr();
1562        std::mem::forget(regs);
1563        regs_ptr
1564    }
1565
1566    extern "C" fn cb_registers_all<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1567    where
1568        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1569    {
1570        let custom_arch = unsafe { &*(ctxt as *mut A) };
1571        let mut regs: Box<[_]> = custom_arch
1572            .registers_all()
1573            .iter()
1574            .map(|r| r.id().0)
1575            .collect();
1576
1577        // SAFETY: `count` is an out parameter
1578        unsafe { *count = regs.len() };
1579        let regs_ptr = regs.as_mut_ptr();
1580        std::mem::forget(regs);
1581        regs_ptr
1582    }
1583
1584    extern "C" fn cb_registers_global<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1585    where
1586        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1587    {
1588        let custom_arch = unsafe { &*(ctxt as *mut A) };
1589        let mut regs: Box<[_]> = custom_arch
1590            .registers_global()
1591            .iter()
1592            .map(|r| r.id().0)
1593            .collect();
1594
1595        // SAFETY: `count` is an out parameter
1596        unsafe { *count = regs.len() };
1597        let regs_ptr = regs.as_mut_ptr();
1598        std::mem::forget(regs);
1599        regs_ptr
1600    }
1601
1602    extern "C" fn cb_registers_system<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1603    where
1604        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1605    {
1606        let custom_arch = unsafe { &*(ctxt as *mut A) };
1607        let mut regs: Box<[_]> = custom_arch
1608            .registers_system()
1609            .iter()
1610            .map(|r| r.id().0)
1611            .collect();
1612
1613        // SAFETY: `count` is an out parameter
1614        unsafe { *count = regs.len() };
1615        let regs_ptr = regs.as_mut_ptr();
1616        std::mem::forget(regs);
1617        regs_ptr
1618    }
1619
1620    extern "C" fn cb_flags<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1621    where
1622        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1623    {
1624        let custom_arch = unsafe { &*(ctxt as *mut A) };
1625        let mut flags: Box<[_]> = custom_arch.flags().iter().map(|f| f.id().0).collect();
1626
1627        // SAFETY: `count` is an out parameter
1628        unsafe { *count = flags.len() };
1629        let flags_ptr = flags.as_mut_ptr();
1630        std::mem::forget(flags);
1631        flags_ptr
1632    }
1633
1634    extern "C" fn cb_flag_write_types<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1635    where
1636        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1637    {
1638        let custom_arch = unsafe { &*(ctxt as *mut A) };
1639        let mut flag_writes: Box<[_]> = custom_arch
1640            .flag_write_types()
1641            .iter()
1642            .map(|f| f.id().0)
1643            .collect();
1644
1645        // SAFETY: `count` is an out parameter
1646        unsafe { *count = flag_writes.len() };
1647        let flags_ptr = flag_writes.as_mut_ptr();
1648        std::mem::forget(flag_writes);
1649        flags_ptr
1650    }
1651
1652    extern "C" fn cb_semantic_flag_classes<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1653    where
1654        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1655    {
1656        let custom_arch = unsafe { &*(ctxt as *mut A) };
1657        let mut flag_classes: Box<[_]> = custom_arch
1658            .flag_classes()
1659            .iter()
1660            .map(|f| f.id().0)
1661            .collect();
1662
1663        // SAFETY: `count` is an out parameter
1664        unsafe { *count = flag_classes.len() };
1665        let flags_ptr = flag_classes.as_mut_ptr();
1666        std::mem::forget(flag_classes);
1667        flags_ptr
1668    }
1669
1670    extern "C" fn cb_semantic_flag_groups<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1671    where
1672        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1673    {
1674        let custom_arch = unsafe { &*(ctxt as *mut A) };
1675        let mut flag_groups: Box<[_]> =
1676            custom_arch.flag_groups().iter().map(|f| f.id().0).collect();
1677
1678        // SAFETY: `count` is an out parameter
1679        unsafe { *count = flag_groups.len() };
1680        let flags_ptr = flag_groups.as_mut_ptr();
1681        std::mem::forget(flag_groups);
1682        flags_ptr
1683    }
1684
1685    extern "C" fn cb_flag_role<A>(ctxt: *mut c_void, flag: u32, class: u32) -> BNFlagRole
1686    where
1687        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1688    {
1689        let custom_arch = unsafe { &*(ctxt as *mut A) };
1690
1691        if let (Some(flag), class) = (
1692            custom_arch.flag_from_id(FlagId(flag)),
1693            custom_arch.flag_class_from_id(FlagClassId(class)),
1694        ) {
1695            flag.role(class)
1696        } else {
1697            FlagRole::SpecialFlagRole
1698        }
1699    }
1700
1701    extern "C" fn cb_flags_required_for_flag_cond<A>(
1702        ctxt: *mut c_void,
1703        cond: BNLowLevelILFlagCondition,
1704        class: u32,
1705        count: *mut usize,
1706    ) -> *mut u32
1707    where
1708        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1709    {
1710        let custom_arch = unsafe { &*(ctxt as *mut A) };
1711        let class = custom_arch.flag_class_from_id(FlagClassId(class));
1712        let mut flags: Box<[_]> = custom_arch
1713            .flags_required_for_flag_condition(cond, class)
1714            .iter()
1715            .map(|f| f.id().0)
1716            .collect();
1717
1718        // SAFETY: `count` is an out parameter
1719        unsafe { *count = flags.len() };
1720        let flags_ptr = flags.as_mut_ptr();
1721        std::mem::forget(flags);
1722        flags_ptr
1723    }
1724
1725    extern "C" fn cb_flags_required_for_semantic_flag_group<A>(
1726        ctxt: *mut c_void,
1727        group: u32,
1728        count: *mut usize,
1729    ) -> *mut u32
1730    where
1731        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1732    {
1733        let custom_arch = unsafe { &*(ctxt as *mut A) };
1734
1735        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
1736            let mut flags: Box<[_]> = group.flags_required().iter().map(|f| f.id().0).collect();
1737
1738            // SAFETY: `count` is an out parameter
1739            unsafe { *count = flags.len() };
1740            let flags_ptr = flags.as_mut_ptr();
1741            std::mem::forget(flags);
1742            flags_ptr
1743        } else {
1744            unsafe {
1745                *count = 0;
1746            }
1747            std::ptr::null_mut()
1748        }
1749    }
1750
1751    extern "C" fn cb_flag_conditions_for_semantic_flag_group<A>(
1752        ctxt: *mut c_void,
1753        group: u32,
1754        count: *mut usize,
1755    ) -> *mut BNFlagConditionForSemanticClass
1756    where
1757        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1758    {
1759        let custom_arch = unsafe { &*(ctxt as *mut A) };
1760
1761        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
1762            let flag_conditions = group.flag_conditions();
1763            let mut flags: Box<[_]> = flag_conditions
1764                .iter()
1765                .map(|(&class, &condition)| BNFlagConditionForSemanticClass {
1766                    semanticClass: class.id().0,
1767                    condition,
1768                })
1769                .collect();
1770
1771            // SAFETY: `count` is an out parameter
1772            unsafe { *count = flags.len() };
1773            let flags_ptr = flags.as_mut_ptr();
1774            std::mem::forget(flags);
1775            flags_ptr
1776        } else {
1777            unsafe {
1778                *count = 0;
1779            }
1780            std::ptr::null_mut()
1781        }
1782    }
1783
1784    extern "C" fn cb_free_flag_conditions_for_semantic_flag_group<A>(
1785        _ctxt: *mut c_void,
1786        conds: *mut BNFlagConditionForSemanticClass,
1787        count: usize,
1788    ) where
1789        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1790    {
1791        if conds.is_null() {
1792            return;
1793        }
1794
1795        unsafe {
1796            let flags_ptr = std::ptr::slice_from_raw_parts_mut(conds, count);
1797            let _flags = Box::from_raw(flags_ptr);
1798        }
1799    }
1800
1801    extern "C" fn cb_flags_written_by_write_type<A>(
1802        ctxt: *mut c_void,
1803        write_type: u32,
1804        count: *mut usize,
1805    ) -> *mut u32
1806    where
1807        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1808    {
1809        let custom_arch = unsafe { &*(ctxt as *mut A) };
1810
1811        if let Some(write_type) = custom_arch.flag_write_from_id(FlagWriteId(write_type)) {
1812            let mut flags_written: Box<[_]> = write_type
1813                .flags_written()
1814                .iter()
1815                .map(|f| f.id().0)
1816                .collect();
1817
1818            // SAFETY: `count` is an out parameter
1819            unsafe { *count = flags_written.len() };
1820            let flags_ptr = flags_written.as_mut_ptr();
1821            std::mem::forget(flags_written);
1822            flags_ptr
1823        } else {
1824            unsafe {
1825                *count = 0;
1826            }
1827            std::ptr::null_mut()
1828        }
1829    }
1830
1831    extern "C" fn cb_semantic_class_for_flag_write_type<A>(
1832        ctxt: *mut c_void,
1833        write_type: u32,
1834    ) -> u32
1835    where
1836        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1837    {
1838        let custom_arch = unsafe { &*(ctxt as *mut A) };
1839        custom_arch
1840            .flag_write_from_id(FlagWriteId(write_type))
1841            .map(|w| w.class())
1842            .and_then(|c| c.map(|c| c.id().0))
1843            .unwrap_or(0)
1844    }
1845
    /// Lifts a flag-write side effect into LLIL.
    ///
    /// Tries the architecture's own [`Architecture::flag_write_llil`] first; if
    /// that declines (or the operation/operands cannot be unpacked into a
    /// [`LowLevelILFlagWriteOp`]), falls back to the core's default lowering
    /// based on the flag's role. Returns the LLIL expression index.
    extern "C" fn cb_flag_write_llil<A>(
        ctxt: *mut c_void,
        op: BNLowLevelILOperation,
        size: usize,
        flag_write: u32,
        flag: u32,
        operands_raw: *mut BNRegisterOrConstant,
        operand_count: usize,
        il: *mut BNLowLevelILFunction,
    ) -> usize
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the `*mut A` handed to the core at registration time.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let flag_write = custom_arch.flag_write_from_id(FlagWriteId(flag_write));
        let flag = custom_arch.flag_from_id(FlagId(flag));
        // SAFETY: the core supplies `operand_count` operands at `operands_raw`.
        let operands = unsafe { std::slice::from_raw_parts(operands_raw, operand_count) };
        // SAFETY: `il` is a live core-owned LLIL function for the duration of this call.
        let lifter = unsafe {
            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
        };

        if let (Some(flag_write), Some(flag)) = (flag_write, flag) {
            if let Some(op) = LowLevelILFlagWriteOp::from_op(custom_arch, size, op, operands) {
                if let Some(expr) = custom_arch.flag_write_llil(flag, flag_write, op, &lifter) {
                    // TODO verify that returned expr is a bool value
                    return expr.index.0;
                }
            } else {
                tracing::warn!(
                    "unable to unpack flag write op: {:?} with {} operands",
                    op,
                    operands.len()
                );
            }

            // Custom lifting declined: let the core synthesize the default
            // flag-write IL for this flag's role in the write type's class.
            let role = flag.role(flag_write.class());

            unsafe {
                BNGetDefaultArchitectureFlagWriteLowLevelIL(
                    custom_arch.as_ref().handle,
                    op,
                    size,
                    role,
                    operands_raw,
                    operand_count,
                    il,
                )
            }
        } else {
            // TODO this should be impossible; requires bad flag/flag_write ids passed in;
            // explode more violently
            lifter.unimplemented().index.0
        }
    }
1900
1901    extern "C" fn cb_flag_cond_llil<A>(
1902        ctxt: *mut c_void,
1903        cond: FlagCondition,
1904        class: u32,
1905        il: *mut BNLowLevelILFunction,
1906    ) -> usize
1907    where
1908        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1909    {
1910        let custom_arch = unsafe { &*(ctxt as *mut A) };
1911        let class = custom_arch.flag_class_from_id(FlagClassId(class));
1912
1913        let lifter = unsafe {
1914            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
1915        };
1916        if let Some(expr) = custom_arch.flag_cond_llil(cond, class, &lifter) {
1917            // TODO verify that returned expr is a bool value
1918            return expr.index.0;
1919        }
1920
1921        lifter.unimplemented().index.0
1922    }
1923
1924    extern "C" fn cb_flag_group_llil<A>(
1925        ctxt: *mut c_void,
1926        group: u32,
1927        il: *mut BNLowLevelILFunction,
1928    ) -> usize
1929    where
1930        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1931    {
1932        let custom_arch = unsafe { &*(ctxt as *mut A) };
1933        let lifter = unsafe {
1934            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
1935        };
1936
1937        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
1938            if let Some(expr) = custom_arch.flag_group_llil(group, &lifter) {
1939                // TODO verify that returned expr is a bool value
1940                return expr.index.0;
1941            }
1942        }
1943
1944        lifter.unimplemented().index.0
1945    }
1946
1947    extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32, count: usize) {
1948        if regs.is_null() {
1949            return;
1950        }
1951
1952        unsafe {
1953            let regs_ptr = std::ptr::slice_from_raw_parts_mut(regs, count);
1954            let _regs = Box::from_raw(regs_ptr);
1955        }
1956    }
1957
1958    extern "C" fn cb_register_info<A>(ctxt: *mut c_void, reg: u32, result: *mut BNRegisterInfo)
1959    where
1960        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1961    {
1962        let custom_arch = unsafe { &*(ctxt as *mut A) };
1963        let result = unsafe { &mut *result };
1964
1965        if let Some(reg) = custom_arch.register_from_id(RegisterId(reg)) {
1966            let info = reg.info();
1967
1968            result.fullWidthRegister = match info.parent() {
1969                Some(p) => p.id().0,
1970                None => reg.id().0,
1971            };
1972
1973            result.offset = info.offset();
1974            result.size = info.size();
1975            result.extend = info.implicit_extend().into();
1976        }
1977    }
1978
1979    extern "C" fn cb_stack_pointer<A>(ctxt: *mut c_void) -> u32
1980    where
1981        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1982    {
1983        let custom_arch = unsafe { &*(ctxt as *mut A) };
1984
1985        if let Some(reg) = custom_arch.stack_pointer_reg() {
1986            reg.id().0
1987        } else {
1988            0xffff_ffff
1989        }
1990    }
1991
1992    extern "C" fn cb_link_reg<A>(ctxt: *mut c_void) -> u32
1993    where
1994        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1995    {
1996        let custom_arch = unsafe { &*(ctxt as *mut A) };
1997
1998        if let Some(reg) = custom_arch.link_reg() {
1999            reg.id().0
2000        } else {
2001            0xffff_ffff
2002        }
2003    }
2004
2005    extern "C" fn cb_reg_stack_name<A>(ctxt: *mut c_void, stack: u32) -> *mut c_char
2006    where
2007        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2008    {
2009        let custom_arch = unsafe { &*(ctxt as *mut A) };
2010
2011        match custom_arch.register_stack_from_id(RegisterStackId(stack)) {
2012            Some(stack) => BnString::into_raw(BnString::new(stack.name().as_ref())),
2013            None => BnString::into_raw(BnString::new("invalid_reg_stack")),
2014        }
2015    }
2016
2017    extern "C" fn cb_reg_stacks<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2018    where
2019        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2020    {
2021        let custom_arch = unsafe { &*(ctxt as *mut A) };
2022        let mut regs: Box<[_]> = custom_arch
2023            .register_stacks()
2024            .iter()
2025            .map(|r| r.id().0)
2026            .collect();
2027
2028        // SAFETY: Passed in to be written
2029        unsafe { *count = regs.len() };
2030        let regs_ptr = regs.as_mut_ptr();
2031        std::mem::forget(regs);
2032        regs_ptr
2033    }
2034
2035    extern "C" fn cb_reg_stack_info<A>(
2036        ctxt: *mut c_void,
2037        stack: u32,
2038        result: *mut BNRegisterStackInfo,
2039    ) where
2040        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2041    {
2042        let custom_arch = unsafe { &*(ctxt as *mut A) };
2043        let result = unsafe { &mut *result };
2044
2045        if let Some(stack) = custom_arch.register_stack_from_id(RegisterStackId(stack)) {
2046            let info = stack.info();
2047
2048            let (reg, count) = info.storage_regs();
2049            result.firstStorageReg = reg.id().0;
2050            result.storageCount = count as u32;
2051
2052            if let Some((reg, count)) = info.top_relative_regs() {
2053                result.firstTopRelativeReg = reg.id().0;
2054                result.topRelativeCount = count as u32;
2055            } else {
2056                result.firstTopRelativeReg = 0xffff_ffff;
2057                result.topRelativeCount = 0;
2058            }
2059
2060            result.stackTopReg = info.stack_top_reg().id().0;
2061        }
2062    }
2063
2064    extern "C" fn cb_intrinsic_class<A>(ctxt: *mut c_void, intrinsic: u32) -> BNIntrinsicClass
2065    where
2066        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2067    {
2068        let custom_arch = unsafe { &*(ctxt as *mut A) };
2069        match custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) {
2070            Some(intrinsic) => intrinsic.class(),
2071            // TODO: Make this unreachable?
2072            None => BNIntrinsicClass::GeneralIntrinsicClass,
2073        }
2074    }
2075
2076    extern "C" fn cb_intrinsic_name<A>(ctxt: *mut c_void, intrinsic: u32) -> *mut c_char
2077    where
2078        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2079    {
2080        let custom_arch = unsafe { &*(ctxt as *mut A) };
2081        match custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) {
2082            Some(intrinsic) => BnString::into_raw(BnString::new(intrinsic.name())),
2083            None => BnString::into_raw(BnString::new("invalid_intrinsic")),
2084        }
2085    }
2086
2087    extern "C" fn cb_intrinsics<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2088    where
2089        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2090    {
2091        let custom_arch = unsafe { &*(ctxt as *mut A) };
2092        let mut intrinsics: Box<[_]> = custom_arch.intrinsics().iter().map(|i| i.id().0).collect();
2093
2094        // SAFETY: Passed in to be written
2095        unsafe { *count = intrinsics.len() };
2096        let intrinsics_ptr = intrinsics.as_mut_ptr();
2097        std::mem::forget(intrinsics);
2098        intrinsics_ptr
2099    }
2100
2101    extern "C" fn cb_intrinsic_inputs<A>(
2102        ctxt: *mut c_void,
2103        intrinsic: u32,
2104        count: *mut usize,
2105    ) -> *mut BNNameAndType
2106    where
2107        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2108    {
2109        let custom_arch = unsafe { &*(ctxt as *mut A) };
2110
2111        let Some(intrinsic) = custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) else {
2112            // SAFETY: Passed in to be written
2113            unsafe {
2114                *count = 0;
2115            }
2116            return std::ptr::null_mut();
2117        };
2118
2119        let inputs = intrinsic.inputs();
2120        // NOTE: The into_raw will leak and be freed later by `cb_free_name_and_types`.
2121        let raw_inputs: Box<[_]> = inputs.into_iter().map(NameAndType::into_raw).collect();
2122
2123        // SAFETY: Passed in to be written
2124        unsafe {
2125            *count = raw_inputs.len();
2126        }
2127
2128        if raw_inputs.is_empty() {
2129            std::ptr::null_mut()
2130        } else {
2131            // Core is responsible for calling back to `cb_free_name_and_types`.
2132            Box::leak(raw_inputs).as_mut_ptr()
2133        }
2134    }
2135
2136    extern "C" fn cb_free_name_and_types<A>(
2137        _ctxt: *mut c_void,
2138        nt: *mut BNNameAndType,
2139        count: usize,
2140    ) where
2141        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2142    {
2143        if nt.is_null() {
2144            return;
2145        }
2146
2147        // Reconstruct the box and drop.
2148        let nt_ptr = std::ptr::slice_from_raw_parts_mut(nt, count);
2149        // SAFETY: nt_ptr is a pointer to a Box.
2150        let boxed_name_and_types = unsafe { Box::from_raw(nt_ptr) };
2151        for nt in boxed_name_and_types {
2152            NameAndType::free_raw(nt);
2153        }
2154    }
2155
2156    extern "C" fn cb_intrinsic_outputs<A>(
2157        ctxt: *mut c_void,
2158        intrinsic: u32,
2159        count: *mut usize,
2160    ) -> *mut BNTypeWithConfidence
2161    where
2162        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2163    {
2164        let custom_arch = unsafe { &*(ctxt as *mut A) };
2165
2166        let Some(intrinsic) = custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) else {
2167            // SAFETY: Passed in to be written
2168            unsafe {
2169                *count = 0;
2170            }
2171            return std::ptr::null_mut();
2172        };
2173
2174        let outputs = intrinsic.outputs();
2175        let raw_outputs: Box<[BNTypeWithConfidence]> = outputs
2176            .into_iter()
2177            // Leaked to be freed later by `cb_free_type_list`.
2178            .map(Conf::<Ref<Type>>::into_raw)
2179            .collect();
2180
2181        // SAFETY: Passed in to be written
2182        unsafe {
2183            *count = raw_outputs.len();
2184        }
2185
2186        if raw_outputs.is_empty() {
2187            std::ptr::null_mut()
2188        } else {
2189            // Core is responsible for calling back to `cb_free_type_list`.
2190            Box::leak(raw_outputs).as_mut_ptr()
2191        }
2192    }
2193
2194    extern "C" fn cb_free_type_list<A>(
2195        ctxt: *mut c_void,
2196        tl: *mut BNTypeWithConfidence,
2197        count: usize,
2198    ) where
2199        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2200    {
2201        let _custom_arch = unsafe { &*(ctxt as *mut A) };
2202        if !tl.is_null() {
2203            let boxed_types =
2204                unsafe { Box::from_raw(std::ptr::slice_from_raw_parts_mut(tl, count)) };
2205            for ty in boxed_types {
2206                Conf::<Ref<Type>>::free_raw(ty);
2207            }
2208        }
2209    }
2210
2211    extern "C" fn cb_can_assemble<A>(ctxt: *mut c_void) -> bool
2212    where
2213        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2214    {
2215        let custom_arch = unsafe { &*(ctxt as *mut A) };
2216        custom_arch.can_assemble()
2217    }
2218
    /// Assembles `code` at `addr` into the core-provided output `buffer`.
    ///
    /// Returns `true` on success. On both paths a message is written through
    /// the `errors` out parameter (the empty string on success); ownership of
    /// that string transfers to the core.
    extern "C" fn cb_assemble<A>(
        ctxt: *mut c_void,
        code: *const c_char,
        addr: u64,
        buffer: *mut BNDataBuffer,
        errors: *mut *mut c_char,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: `ctxt` is the `*mut A` handed to the core at registration time.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        // A null or non-UTF-8 source string is treated as empty input.
        let code = raw_to_string(code).unwrap_or("".into());
        // The core owns `buffer`; this wrapper is temporary — see the
        // `mem::forget` below, which prevents the wrapper from freeing it.
        let mut buffer = DataBuffer::from_raw(buffer);

        let result = match custom_arch.assemble(&code, addr) {
            Ok(result) => {
                buffer.set_data(&result);
                unsafe {
                    *errors = BnString::into_raw(BnString::new(""));
                }
                true
            }
            Err(result) => {
                unsafe {
                    *errors = BnString::into_raw(BnString::new(result));
                }
                false
            }
        };

        // Caller owns the data buffer, don't free it
        std::mem::forget(buffer);

        result
    }
2254
2255    extern "C" fn cb_is_never_branch_patch_available<A>(
2256        ctxt: *mut c_void,
2257        data: *const u8,
2258        addr: u64,
2259        len: usize,
2260    ) -> bool
2261    where
2262        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2263    {
2264        let custom_arch = unsafe { &*(ctxt as *mut A) };
2265        let data = unsafe { std::slice::from_raw_parts(data, len) };
2266        custom_arch.is_never_branch_patch_available(data, addr)
2267    }
2268
2269    extern "C" fn cb_is_always_branch_patch_available<A>(
2270        ctxt: *mut c_void,
2271        data: *const u8,
2272        addr: u64,
2273        len: usize,
2274    ) -> bool
2275    where
2276        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2277    {
2278        let custom_arch = unsafe { &*(ctxt as *mut A) };
2279        let data = unsafe { std::slice::from_raw_parts(data, len) };
2280        custom_arch.is_always_branch_patch_available(data, addr)
2281    }
2282
2283    extern "C" fn cb_is_invert_branch_patch_available<A>(
2284        ctxt: *mut c_void,
2285        data: *const u8,
2286        addr: u64,
2287        len: usize,
2288    ) -> bool
2289    where
2290        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2291    {
2292        let custom_arch = unsafe { &*(ctxt as *mut A) };
2293        let data = unsafe { std::slice::from_raw_parts(data, len) };
2294        custom_arch.is_invert_branch_patch_available(data, addr)
2295    }
2296
2297    extern "C" fn cb_is_skip_and_return_zero_patch_available<A>(
2298        ctxt: *mut c_void,
2299        data: *const u8,
2300        addr: u64,
2301        len: usize,
2302    ) -> bool
2303    where
2304        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2305    {
2306        let custom_arch = unsafe { &*(ctxt as *mut A) };
2307        let data = unsafe { std::slice::from_raw_parts(data, len) };
2308        custom_arch.is_skip_and_return_zero_patch_available(data, addr)
2309    }
2310
2311    extern "C" fn cb_is_skip_and_return_value_patch_available<A>(
2312        ctxt: *mut c_void,
2313        data: *const u8,
2314        addr: u64,
2315        len: usize,
2316    ) -> bool
2317    where
2318        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2319    {
2320        let custom_arch = unsafe { &*(ctxt as *mut A) };
2321        let data = unsafe { std::slice::from_raw_parts(data, len) };
2322        custom_arch.is_skip_and_return_value_patch_available(data, addr)
2323    }
2324
2325    extern "C" fn cb_convert_to_nop<A>(
2326        ctxt: *mut c_void,
2327        data: *mut u8,
2328        addr: u64,
2329        len: usize,
2330    ) -> bool
2331    where
2332        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2333    {
2334        let custom_arch = unsafe { &*(ctxt as *mut A) };
2335        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2336        custom_arch.convert_to_nop(data, addr)
2337    }
2338
2339    extern "C" fn cb_always_branch<A>(
2340        ctxt: *mut c_void,
2341        data: *mut u8,
2342        addr: u64,
2343        len: usize,
2344    ) -> bool
2345    where
2346        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2347    {
2348        let custom_arch = unsafe { &*(ctxt as *mut A) };
2349        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2350        custom_arch.always_branch(data, addr)
2351    }
2352
2353    extern "C" fn cb_invert_branch<A>(
2354        ctxt: *mut c_void,
2355        data: *mut u8,
2356        addr: u64,
2357        len: usize,
2358    ) -> bool
2359    where
2360        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2361    {
2362        let custom_arch = unsafe { &*(ctxt as *mut A) };
2363        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2364        custom_arch.invert_branch(data, addr)
2365    }
2366
2367    extern "C" fn cb_skip_and_return_value<A>(
2368        ctxt: *mut c_void,
2369        data: *mut u8,
2370        addr: u64,
2371        len: usize,
2372        val: u64,
2373    ) -> bool
2374    where
2375        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2376    {
2377        let custom_arch = unsafe { &*(ctxt as *mut A) };
2378        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2379        custom_arch.skip_and_return_value(data, addr, val)
2380    }
2381
    let name = name.to_cstr();

    // The architecture is built lazily: `cb_init` runs during
    // `BNRegisterArchitecture` below and constructs the real `A` into this
    // zeroed slot via `func`.
    let uninit_arch = ArchitectureBuilder {
        arch: MaybeUninit::zeroed(),
        func: Some(func),
    };

    // Intentionally leaked: the core keeps this raw context pointer, and the
    // registered architecture must live for the remainder of the process.
    let raw = Box::into_raw(Box::new(uninit_arch));
    // Wire every callback trampoline above into the core's callback table.
    let mut custom_arch = BNCustomArchitecture {
        context: raw as *mut _,
        init: Some(cb_init::<A, F>),
        getEndianness: Some(cb_endianness::<A>),
        getAddressSize: Some(cb_address_size::<A>),
        getDefaultIntegerSize: Some(cb_default_integer_size::<A>),
        getInstructionAlignment: Some(cb_instruction_alignment::<A>),
        // TODO: Make getOpcodeDisplayLength optional.
        getMaxInstructionLength: Some(cb_max_instr_len::<A>),
        // TODO: Make getOpcodeDisplayLength optional.
        getOpcodeDisplayLength: Some(cb_opcode_display_len::<A>),
        getAssociatedArchitectureByAddress: Some(cb_associated_arch_by_addr::<A>),
        getInstructionInfo: Some(cb_instruction_info::<A>),
        getInstructionText: Some(cb_get_instruction_text::<A>),
        freeInstructionText: Some(cb_free_instruction_text),
        getInstructionLowLevelIL: Some(cb_instruction_llil::<A>),
        analyzeBasicBlocks: Some(cb_analyze_basic_blocks::<A>),
        liftFunction: Some(cb_lift_function::<A>),

        getRegisterName: Some(cb_reg_name::<A>),
        getFlagName: Some(cb_flag_name::<A>),
        getFlagWriteTypeName: Some(cb_flag_write_name::<A>),
        getSemanticFlagClassName: Some(cb_semantic_flag_class_name::<A>),
        getSemanticFlagGroupName: Some(cb_semantic_flag_group_name::<A>),

        getFullWidthRegisters: Some(cb_registers_full_width::<A>),
        getAllRegisters: Some(cb_registers_all::<A>),
        getAllFlags: Some(cb_flags::<A>),
        getAllFlagWriteTypes: Some(cb_flag_write_types::<A>),
        getAllSemanticFlagClasses: Some(cb_semantic_flag_classes::<A>),
        getAllSemanticFlagGroups: Some(cb_semantic_flag_groups::<A>),

        getFlagRole: Some(cb_flag_role::<A>),
        getFlagsRequiredForFlagCondition: Some(cb_flags_required_for_flag_cond::<A>),

        getFlagsRequiredForSemanticFlagGroup: Some(cb_flags_required_for_semantic_flag_group::<A>),
        getFlagConditionsForSemanticFlagGroup: Some(
            cb_flag_conditions_for_semantic_flag_group::<A>,
        ),
        freeFlagConditionsForSemanticFlagGroup: Some(
            cb_free_flag_conditions_for_semantic_flag_group::<A>,
        ),

        getFlagsWrittenByFlagWriteType: Some(cb_flags_written_by_write_type::<A>),
        getSemanticClassForFlagWriteType: Some(cb_semantic_class_for_flag_write_type::<A>),

        getFlagWriteLowLevelIL: Some(cb_flag_write_llil::<A>),
        getFlagConditionLowLevelIL: Some(cb_flag_cond_llil::<A>),
        getSemanticFlagGroupLowLevelIL: Some(cb_flag_group_llil::<A>),

        freeRegisterList: Some(cb_free_register_list),
        getRegisterInfo: Some(cb_register_info::<A>),
        getStackPointerRegister: Some(cb_stack_pointer::<A>),
        getLinkRegister: Some(cb_link_reg::<A>),
        getGlobalRegisters: Some(cb_registers_global::<A>),
        getSystemRegisters: Some(cb_registers_system::<A>),

        getRegisterStackName: Some(cb_reg_stack_name::<A>),
        getAllRegisterStacks: Some(cb_reg_stacks::<A>),
        getRegisterStackInfo: Some(cb_reg_stack_info::<A>),

        getIntrinsicClass: Some(cb_intrinsic_class::<A>),
        getIntrinsicName: Some(cb_intrinsic_name::<A>),
        getAllIntrinsics: Some(cb_intrinsics::<A>),
        getIntrinsicInputs: Some(cb_intrinsic_inputs::<A>),
        freeNameAndTypeList: Some(cb_free_name_and_types::<A>),
        getIntrinsicOutputs: Some(cb_intrinsic_outputs::<A>),
        freeTypeList: Some(cb_free_type_list::<A>),

        canAssemble: Some(cb_can_assemble::<A>),
        assemble: Some(cb_assemble::<A>),

        isNeverBranchPatchAvailable: Some(cb_is_never_branch_patch_available::<A>),
        isAlwaysBranchPatchAvailable: Some(cb_is_always_branch_patch_available::<A>),
        isInvertBranchPatchAvailable: Some(cb_is_invert_branch_patch_available::<A>),
        isSkipAndReturnZeroPatchAvailable: Some(cb_is_skip_and_return_zero_patch_available::<A>),
        isSkipAndReturnValuePatchAvailable: Some(cb_is_skip_and_return_value_patch_available::<A>),

        convertToNop: Some(cb_convert_to_nop::<A>),
        alwaysBranch: Some(cb_always_branch::<A>),
        invertBranch: Some(cb_invert_branch::<A>),
        skipAndReturnValue: Some(cb_skip_and_return_value::<A>),
    };

    unsafe {
        let res = BNRegisterArchitecture(name.as_ptr(), &mut custom_arch as *mut _);

        // A null handle means the core rejected the registration outright.
        assert!(!res.is_null());

        // SAFETY: `cb_init` ran during registration and initialized `arch`.
        (*raw).arch.assume_init_mut()
    }
}
2482
/// Handle to a custom [`Architecture`] implementation registered with the core.
///
/// Wraps the raw pointer to the architecture instance that was leaked during
/// registration; the pointee lives for the remainder of the process.
#[derive(Debug)]
pub struct CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
{
    // Points at the leaked `A`; never freed, so dereferencing stays valid.
    handle: *mut A,
}
2490
// SAFETY: `A` is required to be `Send`, and the pointee is leaked (never
// freed), so moving the raw pointer between threads cannot outlive it.
unsafe impl<A> Send for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync
{
}

// SAFETY: `A` is required to be `Sync`, so shared references obtained through
// the pointer may be used from multiple threads concurrently.
unsafe impl<A> Sync for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync
{
}
2500
impl<A> Clone for CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = Self> + Send + Sync,
{
    // The handle is just a raw pointer, so cloning is a plain bit copy.
    fn clone(&self) -> Self {
        *self
    }
}

// Pointer-sized and trivially copyable.
impl<A> Copy for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = Self> + Send + Sync
{
}
2514
impl<A> Borrow<A> for CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = Self> + Send + Sync,
{
    /// Borrows the underlying architecture instance.
    fn borrow(&self) -> &A {
        // SAFETY: `handle` points at the architecture leaked at registration
        // time, which is never freed for the life of the process.
        unsafe { &*self.handle }
    }
}
2522}