// binaryninja/architecture.rs

1// Copyright 2021-2026 Vector 35 Inc.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15//! Architectures provide disassembly, lifting, and associated metadata about a CPU to inform
16//! analysis and decompilation.
17//!
18//! For more information see the [`Architecture`] trait and the [`CoreArchitecture`] structure for
19//! querying already registered architectures.
20
21// RegisterInfo purge
22use binaryninjacore_sys::*;
23use std::fmt::{Debug, Formatter};
24
25use crate::{
26    calling_convention::CoreCallingConvention,
27    data_buffer::DataBuffer,
28    disassembly::InstructionTextToken,
29    function::Function,
30    platform::Platform,
31    rc::*,
32    relocation::CoreRelocationHandler,
33    string::{IntoCStr, *},
34    types::{NameAndType, Type},
35    Endianness,
36};
37use std::ops::Deref;
38use std::{
39    borrow::Borrow,
40    ffi::{c_char, c_void, CString},
41    hash::Hash,
42    mem::MaybeUninit,
43};
44
45use std::ptr::NonNull;
46
47use crate::function_recognizer::FunctionRecognizer;
48use crate::relocation::{CustomRelocationHandlerHandle, RelocationHandler};
49
50use crate::confidence::Conf;
51use crate::low_level_il::expression::ValueExpr;
52use crate::low_level_il::lifting::{
53    get_default_flag_cond_llil, get_default_flag_write_llil, LowLevelILFlagWriteOp,
54};
55use crate::low_level_il::{LowLevelILMutableExpression, LowLevelILMutableFunction};
56
57pub mod basic_block;
58pub mod branches;
59pub mod flag;
60pub mod instruction;
61pub mod intrinsic;
62pub mod register;
63
64// Re-export all the submodules to keep from breaking everyone's code.
65// We split these out just to clarify each part, not necessarily to enforce an extra namespace.
66pub use basic_block::*;
67pub use branches::*;
68pub use flag::*;
69pub use instruction::*;
70pub use intrinsic::*;
71pub use register::*;
72
73/// The [`Architecture`] trait is the backbone of Binary Ninja's analysis capabilities. It tells the
74/// core how to interpret the machine code into LLIL, a generic intermediate representation for
75/// program analysis.
76///
77/// To add support for a new Instruction Set Architecture (ISA), you must implement this trait and
78/// register it. The core analysis loop relies on your implementation for three critical stages:
79///
80/// 1.  **Disassembly ([`Architecture::instruction_text`])**: Machine code into human-readable text (e.g., `55` -> `push rbp`).
81/// 2.  **Control Flow Analysis ([`Architecture::instruction_info`])**: Identifying where execution goes next (e.g., "This is a `call` instruction, it targets address `0x401000`").
82/// 3.  **Lifting ([`Architecture::instruction_llil`])**: Translating machine code into **Low Level Intermediate Language (LLIL)**, which enables decompilation and automated analysis.
83pub trait Architecture: 'static + Sized + AsRef<CoreArchitecture> {
84    type Handle: Borrow<Self> + Clone;
85
86    /// The [`RegisterInfo`] associated with this architecture.
87    type RegisterInfo: RegisterInfo<RegType = Self::Register>;
88
89    /// The [`Register`] associated with this architecture.
90    type Register: Register<InfoType = Self::RegisterInfo>;
91
92    /// The [`RegisterStackInfo`] associated with this architecture.
93    ///
94    /// You may only set this to [`UnusedRegisterStack`] if [`Self::RegisterStack`] is as well.
95    type RegisterStackInfo: RegisterStackInfo<
96        RegType = Self::Register,
97        RegInfoType = Self::RegisterInfo,
98        RegStackType = Self::RegisterStack,
99    >;
100
101    /// The [`RegisterStack`] associated with this architecture.
102    ///
103    /// If you do not override [`Architecture::register_stack_from_id`] and [`Architecture::register_stacks`],
104    /// you may set this to [`UnusedRegisterStack`].
105    type RegisterStack: RegisterStack<
106        InfoType = Self::RegisterStackInfo,
107        RegType = Self::Register,
108        RegInfoType = Self::RegisterInfo,
109    >;
110
111    /// The [`Flag`] associated with this architecture.
112    ///
113    /// If you do not override [`Architecture::flag_from_id`] and [`Architecture::flags`], you may
114    /// set this to [`UnusedFlag`].
115    type Flag: Flag<FlagClass = Self::FlagClass>;
116
117    /// The [`FlagWrite`] associated with this architecture.
118    ///
119    /// Can only be set to [`UnusedFlag`] if [`Self::Flag`] is as well. Otherwise, it is expected that
120    /// this points to a custom [`FlagWrite`] with the following functions defined:
121    ///
122    /// - [`Architecture::flag_write_types`]
123    /// - [`Architecture::flag_write_from_id`]
124    type FlagWrite: FlagWrite<FlagType = Self::Flag, FlagClass = Self::FlagClass>;
125
126    /// The [`FlagClass`] associated with this architecture.
127    ///
128    /// Can only be set to [`UnusedFlag`] if [`Self::Flag`] is as well. Otherwise, it is expected that
129    /// this points to a custom [`FlagClass`] with the following functions defined:
130    ///
131    /// - [`Architecture::flag_classes`]
132    /// - [`Architecture::flag_class_from_id`]
133    type FlagClass: FlagClass;
134
135    /// The [`FlagGroup`] associated with this architecture.
136    ///
137    /// Can only be set to [`UnusedFlag`] if [`Self::Flag`] is as well. Otherwise, it is expected that
138    /// this points to a custom [`FlagGroup`] with the following functions defined:
139    ///
140    /// - [`Architecture::flag_groups`]
141    /// - [`Architecture::flag_group_from_id`]
142    type FlagGroup: FlagGroup<FlagType = Self::Flag, FlagClass = Self::FlagClass>;
143
144    type Intrinsic: Intrinsic;
145
    /// Byte order of this architecture.
    fn endianness(&self) -> Endianness;
    /// Size of a pointer/address in bytes.
    fn address_size(&self) -> usize;
    /// Size in bytes of the architecture's default integer type.
    fn default_integer_size(&self) -> usize;
    /// Required alignment of instruction starts, in bytes.
    fn instruction_alignment(&self) -> usize;
150
151    /// The maximum length of an instruction in bytes. This is used to determine the size of the buffer
152    /// given to callbacks such as [`Architecture::instruction_info`], [`Architecture::instruction_text`]
153    /// and [`Architecture::instruction_llil`].
154    ///
155    /// NOTE: The maximum **CANNOT** be greater than 256.
156    fn max_instr_len(&self) -> usize;
157
158    /// How many bytes to display in the opcode space before displaying a `...`, typically set to
159    /// the [`Architecture::max_instr_len`], however, can be overridden to display a truncated opcode.
160    fn opcode_display_len(&self) -> usize {
161        self.max_instr_len()
162    }
163
164    /// In binaries with multiple architectures, you may wish to associate a specific architecture
165    /// with a given virtual address. This can be seen in armv7 where odd addresses are associated
166    /// with the thumb architecture.
167    fn associated_arch_by_addr(&self, _addr: u64) -> CoreArchitecture {
168        *self.as_ref()
169    }
170
171    /// Returns the [`InstructionInfo`] at the given virtual address with `data`.
172    ///
173    /// The [`InstructionInfo`] object should always fill the proper length and branches if not, the
174    /// next instruction will likely be incorrect.
175    fn instruction_info(&self, data: &[u8], addr: u64) -> Option<InstructionInfo>;
176
177    /// Disassembles a raw byte sequence into a human-readable list of text tokens.
178    ///
179    /// This function is responsible for the visual representation of assembly instructions.
180    /// It does *not* define semantics (use [`Architecture::instruction_llil`] for that);
181    /// it simply tells the UI how to print the instruction.
182    ///
183    /// # Returns
184    ///
185    /// An `Option` containing a tuple:
186    ///
187    /// * `usize`: The size of the decoded instruction in bytes. Is used to advance to the next instruction.
188    /// * `Vec<InstructionTextToken>`: A list of text tokens representing the instruction.
189    ///
190    /// Returns `None` if the bytes do not form a valid instruction.
191    fn instruction_text(
192        &self,
193        data: &[u8],
194        addr: u64,
195    ) -> Option<(usize, Vec<InstructionTextToken>)>;
196
197    /// Disassembles a raw byte sequence into a human-readable list of text tokens.
198    ///
199    /// This function is responsible for the visual representation of assembly instructions.
200    /// It does *not* define semantics (use [`Architecture::instruction_llil`] for that);
201    /// it simply tells the UI how to print the instruction. This variant includes contextual data, which
202    /// can be produced by analyze_basic_blocks
203    ///
204    /// # Returns
205    ///
206    /// An `Option` containing a tuple:
207    ///
208    /// * `usize`: The size of the decoded instruction in bytes. Is used to advance to the next instruction.
209    /// * `Vec<InstructionTextToken>`: A list of text tokens representing the instruction.
210    ///
211    /// Returns `None` if the bytes do not form a valid instruction.
212    fn instruction_text_with_context(
213        &self,
214        data: &[u8],
215        addr: u64,
216        _context: Option<NonNull<c_void>>,
217    ) -> Option<(usize, Vec<InstructionTextToken>)> {
218        self.instruction_text(data, addr)
219    }
220
221    // TODO: Why do we need to return a boolean here? Does `None` not represent the same thing?
222    /// Appends arbitrary low-level il instructions to `il`.
223    ///
224    /// If `None` is returned, no instructions were appended and the data is invalid. If `Some` is returned,
225    /// the instructions consumed length is returned (necessary for variable length instruction decoding).
226    fn instruction_llil(
227        &self,
228        data: &[u8],
229        addr: u64,
230        il: &LowLevelILMutableFunction,
231    ) -> Option<(usize, bool)>;
232
233    /// Performs basic block recovery and commits the results to the function analysis.
234    ///
235    /// NOTE: Only implement this method if function-level analysis is required. Otherwise, do not
236    /// implement to let default basic block analysis take place.
237    fn analyze_basic_blocks(
238        &self,
239        function: &mut Function,
240        context: &mut BasicBlockAnalysisContext,
241    ) {
242        unsafe {
243            BNArchitectureDefaultAnalyzeBasicBlocks(function.handle, context.handle);
244        }
245    }
246
    /// Lifts an entire function to LLIL.
    ///
    /// The default implementation defers to the core's standard lifter
    /// (`BNArchitectureDefaultLiftFunction`); only override when the
    /// architecture needs custom function-level lifting.
    fn lift_function(
        &self,
        function: LowLevelILMutableFunction,
        context: &mut FunctionLifterContext,
    ) -> bool {
        unsafe { BNArchitectureDefaultLiftFunction(function.handle, context.handle) }
    }
254
255    /// Fallback flag value calculation path. This method is invoked when the core is unable to
256    /// recover the flag using semantics and resorts to emitting instructions that explicitly set each
257    /// observed flag to the value of an expression returned by this function.
258    ///
259    /// This function *MUST NOT* append instructions that have side effects.
260    ///
261    /// This function *MUST NOT* observe the values of other flags.
262    ///
263    /// This function *MUST* return `None` or an expression representing a boolean value.
264    fn flag_write_llil<'a>(
265        &self,
266        flag: Self::Flag,
267        flag_write_type: Self::FlagWrite,
268        op: LowLevelILFlagWriteOp<Self::Register>,
269        il: &'a LowLevelILMutableFunction,
270    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
271        let role = flag.role(flag_write_type.class());
272        Some(get_default_flag_write_llil(self, role, op, il))
273    }
274
275    /// Determines what flags need to be examined to attempt automatic recovery of the flag uses semantics.
276    ///
277    /// If automatic recovery is not possible, the [`Architecture::flag_cond_llil`] method will be invoked
278    /// to give this [`Architecture`] implementation arbitrary control over the expression to be evaluated.
279    fn flags_required_for_flag_condition(
280        &self,
281        _condition: FlagCondition,
282        _class: Option<Self::FlagClass>,
283    ) -> Vec<Self::Flag> {
284        Vec::new()
285    }
286
287    /// This function *MUST NOT* append instructions that have side effects.
288    ///
289    /// This function *MUST NOT* observe the values of flags not returned by
290    /// `flags_required_for_flag_condition`.
291    ///
292    /// This function *MUST* return `None` or an expression representing a boolean value.
293    fn flag_cond_llil<'a>(
294        &self,
295        cond: FlagCondition,
296        class: Option<Self::FlagClass>,
297        il: &'a LowLevelILMutableFunction,
298    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
299        Some(get_default_flag_cond_llil(self, cond, class, il))
300    }
301
302    /// Performs fallback resolution when the core was unable to recover the semantics of a
303    /// `LLIL_FLAG_GROUP` expression. This occurs when multiple instructions may have set the flags
304    /// at the flag group query, or when the `FlagGroup::flag_conditions()` map doesn't have an entry
305    /// for the `FlagClass` associated with the `FlagWrite` type of the expression that last set
306    /// the flags required by the `FlagGroup` `group`.
307    ///
308    /// In this fallback path, the `Architecture` must generate the boolean expression in terms of
309    /// the values of that flags returned by `group`'s `flags_required` method.
310    ///
311    /// This function must return an expression representing a boolean (as in, size of `0`) value.
312    /// It is not allowed to add any instructions that can cause side effects.
313    ///
314    /// This function must not observe the values of any flag not returned by `group`'s
315    /// `flags_required` method.
316    fn flag_group_llil<'a>(
317        &self,
318        _group: Self::FlagGroup,
319        _il: &'a LowLevelILMutableFunction,
320    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
321        None
322    }
323
    /// Every register of this architecture (full-width registers and
    /// sub-registers alike — contrast [`Architecture::registers_full_width`]).
    fn registers_all(&self) -> Vec<Self::Register>;

    /// Get the [`Self::Register`] associated with the given [`RegisterId`].
    fn register_from_id(&self, id: RegisterId) -> Option<Self::Register>;

    /// Only the full-width registers, i.e. excluding sub-registers.
    fn registers_full_width(&self) -> Vec<Self::Register>;

    // TODO: Document the difference between global and system registers.
    fn registers_global(&self) -> Vec<Self::Register> {
        Vec::new()
    }

    // TODO: Document the difference between global and system registers.
    fn registers_system(&self) -> Vec<Self::Register> {
        Vec::new()
    }

    /// The register used as the stack pointer, if the architecture has one.
    fn stack_pointer_reg(&self) -> Option<Self::Register>;

    /// The link (return-address) register, if the architecture has one.
    fn link_reg(&self) -> Option<Self::Register> {
        None
    }
345
346    /// List of concrete register stacks for this architecture.
347    ///
348    /// You **must** override the following functions as well:
349    ///
350    /// - [`Architecture::register_stack_from_id`]
351    fn register_stacks(&self) -> Vec<Self::RegisterStack> {
352        Vec::new()
353    }
354
355    /// Get the [`Self::RegisterStack`] associated with the given [`RegisterStackId`].
356    ///
357    /// You **must** override the following functions as well:
358    ///
359    /// - [`Architecture::register_stacks`]
360    fn register_stack_from_id(&self, _id: RegisterStackId) -> Option<Self::RegisterStack> {
361        None
362    }
363
364    /// List of concrete flags for this architecture.
365    ///
366    /// You **must** override the following functions as well:
367    ///
368    /// - [`Architecture::flag_from_id`]
369    /// - [`Architecture::flag_write_types`]
370    /// - [`Architecture::flag_write_from_id`]
371    /// - [`Architecture::flag_classes`]
372    /// - [`Architecture::flag_class_from_id`]
373    /// - [`Architecture::flag_groups`]
374    /// - [`Architecture::flag_group_from_id`]
375    fn flags(&self) -> Vec<Self::Flag> {
376        Vec::new()
377    }
378
379    /// Get the [`Self::Flag`] associated with the given [`FlagId`].
380    ///
381    /// You **must** override the following functions as well:
382    ///
383    /// - [`Architecture::flags`]
384    /// - [`Architecture::flag_write_types`]
385    /// - [`Architecture::flag_write_from_id`]
386    /// - [`Architecture::flag_classes`]
387    /// - [`Architecture::flag_class_from_id`]
388    /// - [`Architecture::flag_groups`]
389    /// - [`Architecture::flag_group_from_id`]
390    fn flag_from_id(&self, _id: FlagId) -> Option<Self::Flag> {
391        None
392    }
393
394    /// List of concrete flag write types for this architecture.
395    ///
396    /// You **must** override the following functions as well:
397    ///
398    /// - [`Architecture::flags`]
399    /// - [`Architecture::flag_from_id`]
400    /// - [`Architecture::flag_write_from_id`]
401    /// - [`Architecture::flag_classes`]
402    /// - [`Architecture::flag_class_from_id`]
403    /// - [`Architecture::flag_groups`]
404    /// - [`Architecture::flag_group_from_id`]
405    fn flag_write_types(&self) -> Vec<Self::FlagWrite> {
406        Vec::new()
407    }
408
409    /// Get the [`Self::FlagWrite`] associated with the given [`FlagWriteId`].
410    ///
411    /// You **must** override the following functions as well:
412    ///
413    /// - [`Architecture::flags`]
414    /// - [`Architecture::flag_from_id`]
415    /// - [`Architecture::flag_write_types`]
416    /// - [`Architecture::flag_classes`]
417    /// - [`Architecture::flag_class_from_id`]
418    /// - [`Architecture::flag_groups`]
419    /// - [`Architecture::flag_group_from_id`]
420    fn flag_write_from_id(&self, _id: FlagWriteId) -> Option<Self::FlagWrite> {
421        None
422    }
423
424    /// List of concrete flag classes for this architecture.
425    ///
426    /// You **must** override the following functions as well:
427    ///
428    /// - [`Architecture::flags`]
429    /// - [`Architecture::flag_from_id`]
430    /// - [`Architecture::flag_write_from_id`]
431    /// - [`Architecture::flag_class_from_id`]
432    /// - [`Architecture::flag_groups`]
433    /// - [`Architecture::flag_group_from_id`]
434    fn flag_classes(&self) -> Vec<Self::FlagClass> {
435        Vec::new()
436    }
437
438    /// Get the [`Self::FlagClass`] associated with the given [`FlagClassId`].
439    ///
440    /// You **must** override the following functions as well:
441    ///
442    /// - [`Architecture::flags`]
443    /// - [`Architecture::flag_from_id`]
444    /// - [`Architecture::flag_write_from_id`]
445    /// - [`Architecture::flag_classes`]
446    /// - [`Architecture::flag_groups`]
447    /// - [`Architecture::flag_group_from_id`]
448    fn flag_class_from_id(&self, _id: FlagClassId) -> Option<Self::FlagClass> {
449        None
450    }
451
452    /// List of concrete flag groups for this architecture.
453    ///
454    /// You **must** override the following functions as well:
455    ///
456    /// - [`Architecture::flags`]
457    /// - [`Architecture::flag_from_id`]
458    /// - [`Architecture::flag_write_from_id`]
459    /// - [`Architecture::flag_classes`]
460    /// - [`Architecture::flag_class_from_id`]
461    /// - [`Architecture::flag_group_from_id`]
462    fn flag_groups(&self) -> Vec<Self::FlagGroup> {
463        Vec::new()
464    }
465
466    /// Get the [`Self::FlagGroup`] associated with the given [`FlagGroupId`].
467    ///
468    /// You **must** override the following functions as well:
469    ///
470    /// - [`Architecture::flags`]
471    /// - [`Architecture::flag_from_id`]
472    /// - [`Architecture::flag_write_from_id`]
473    /// - [`Architecture::flag_classes`]
474    /// - [`Architecture::flag_class_from_id`]
475    /// - [`Architecture::flag_groups`]
476    fn flag_group_from_id(&self, _id: FlagGroupId) -> Option<Self::FlagGroup> {
477        None
478    }
479
480    /// List of concrete intrinsics for this architecture.
481    ///
482    /// You **must** override the following functions as well:
483    ///
484    /// - [`Architecture::intrinsic_from_id`]
485    fn intrinsics(&self) -> Vec<Self::Intrinsic> {
486        Vec::new()
487    }
488
    /// The class of the given intrinsic; defaults to the general class.
    fn intrinsic_class(&self, _id: IntrinsicId) -> BNIntrinsicClass {
        BNIntrinsicClass::GeneralIntrinsicClass
    }
492
493    /// Get the [`Self::Intrinsic`] associated with the given [`IntrinsicId`].
494    ///
495    /// You **must** override the following functions as well:
496    ///
497    /// - [`Architecture::intrinsics`]
498    fn intrinsic_from_id(&self, _id: IntrinsicId) -> Option<Self::Intrinsic> {
499        None
500    }
501
502    /// Let the UI display this patch option.
503    ///
504    /// If set to true, you must override [`Architecture::assemble`].
505    fn can_assemble(&self) -> bool {
506        false
507    }
508
509    /// Assemble the code at the specified address and return the machine code in bytes.
510    ///
511    /// If overridden, you must set [`Architecture::can_assemble`] to `true`.
512    fn assemble(&self, _code: &str, _addr: u64) -> Result<Vec<u8>, String> {
513        Err("Assemble unsupported".into())
514    }
515
516    /// Let the UI display this patch option.
517    ///
518    /// If set to true, you must override [`Architecture::invert_branch`].
519    fn is_never_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
520        self.is_invert_branch_patch_available(data, addr)
521    }
522
523    /// Let the UI display this patch option.
524    ///
525    /// If set to true, you must override [`Architecture::always_branch`].
526    fn is_always_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
527        false
528    }
529
530    /// Let the UI display this patch option.
531    ///
532    /// If set to true, you must override [`Architecture::invert_branch`].
533    fn is_invert_branch_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
534        false
535    }
536
537    /// Let the UI display this patch option.
538    ///
539    /// If set to true, you must override [`Architecture::skip_and_return_value`].
540    fn is_skip_and_return_zero_patch_available(&self, data: &[u8], addr: u64) -> bool {
541        self.is_skip_and_return_value_patch_available(data, addr)
542    }
543
544    /// Let the UI display this patch option.
545    ///
546    /// If set to true, you must override [`Architecture::skip_and_return_value`].
547    fn is_skip_and_return_value_patch_available(&self, _data: &[u8], _addr: u64) -> bool {
548        false
549    }
550
    /// Patch the instruction bytes in `_data` into a no-op. Returns `false`
    /// (unsupported) by default.
    fn convert_to_nop(&self, _data: &mut [u8], _addr: u64) -> bool {
        false
    }
554
555    /// Patch the instruction to always branch.
556    ///
557    /// If overridden, you must also override [`Architecture::is_always_branch_patch_available`].
558    fn always_branch(&self, _data: &mut [u8], _addr: u64) -> bool {
559        false
560    }
561
562    /// Patch the instruction to invert the branch condition.
563    ///
564    /// If overridden, you must also override [`Architecture::is_invert_branch_patch_available`].
565    fn invert_branch(&self, _data: &mut [u8], _addr: u64) -> bool {
566        false
567    }
568
569    /// Patch the instruction to skip and return value.
570    ///
571    /// If overridden, you must also override [`Architecture::is_skip_and_return_value_patch_available`].
572    fn skip_and_return_value(&self, _data: &mut [u8], _addr: u64, _value: u64) -> bool {
573        false
574    }
575
    /// A cloneable handle through which this architecture can be borrowed.
    fn handle(&self) -> Self::Handle;
}
578
/// Extension of [`Architecture`] for implementations that attach a typed,
/// per-function context (retrievable via
/// [`FunctionLifterContext::get_function_arch_context`]).
pub trait ArchitectureWithFunctionContext: Architecture {
    /// The architecture-specific per-function context type.
    type FunctionArchContext: Send + Sync + 'static;

    /// Like [`Architecture::instruction_text`], but with access to the typed
    /// per-function context, if one was attached. The default implementation
    /// ignores the context.
    fn instruction_text_with_typed_context(
        &self,
        data: &[u8],
        addr: u64,
        _context: Option<&Self::FunctionArchContext>,
    ) -> Option<(usize, Vec<InstructionTextToken>)> {
        self.instruction_text(data, addr)
    }
}
591
/// Wrapper around the core context handle passed to
/// [`Architecture::lift_function`].
pub struct FunctionLifterContext {
    pub(crate) handle: *mut BNFunctionLifterContext,
}
595
impl FunctionLifterContext {
    /// Wraps a raw core handle.
    ///
    /// # Safety
    ///
    /// `handle` must be a valid, non-null `BNFunctionLifterContext` pointer
    /// that remains valid for the lifetime of the returned wrapper.
    pub unsafe fn from_raw(handle: *mut BNFunctionLifterContext) -> Self {
        debug_assert!(!handle.is_null());

        FunctionLifterContext { handle }
    }

    /// Returns the typed per-function architecture context, if one is attached.
    ///
    /// `_arch` is only used to select the concrete
    /// [`ArchitectureWithFunctionContext::FunctionArchContext`] type. The cast
    /// below is unchecked, so the caller must pass the same architecture that
    /// produced the stored context.
    pub fn get_function_arch_context<A: ArchitectureWithFunctionContext>(
        &self,
        _arch: &A,
    ) -> Option<&A::FunctionArchContext> {
        unsafe {
            // NOTE(review): the opaque pointer is reinterpreted as
            // `A::FunctionArchContext` with no runtime type check — sound only
            // when `A` matches the architecture that set the context.
            let ptr = (*self.handle).functionArchContext;
            if ptr.is_null() {
                None
            } else {
                Some(&*(ptr as *const A::FunctionArchContext))
            }
        }
    }
}
617
/// Owned list of architecture handles as returned by
/// [`CoreArchitecture::list_all`]; the core-allocated buffer is released with
/// `BNFreeArchitectureList` on drop.
// TODO: Consider replacing this raw (pointer, count) pair with a safer wrapper.
pub struct CoreArchitectureList(*mut *mut BNArchitecture, usize);
620
621impl Deref for CoreArchitectureList {
622    type Target = [CoreArchitecture];
623
624    fn deref(&self) -> &Self::Target {
625        unsafe { std::slice::from_raw_parts_mut(self.0 as *mut CoreArchitecture, self.1) }
626    }
627}
628
impl Drop for CoreArchitectureList {
    fn drop(&mut self) {
        // SAFETY: `self.0` was allocated by `BNGetArchitectureList` and is
        // freed here exactly once.
        unsafe {
            BNFreeArchitectureList(self.0);
        }
    }
}
636
/// A handle to an architecture registered with the core.
///
/// NOTE(review): core architectures appear to live for the entire process
/// (they are never freed — see the TODO on `from_raw`), which is why this
/// handle can be `Copy` — confirm.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct CoreArchitecture {
    pub(crate) handle: *mut BNArchitecture,
}
641
642impl CoreArchitecture {
643    // TODO: Leave a note on architecture lifetimes. Specifically that they are never freed.
644    pub unsafe fn from_raw(handle: *mut BNArchitecture) -> Self {
645        debug_assert!(!handle.is_null());
646        CoreArchitecture { handle }
647    }
648
649    pub fn list_all() -> CoreArchitectureList {
650        let mut count: usize = 0;
651        let archs = unsafe { BNGetArchitectureList(&mut count) };
652
653        CoreArchitectureList(archs, count)
654    }
655
656    pub fn by_name(name: &str) -> Option<Self> {
657        let name = name.to_cstr();
658        let handle = unsafe { BNGetArchitectureByName(name.as_ptr()) };
659        match handle.is_null() {
660            false => Some(CoreArchitecture { handle }),
661            true => None,
662        }
663    }
664
665    pub fn name(&self) -> String {
666        unsafe { BnString::into_string(BNGetArchitectureName(self.handle)) }
667    }
668}
669
// SAFETY: `CoreArchitecture` is a plain handle into core-owned data.
// NOTE(review): this relies on the core architecture API being callable from
// any thread — confirm against the core's threading guarantees.
unsafe impl Send for CoreArchitecture {}
unsafe impl Sync for CoreArchitecture {}
672
// Identity impl: satisfies the `AsRef<CoreArchitecture>` bound required by the
// `Architecture` trait for the core-backed architecture itself.
impl AsRef<CoreArchitecture> for CoreArchitecture {
    fn as_ref(&self) -> &Self {
        self
    }
}
678
679impl Architecture for CoreArchitecture {
680    type Handle = Self;
681
682    type RegisterInfo = CoreRegisterInfo;
683    type Register = CoreRegister;
684    type RegisterStackInfo = CoreRegisterStackInfo;
685    type RegisterStack = CoreRegisterStack;
686    type Flag = CoreFlag;
687    type FlagWrite = CoreFlagWrite;
688    type FlagClass = CoreFlagClass;
689    type FlagGroup = CoreFlagGroup;
690    type Intrinsic = CoreIntrinsic;
691
692    fn endianness(&self) -> Endianness {
693        unsafe { BNGetArchitectureEndianness(self.handle) }
694    }
695
696    fn address_size(&self) -> usize {
697        unsafe { BNGetArchitectureAddressSize(self.handle) }
698    }
699
700    fn default_integer_size(&self) -> usize {
701        unsafe { BNGetArchitectureDefaultIntegerSize(self.handle) }
702    }
703
704    fn instruction_alignment(&self) -> usize {
705        unsafe { BNGetArchitectureInstructionAlignment(self.handle) }
706    }
707
708    fn max_instr_len(&self) -> usize {
709        unsafe { BNGetArchitectureMaxInstructionLength(self.handle) }
710    }
711
712    fn opcode_display_len(&self) -> usize {
713        unsafe { BNGetArchitectureOpcodeDisplayLength(self.handle) }
714    }
715
716    fn associated_arch_by_addr(&self, addr: u64) -> CoreArchitecture {
717        let handle = unsafe { BNGetAssociatedArchitectureByAddress(self.handle, addr as *mut _) };
718        CoreArchitecture { handle }
719    }
720
721    fn instruction_info(&self, data: &[u8], addr: u64) -> Option<InstructionInfo> {
722        let mut info = BNInstructionInfo::default();
723        if unsafe { BNGetInstructionInfo(self.handle, data.as_ptr(), addr, data.len(), &mut info) }
724        {
725            Some(info.into())
726        } else {
727            None
728        }
729    }
730
    fn instruction_text(
        &self,
        data: &[u8],
        addr: u64,
    ) -> Option<(usize, Vec<InstructionTextToken>)> {
        // `consumed` is an in/out parameter: we pass the number of available
        // bytes and the core writes back the decoded instruction length.
        let mut consumed = data.len();
        let mut count: usize = 0;
        let mut result: *mut BNInstructionTextToken = std::ptr::null_mut();

        unsafe {
            if BNGetInstructionText(
                self.handle,
                data.as_ptr(),
                addr,
                &mut consumed,
                &mut result,
                &mut count,
            ) {
                // Copy tokens out of the core-owned array before freeing it.
                let instr_text_tokens = std::slice::from_raw_parts(result, count)
                    .iter()
                    .map(InstructionTextToken::from_raw)
                    .collect();
                BNFreeInstructionText(result, count);
                Some((consumed, instr_text_tokens))
            } else {
                None
            }
        }
    }
760
    fn instruction_text_with_context(
        &self,
        data: &[u8],
        addr: u64,
        context: Option<NonNull<c_void>>,
    ) -> Option<(usize, Vec<InstructionTextToken>)> {
        // `consumed` is an in/out parameter: we pass the number of available
        // bytes and the core writes back the decoded instruction length.
        let mut consumed = data.len();
        let mut count: usize = 0;
        let mut result: *mut BNInstructionTextToken = std::ptr::null_mut();
        // A null context pointer tells the core "no context available".
        let ctx_ptr: *mut c_void = context.map_or(std::ptr::null_mut(), |p| p.as_ptr());
        unsafe {
            if BNGetInstructionTextWithContext(
                self.handle,
                data.as_ptr(),
                addr,
                &mut consumed,
                ctx_ptr,
                &mut result,
                &mut count,
            ) {
                // Copy tokens out of the core-owned array before freeing it.
                let instr_text_tokens = std::slice::from_raw_parts(result, count)
                    .iter()
                    .map(InstructionTextToken::from_raw)
                    .collect();
                BNFreeInstructionText(result, count);
                Some((consumed, instr_text_tokens))
            } else {
                None
            }
        }
    }
792
793    fn instruction_llil(
794        &self,
795        data: &[u8],
796        addr: u64,
797        il: &LowLevelILMutableFunction,
798    ) -> Option<(usize, bool)> {
799        let mut size = data.len();
800        let success = unsafe {
801            BNGetInstructionLowLevelIL(
802                self.handle,
803                data.as_ptr(),
804                addr,
805                &mut size as *mut _,
806                il.handle,
807            )
808        };
809
810        if !success {
811            None
812        } else {
813            Some((size, true))
814        }
815    }
816
817    /// Performs basic block recovery and commits the results to the function analysis.
818    ///
819    /// NOTE: Only implement this method if function-level analysis is required. Otherwise, do not
820    /// implement to let default basic block analysis take place.
821    ///
822    /// NOTE: The default implementation exists in C++ here: <https://github.com/Vector35/binaryninja-api/blob/dev/defaultabb.cpp>
823    fn analyze_basic_blocks(
824        &self,
825        function: &mut Function,
826        context: &mut BasicBlockAnalysisContext,
827    ) {
828        unsafe {
829            BNArchitectureAnalyzeBasicBlocks(self.handle, function.handle, context.handle);
830        }
831    }
832
    /// Lifts an entire function through the core; returns `true` on success.
    fn lift_function(
        &self,
        function: LowLevelILMutableFunction,
        context: &mut FunctionLifterContext,
    ) -> bool {
        // SAFETY: all handles are valid core-owned pointers for this call.
        unsafe { BNArchitectureLiftFunction(self.handle, function.handle, context.handle) }
    }
840
    /// Default implementation: no custom IL is generated for flag writes.
    ///
    /// Returning `None` lets the caller fall back to its default flag-write handling.
    fn flag_write_llil<'a>(
        &self,
        _flag: Self::Flag,
        _flag_write: Self::FlagWrite,
        _op: LowLevelILFlagWriteOp<Self::Register>,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }
850
851    fn flags_required_for_flag_condition(
852        &self,
853        condition: FlagCondition,
854        class: Option<Self::FlagClass>,
855    ) -> Vec<Self::Flag> {
856        let class_id_raw = class.map(|c| c.id().0).unwrap_or(0);
857
858        unsafe {
859            let mut count: usize = 0;
860            let flags = BNGetArchitectureFlagsRequiredForFlagCondition(
861                self.handle,
862                condition,
863                class_id_raw,
864                &mut count,
865            );
866
867            let ret = std::slice::from_raw_parts(flags, count)
868                .iter()
869                .map(|&id| FlagId::from(id))
870                .filter_map(|flag| CoreFlag::new(*self, flag))
871                .collect();
872
873            BNFreeRegisterList(flags);
874
875            ret
876        }
877    }
878
    /// Default implementation: no custom IL for flag conditions; `None` defers
    /// to the caller's default handling.
    fn flag_cond_llil<'a>(
        &self,
        _cond: FlagCondition,
        _class: Option<Self::FlagClass>,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }
887
    /// Default implementation: no custom IL for semantic flag groups.
    fn flag_group_llil<'a>(
        &self,
        _group: Self::FlagGroup,
        _il: &'a LowLevelILMutableFunction,
    ) -> Option<LowLevelILMutableExpression<'a, ValueExpr>> {
        None
    }
895
896    fn registers_all(&self) -> Vec<CoreRegister> {
897        unsafe {
898            let mut count: usize = 0;
899            let registers_raw = BNGetAllArchitectureRegisters(self.handle, &mut count);
900
901            let ret = std::slice::from_raw_parts(registers_raw, count)
902                .iter()
903                .map(|&id| RegisterId::from(id))
904                .filter_map(|reg| CoreRegister::new(*self, reg))
905                .collect();
906
907            BNFreeRegisterList(registers_raw);
908
909            ret
910        }
911    }
912
    /// Looks up a register by id; `None` if the id is not valid for this architecture.
    fn register_from_id(&self, id: RegisterId) -> Option<CoreRegister> {
        CoreRegister::new(*self, id)
    }
916
917    fn registers_full_width(&self) -> Vec<CoreRegister> {
918        unsafe {
919            let mut count: usize = 0;
920            let registers_raw = BNGetFullWidthArchitectureRegisters(self.handle, &mut count);
921
922            let ret = std::slice::from_raw_parts(registers_raw, count)
923                .iter()
924                .map(|&id| RegisterId::from(id))
925                .filter_map(|reg| CoreRegister::new(*self, reg))
926                .collect();
927
928            BNFreeRegisterList(registers_raw);
929
930            ret
931        }
932    }
933
934    fn registers_global(&self) -> Vec<CoreRegister> {
935        unsafe {
936            let mut count: usize = 0;
937            let registers_raw = BNGetArchitectureGlobalRegisters(self.handle, &mut count);
938
939            let ret = std::slice::from_raw_parts(registers_raw, count)
940                .iter()
941                .map(|&id| RegisterId::from(id))
942                .filter_map(|reg| CoreRegister::new(*self, reg))
943                .collect();
944
945            BNFreeRegisterList(registers_raw);
946
947            ret
948        }
949    }
950
951    fn registers_system(&self) -> Vec<CoreRegister> {
952        unsafe {
953            let mut count: usize = 0;
954            let registers_raw = BNGetArchitectureSystemRegisters(self.handle, &mut count);
955
956            let ret = std::slice::from_raw_parts(registers_raw, count)
957                .iter()
958                .map(|&id| RegisterId::from(id))
959                .filter_map(|reg| CoreRegister::new(*self, reg))
960                .collect();
961
962            BNFreeRegisterList(registers_raw);
963
964            ret
965        }
966    }
967
968    fn stack_pointer_reg(&self) -> Option<CoreRegister> {
969        match unsafe { BNGetArchitectureStackPointerRegister(self.handle) } {
970            0xffff_ffff => None,
971            reg => Some(CoreRegister::new(*self, reg.into())?),
972        }
973    }
974
975    fn link_reg(&self) -> Option<CoreRegister> {
976        match unsafe { BNGetArchitectureLinkRegister(self.handle) } {
977            0xffff_ffff => None,
978            reg => Some(CoreRegister::new(*self, reg.into())?),
979        }
980    }
981
982    fn register_stacks(&self) -> Vec<CoreRegisterStack> {
983        unsafe {
984            let mut count: usize = 0;
985            let reg_stacks_raw = BNGetAllArchitectureRegisterStacks(self.handle, &mut count);
986
987            let ret = std::slice::from_raw_parts(reg_stacks_raw, count)
988                .iter()
989                .map(|&id| RegisterStackId::from(id))
990                .filter_map(|reg_stack| CoreRegisterStack::new(*self, reg_stack))
991                .collect();
992
993            BNFreeRegisterList(reg_stacks_raw);
994
995            ret
996        }
997    }
998
    /// Looks up a register stack by id; `None` if the id is not valid.
    fn register_stack_from_id(&self, id: RegisterStackId) -> Option<CoreRegisterStack> {
        CoreRegisterStack::new(*self, id)
    }
1002
1003    fn flags(&self) -> Vec<CoreFlag> {
1004        unsafe {
1005            let mut count: usize = 0;
1006            let flags_raw = BNGetAllArchitectureFlags(self.handle, &mut count);
1007
1008            let ret = std::slice::from_raw_parts(flags_raw, count)
1009                .iter()
1010                .map(|&id| FlagId::from(id))
1011                .filter_map(|flag| CoreFlag::new(*self, flag))
1012                .collect();
1013
1014            BNFreeRegisterList(flags_raw);
1015
1016            ret
1017        }
1018    }
1019
    /// Looks up a flag by id; `None` if the id is not valid.
    fn flag_from_id(&self, id: FlagId) -> Option<CoreFlag> {
        CoreFlag::new(*self, id)
    }
1023
1024    fn flag_write_types(&self) -> Vec<CoreFlagWrite> {
1025        unsafe {
1026            let mut count: usize = 0;
1027            let flag_writes_raw = BNGetAllArchitectureFlagWriteTypes(self.handle, &mut count);
1028
1029            let ret = std::slice::from_raw_parts(flag_writes_raw, count)
1030                .iter()
1031                .map(|&id| FlagWriteId::from(id))
1032                .filter_map(|flag_write| CoreFlagWrite::new(*self, flag_write))
1033                .collect();
1034
1035            BNFreeRegisterList(flag_writes_raw);
1036
1037            ret
1038        }
1039    }
1040
    /// Looks up a flag-write type by id; `None` if the id is not valid.
    fn flag_write_from_id(&self, id: FlagWriteId) -> Option<CoreFlagWrite> {
        CoreFlagWrite::new(*self, id)
    }
1044
1045    fn flag_classes(&self) -> Vec<CoreFlagClass> {
1046        unsafe {
1047            let mut count: usize = 0;
1048            let flag_classes_raw = BNGetAllArchitectureSemanticFlagClasses(self.handle, &mut count);
1049
1050            let ret = std::slice::from_raw_parts(flag_classes_raw, count)
1051                .iter()
1052                .map(|&id| FlagClassId::from(id))
1053                .filter_map(|flag_class| CoreFlagClass::new(*self, flag_class))
1054                .collect();
1055
1056            BNFreeRegisterList(flag_classes_raw);
1057
1058            ret
1059        }
1060    }
1061
    /// Looks up a semantic flag class by id; `None` if the id is not valid.
    fn flag_class_from_id(&self, id: FlagClassId) -> Option<CoreFlagClass> {
        CoreFlagClass::new(*self, id)
    }
1065
1066    fn flag_groups(&self) -> Vec<CoreFlagGroup> {
1067        unsafe {
1068            let mut count: usize = 0;
1069            let flag_groups_raw = BNGetAllArchitectureSemanticFlagGroups(self.handle, &mut count);
1070
1071            let ret = std::slice::from_raw_parts(flag_groups_raw, count)
1072                .iter()
1073                .map(|&id| FlagGroupId::from(id))
1074                .filter_map(|flag_group| CoreFlagGroup::new(*self, flag_group))
1075                .collect();
1076
1077            BNFreeRegisterList(flag_groups_raw);
1078
1079            ret
1080        }
1081    }
1082
    /// Looks up a semantic flag group by id; `None` if the id is not valid.
    fn flag_group_from_id(&self, id: FlagGroupId) -> Option<CoreFlagGroup> {
        CoreFlagGroup::new(*self, id)
    }
1086
1087    fn intrinsics(&self) -> Vec<CoreIntrinsic> {
1088        unsafe {
1089            let mut count: usize = 0;
1090            let intrinsics_raw = BNGetAllArchitectureIntrinsics(self.handle, &mut count);
1091
1092            let intrinsics = std::slice::from_raw_parts_mut(intrinsics_raw, count)
1093                .iter()
1094                .map(|&id| IntrinsicId::from(id))
1095                .filter_map(|intrinsic| CoreIntrinsic::new(*self, intrinsic))
1096                .collect();
1097
1098            BNFreeRegisterList(intrinsics_raw);
1099
1100            intrinsics
1101        }
1102    }
1103
    /// Looks up an intrinsic by id; `None` if the id is not valid.
    fn intrinsic_from_id(&self, id: IntrinsicId) -> Option<CoreIntrinsic> {
        CoreIntrinsic::new(*self, id)
    }
1107
    /// Whether the core reports assembly support for this architecture.
    fn can_assemble(&self) -> bool {
        unsafe { BNCanArchitectureAssemble(self.handle) }
    }
1111
1112    fn assemble(&self, code: &str, addr: u64) -> Result<Vec<u8>, String> {
1113        let code = CString::new(code).map_err(|_| "Invalid encoding in code string".to_string())?;
1114
1115        let result = DataBuffer::new(&[]);
1116        // TODO: This is actually a list of errors.
1117        let mut error_raw: *mut c_char = std::ptr::null_mut();
1118        let res = unsafe {
1119            BNAssemble(
1120                self.handle,
1121                code.as_ptr(),
1122                addr,
1123                result.as_raw(),
1124                &mut error_raw as *mut *mut c_char,
1125            )
1126        };
1127
1128        let error = raw_to_string(error_raw);
1129        unsafe {
1130            BNFreeString(error_raw);
1131        }
1132
1133        if res {
1134            Ok(result.get_data().to_vec())
1135        } else {
1136            Err(error.unwrap_or_else(|| "Assemble failed".into()))
1137        }
1138    }
1139
    /// Thin wrapper over `BNIsArchitectureNeverBranchPatchAvailable` for the
    /// instruction bytes in `data` at `addr`.
    fn is_never_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureNeverBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
        }
    }
1145
    /// Thin wrapper over `BNIsArchitectureAlwaysBranchPatchAvailable` for the
    /// instruction bytes in `data` at `addr`.
    fn is_always_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureAlwaysBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
        }
    }
1151
    /// Thin wrapper over `BNIsArchitectureInvertBranchPatchAvailable` for the
    /// instruction bytes in `data` at `addr`.
    fn is_invert_branch_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureInvertBranchPatchAvailable(self.handle, data.as_ptr(), addr, data.len())
        }
    }
1157
    /// Thin wrapper over `BNIsArchitectureSkipAndReturnZeroPatchAvailable` for
    /// the instruction bytes in `data` at `addr`.
    fn is_skip_and_return_zero_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureSkipAndReturnZeroPatchAvailable(
                self.handle,
                data.as_ptr(),
                addr,
                data.len(),
            )
        }
    }
1168
    /// Thin wrapper over `BNIsArchitectureSkipAndReturnValuePatchAvailable` for
    /// the instruction bytes in `data` at `addr`.
    fn is_skip_and_return_value_patch_available(&self, data: &[u8], addr: u64) -> bool {
        unsafe {
            BNIsArchitectureSkipAndReturnValuePatchAvailable(
                self.handle,
                data.as_ptr(),
                addr,
                data.len(),
            )
        }
    }
1179
    /// Rewrites the bytes in `data` in place via `BNArchitectureConvertToNop`;
    /// returns `true` on success.
    fn convert_to_nop(&self, data: &mut [u8], addr: u64) -> bool {
        unsafe { BNArchitectureConvertToNop(self.handle, data.as_mut_ptr(), addr, data.len()) }
    }
1183
    /// Patches the bytes in `data` in place via `BNArchitectureAlwaysBranch`;
    /// returns `true` on success.
    fn always_branch(&self, data: &mut [u8], addr: u64) -> bool {
        unsafe { BNArchitectureAlwaysBranch(self.handle, data.as_mut_ptr(), addr, data.len()) }
    }
1187
    /// Patches the bytes in `data` in place via `BNArchitectureInvertBranch`;
    /// returns `true` on success.
    fn invert_branch(&self, data: &mut [u8], addr: u64) -> bool {
        unsafe { BNArchitectureInvertBranch(self.handle, data.as_mut_ptr(), addr, data.len()) }
    }
1191
    /// Patches the bytes in `data` in place via
    /// `BNArchitectureSkipAndReturnValue`, using `value` as the returned value;
    /// returns `true` on success.
    fn skip_and_return_value(&self, data: &mut [u8], addr: u64, value: u64) -> bool {
        unsafe {
            BNArchitectureSkipAndReturnValue(
                self.handle,
                data.as_mut_ptr(),
                addr,
                data.len(),
                value,
            )
        }
    }
1203
    /// `CoreArchitecture` is `Copy`; the handle is the architecture value itself.
    fn handle(&self) -> CoreArchitecture {
        *self
    }
1207}
1208
// Debug output exposes the identifying metadata of the architecture rather
// than the raw core handle.
impl Debug for CoreArchitecture {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CoreArchitecture")
            .field("name", &self.name())
            .field("endianness", &self.endianness())
            .field("address_size", &self.address_size())
            .field("instruction_alignment", &self.instruction_alignment())
            .finish()
    }
}
1219
// Generates a getter/setter pair for one of an architecture's well-known
// calling-convention slots (default, cdecl, stdcall, fastcall).
//
// `$get_name`/`$set_name` are the Rust method names; `$get_api`/`$set_api`
// are the corresponding core FFI functions.
macro_rules! cc_func {
    ($get_name:ident, $get_api:ident, $set_name:ident, $set_api:ident) => {
        // Returns the convention stored in this slot, or `None` when the core
        // has none registered.
        fn $get_name(&self) -> Option<Ref<CoreCallingConvention>> {
            let arch = self.as_ref();

            unsafe {
                let cc = $get_api(arch.handle);

                if cc.is_null() {
                    None
                } else {
                    Some(CoreCallingConvention::ref_from_raw(
                        cc,
                        self.as_ref().handle(),
                    ))
                }
            }
        }

        // Stores `cc` into this slot. Panics if `cc` was created for a
        // different architecture, since the core would misinterpret it.
        fn $set_name(&self, cc: &CoreCallingConvention) {
            let arch = self.as_ref();

            assert!(
                cc.arch_handle.borrow().as_ref().handle == arch.handle,
                "use of calling convention with non-matching architecture!"
            );

            unsafe {
                $set_api(arch.handle, cc.handle);
            }
        }
    };
}
1253
/// Contains helper methods for all types implementing 'Architecture'
pub trait ArchitectureExt: Architecture {
    /// Looks up a register by name; `None` when the core reports no such
    /// register (sentinel id `0xffff_ffff`).
    fn register_by_name(&self, name: &str) -> Option<Self::Register> {
        let name = name.to_cstr();

        match unsafe { BNGetArchitectureRegisterByName(self.as_ref().handle, name.as_ptr()) } {
            0xffff_ffff => None,
            reg => self.register_from_id(reg.into()),
        }
    }

    /// Looks up a calling convention registered on this architecture by name.
    fn calling_convention_by_name(&self, name: &str) -> Option<Ref<CoreCallingConvention>> {
        let name = name.to_cstr();
        unsafe {
            // `NonNull::new(..)?` returns `None` when the core found nothing.
            let result = NonNull::new(BNGetArchitectureCallingConventionByName(
                self.as_ref().handle,
                name.as_ptr(),
            ))?;
            Some(CoreCallingConvention::ref_from_raw(
                result.as_ptr(),
                self.as_ref().handle(),
            ))
        }
    }

    /// All calling conventions registered on this architecture.
    fn calling_conventions(&self) -> Array<CoreCallingConvention> {
        unsafe {
            let mut count = 0;
            let calling_convs =
                BNGetArchitectureCallingConventions(self.as_ref().handle, &mut count);
            Array::new(calling_convs, count, self.as_ref().handle())
        }
    }

    // Getter/setter pairs for the architecture's well-known calling-convention
    // slots; see the `cc_func!` macro for the generated shape.
    cc_func!(
        get_default_calling_convention,
        BNGetArchitectureDefaultCallingConvention,
        set_default_calling_convention,
        BNSetArchitectureDefaultCallingConvention
    );

    cc_func!(
        get_cdecl_calling_convention,
        BNGetArchitectureCdeclCallingConvention,
        set_cdecl_calling_convention,
        BNSetArchitectureCdeclCallingConvention
    );

    cc_func!(
        get_stdcall_calling_convention,
        BNGetArchitectureStdcallCallingConvention,
        set_stdcall_calling_convention,
        BNSetArchitectureStdcallCallingConvention
    );

    cc_func!(
        get_fastcall_calling_convention,
        BNGetArchitectureFastcallCallingConvention,
        set_fastcall_calling_convention,
        BNSetArchitectureFastcallCallingConvention
    );

    /// The standalone platform for this architecture, if the core provides one.
    fn standalone_platform(&self) -> Option<Ref<Platform>> {
        unsafe {
            let handle = BNGetArchitectureStandalonePlatform(self.as_ref().handle);

            if handle.is_null() {
                return None;
            }

            Some(Platform::ref_from_raw(handle))
        }
    }

    /// The relocation handler registered for `view_name` on this architecture.
    ///
    /// Returns `None` if `view_name` contains an interior NUL byte or if no
    /// handler is registered.
    fn relocation_handler(&self, view_name: &str) -> Option<Ref<CoreRelocationHandler>> {
        let view_name = match CString::new(view_name) {
            Ok(view_name) => view_name,
            Err(_) => return None,
        };

        unsafe {
            let handle =
                BNArchitectureGetRelocationHandler(self.as_ref().handle, view_name.as_ptr());

            if handle.is_null() {
                return None;
            }

            Some(CoreRelocationHandler::ref_from_raw(handle))
        }
    }

    /// Registers a custom relocation handler for `name` on this architecture.
    fn register_relocation_handler<R, F>(&self, name: &str, func: F)
    where
        R: 'static
            + RelocationHandler<Handle = CustomRelocationHandlerHandle<R>>
            + Send
            + Sync
            + Sized,
        F: FnOnce(CustomRelocationHandlerHandle<R>, CoreRelocationHandler) -> R,
    {
        crate::relocation::register_relocation_handler(self.as_ref(), name, func);
    }

    /// Registers a function recognizer scoped to this architecture.
    fn register_function_recognizer<R>(&self, recognizer: R)
    where
        R: 'static + FunctionRecognizer + Send + Sync + Sized,
    {
        crate::function_recognizer::register_arch_function_recognizer(self.as_ref(), recognizer);
    }
}
1365
// Blanket impl: every `Architecture` automatically gets the `ArchitectureExt` helpers.
impl<T: Architecture> ArchitectureExt for T {}
1367
/// Registers a new architecture with the given name.
///
/// `func` is invoked exactly once to construct the custom architecture from
/// its handle and the freshly created core architecture.
///
/// NOTE: This function should only be called within `CorePluginInit`.
pub fn register_architecture<A, F>(name: &str, func: F) -> &'static A
where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync + Sized,
    F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
{
    // No extra customization of the callback table beyond the defaults.
    register_architecture_impl(name, func, |_| {})
}
1378
1379fn register_architecture_impl<A, F, C>(name: &str, func: F, customize: C) -> &'static A
1380where
1381    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync + Sized,
1382    F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
1383    C: FnOnce(&mut BNCustomArchitecture),
1384{
    #[repr(C)]
    // Context handed to the core as the architecture's `ctxt` pointer.
    // `arch` is laid out first (`#[repr(C)]`) so `ctxt as *mut A` is valid
    // once `cb_init` has written it; `func` holds the one-shot constructor.
    struct ArchitectureBuilder<A, F>
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
        F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
    {
        arch: MaybeUninit<A>,
        func: Option<F>,
    }
1394
    // Core callback: constructs the custom architecture in place.
    extern "C" fn cb_init<A, F>(ctxt: *mut c_void, obj: *mut BNArchitecture)
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
        F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
    {
        unsafe {
            // `ctxt` is the `ArchitectureBuilder` allocated at registration time.
            let custom_arch = &mut *(ctxt as *mut ArchitectureBuilder<A, F>);
            // The handle aliases the builder: `arch` is the first field of a
            // `#[repr(C)]` struct, so the cast to `*mut A` lines up.
            let custom_arch_handle = CustomArchitectureHandle {
                handle: ctxt as *mut A,
            };

            // The constructor is taken out of the `Option`, so this callback
            // must run at most once; a second invocation would panic here.
            let create = custom_arch.func.take().unwrap();
            custom_arch
                .arch
                .write(create(custom_arch_handle, CoreArchitecture::from_raw(obj)));
        }
    }
1412
1413    extern "C" fn cb_endianness<A>(ctxt: *mut c_void) -> BNEndianness
1414    where
1415        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1416    {
1417        let custom_arch = unsafe { &*(ctxt as *mut A) };
1418        custom_arch.endianness()
1419    }
1420
1421    extern "C" fn cb_address_size<A>(ctxt: *mut c_void) -> usize
1422    where
1423        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1424    {
1425        let custom_arch = unsafe { &*(ctxt as *mut A) };
1426        custom_arch.address_size()
1427    }
1428
1429    extern "C" fn cb_default_integer_size<A>(ctxt: *mut c_void) -> usize
1430    where
1431        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1432    {
1433        let custom_arch = unsafe { &*(ctxt as *mut A) };
1434        custom_arch.default_integer_size()
1435    }
1436
1437    extern "C" fn cb_instruction_alignment<A>(ctxt: *mut c_void) -> usize
1438    where
1439        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1440    {
1441        let custom_arch = unsafe { &*(ctxt as *mut A) };
1442        custom_arch.instruction_alignment()
1443    }
1444
1445    extern "C" fn cb_max_instr_len<A>(ctxt: *mut c_void) -> usize
1446    where
1447        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1448    {
1449        let custom_arch = unsafe { &*(ctxt as *mut A) };
1450        custom_arch.max_instr_len()
1451    }
1452
1453    extern "C" fn cb_opcode_display_len<A>(ctxt: *mut c_void) -> usize
1454    where
1455        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1456    {
1457        let custom_arch = unsafe { &*(ctxt as *mut A) };
1458        custom_arch.opcode_display_len()
1459    }
1460
1461    extern "C" fn cb_associated_arch_by_addr<A>(
1462        ctxt: *mut c_void,
1463        addr: *mut u64,
1464    ) -> *mut BNArchitecture
1465    where
1466        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1467    {
1468        let custom_arch = unsafe { &*(ctxt as *mut A) };
1469        let addr = unsafe { *(addr) };
1470
1471        custom_arch.associated_arch_by_addr(addr).handle
1472    }
1473
1474    extern "C" fn cb_instruction_info<A>(
1475        ctxt: *mut c_void,
1476        data: *const u8,
1477        addr: u64,
1478        len: usize,
1479        result: *mut BNInstructionInfo,
1480    ) -> bool
1481    where
1482        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1483    {
1484        let custom_arch = unsafe { &*(ctxt as *mut A) };
1485        let data = unsafe { std::slice::from_raw_parts(data, len) };
1486
1487        match custom_arch.instruction_info(data, addr) {
1488            Some(info) => {
1489                // SAFETY: Passed in to be written to
1490                unsafe { *result = info.into() };
1491                true
1492            }
1493            None => false,
1494        }
1495    }
1496
    // Core callback: disassembles one instruction to text tokens. The token
    // buffer is leaked here and reclaimed by `cb_free_instruction_text`.
    extern "C" fn cb_get_instruction_text<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: *mut usize,
        result: *mut *mut BNInstructionTextToken,
        count: *mut usize,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // `ctxt` is the registered `A`; `len` is in/out (buffer size in,
        // bytes consumed out); `result`/`count` receive the token buffer.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, *len) };
        let result = unsafe { &mut *result };

        let Some((res_size, res_tokens)) = custom_arch.instruction_text(data, addr) else {
            return false;
        };

        // Convert the tokens into their raw FFI representation.
        let res_tokens: Box<[BNInstructionTextToken]> = res_tokens
            .into_iter()
            .map(InstructionTextToken::into_raw)
            .collect();
        unsafe {
            // NOTE: Freed with `cb_free_instruction_text`
            let res_tokens = Box::leak(res_tokens);
            *result = res_tokens.as_mut_ptr();
            *count = res_tokens.len();
            *len = res_size;
        }
        true
    }
1529
1530    pub unsafe extern "C" fn cb_get_instruction_text_with_context<A>(
1531        ctxt: *mut c_void,
1532        data: *const u8,
1533        addr: u64,
1534        len: *mut usize,
1535        context: *mut c_void,
1536        result: *mut *mut BNInstructionTextToken,
1537        count: *mut usize,
1538    ) -> bool
1539    where
1540        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1541    {
1542        let custom_arch = unsafe { &*(ctxt as *mut A) };
1543        let data = unsafe { std::slice::from_raw_parts(data, *len) };
1544        let result = unsafe { &mut *result };
1545        let context = NonNull::new(context);
1546
1547        let Some((res_size, res_tokens)) =
1548            custom_arch.instruction_text_with_context(data, addr, context)
1549        else {
1550            return false;
1551        };
1552
1553        let res_tokens: Box<[BNInstructionTextToken]> = res_tokens
1554            .into_iter()
1555            .map(InstructionTextToken::into_raw)
1556            .collect();
1557        unsafe {
1558            // NOTE: Freed with `cb_free_instruction_text`
1559            let res_tokens = Box::leak(res_tokens);
1560            *result = res_tokens.as_mut_ptr();
1561            *count = res_tokens.len();
1562            *len = res_size;
1563        }
1564        true
1565    }
1566
    // Core callback: releases a token buffer previously leaked by
    // `cb_get_instruction_text` / `cb_get_instruction_text_with_context`.
    extern "C" fn cb_free_instruction_text(tokens: *mut BNInstructionTextToken, count: usize) {
        unsafe {
            // Reconstruct the leaked `Box<[BNInstructionTextToken]>` so it is
            // dropped, then release each token's owned contents.
            let raw_tokens = std::slice::from_raw_parts_mut(tokens, count);
            let boxed_tokens = Box::from_raw(raw_tokens);
            for token in boxed_tokens {
                InstructionTextToken::free_raw(token);
            }
        }
    }
1576
1577    extern "C" fn cb_instruction_llil<A>(
1578        ctxt: *mut c_void,
1579        data: *const u8,
1580        addr: u64,
1581        len: *mut usize,
1582        il: *mut BNLowLevelILFunction,
1583    ) -> bool
1584    where
1585        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1586    {
1587        let custom_arch = unsafe { &*(ctxt as *mut A) };
1588        let data = unsafe { std::slice::from_raw_parts(data, *len) };
1589        let lifter = unsafe {
1590            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
1591        };
1592
1593        match custom_arch.instruction_llil(data, addr, &lifter) {
1594            Some((res_len, res_value)) => {
1595                unsafe { *len = res_len };
1596                res_value
1597            }
1598            None => false,
1599        }
1600    }
1601
1602    extern "C" fn cb_analyze_basic_blocks<A>(
1603        ctxt: *mut c_void,
1604        function: *mut BNFunction,
1605        context: *mut BNBasicBlockAnalysisContext,
1606    ) where
1607        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1608    {
1609        let custom_arch = unsafe { &*(ctxt as *mut A) };
1610        let mut function = unsafe { Function::from_raw(function) };
1611        let mut context: BasicBlockAnalysisContext =
1612            unsafe { BasicBlockAnalysisContext::from_raw(context) };
1613        custom_arch.analyze_basic_blocks(&mut function, &mut context);
1614    }
1615
1616    extern "C" fn cb_lift_function<A>(
1617        ctxt: *mut c_void,
1618        function: *mut BNLowLevelILFunction,
1619        context: *mut BNFunctionLifterContext,
1620    ) -> bool
1621    where
1622        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1623    {
1624        let custom_arch = unsafe { &*(ctxt as *mut A) };
1625        let function = unsafe {
1626            LowLevelILMutableFunction::from_raw_with_arch(function, Some(*custom_arch.as_ref()))
1627        };
1628        let mut context: FunctionLifterContext =
1629            unsafe { FunctionLifterContext::from_raw(context) };
1630        custom_arch.lift_function(function, &mut context)
1631    }
1632
1633    extern "C" fn cb_reg_name<A>(ctxt: *mut c_void, reg: u32) -> *mut c_char
1634    where
1635        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1636    {
1637        let custom_arch = unsafe { &*(ctxt as *mut A) };
1638
1639        match custom_arch.register_from_id(reg.into()) {
1640            Some(reg) => BnString::into_raw(BnString::new(reg.name().as_ref())),
1641            None => BnString::into_raw(BnString::new("invalid_reg")),
1642        }
1643    }
1644
1645    extern "C" fn cb_flag_name<A>(ctxt: *mut c_void, flag: u32) -> *mut c_char
1646    where
1647        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1648    {
1649        let custom_arch = unsafe { &*(ctxt as *mut A) };
1650
1651        match custom_arch.flag_from_id(flag.into()) {
1652            Some(flag) => BnString::into_raw(BnString::new(flag.name().as_ref())),
1653            None => BnString::into_raw(BnString::new("invalid_flag")),
1654        }
1655    }
1656
1657    extern "C" fn cb_flag_write_name<A>(ctxt: *mut c_void, flag_write: u32) -> *mut c_char
1658    where
1659        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1660    {
1661        let custom_arch = unsafe { &*(ctxt as *mut A) };
1662
1663        match custom_arch.flag_write_from_id(flag_write.into()) {
1664            Some(flag_write) => BnString::into_raw(BnString::new(flag_write.name().as_ref())),
1665            None => BnString::into_raw(BnString::new("invalid_flag_write")),
1666        }
1667    }
1668
1669    extern "C" fn cb_semantic_flag_class_name<A>(ctxt: *mut c_void, class: u32) -> *mut c_char
1670    where
1671        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1672    {
1673        let custom_arch = unsafe { &*(ctxt as *mut A) };
1674
1675        match custom_arch.flag_class_from_id(class.into()) {
1676            Some(class) => BnString::into_raw(BnString::new(class.name().as_ref())),
1677            None => BnString::into_raw(BnString::new("invalid_flag_class")),
1678        }
1679    }
1680
1681    extern "C" fn cb_semantic_flag_group_name<A>(ctxt: *mut c_void, group: u32) -> *mut c_char
1682    where
1683        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1684    {
1685        let custom_arch = unsafe { &*(ctxt as *mut A) };
1686
1687        match custom_arch.flag_group_from_id(group.into()) {
1688            Some(group) => BnString::into_raw(BnString::new(group.name().as_ref())),
1689            None => BnString::into_raw(BnString::new("invalid_flag_group")),
1690        }
1691    }
1692
1693    extern "C" fn cb_registers_full_width<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1694    where
1695        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1696    {
1697        let custom_arch = unsafe { &*(ctxt as *mut A) };
1698        let mut regs: Box<[_]> = custom_arch
1699            .registers_full_width()
1700            .iter()
1701            .map(|r| r.id().0)
1702            .collect();
1703
1704        // SAFETY: `count` is an out parameter
1705        unsafe { *count = regs.len() };
1706        let regs_ptr = regs.as_mut_ptr();
1707        std::mem::forget(regs);
1708        regs_ptr
1709    }
1710
1711    extern "C" fn cb_registers_all<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1712    where
1713        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1714    {
1715        let custom_arch = unsafe { &*(ctxt as *mut A) };
1716        let mut regs: Box<[_]> = custom_arch
1717            .registers_all()
1718            .iter()
1719            .map(|r| r.id().0)
1720            .collect();
1721
1722        // SAFETY: `count` is an out parameter
1723        unsafe { *count = regs.len() };
1724        let regs_ptr = regs.as_mut_ptr();
1725        std::mem::forget(regs);
1726        regs_ptr
1727    }
1728
1729    extern "C" fn cb_registers_global<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1730    where
1731        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1732    {
1733        let custom_arch = unsafe { &*(ctxt as *mut A) };
1734        let mut regs: Box<[_]> = custom_arch
1735            .registers_global()
1736            .iter()
1737            .map(|r| r.id().0)
1738            .collect();
1739
1740        // SAFETY: `count` is an out parameter
1741        unsafe { *count = regs.len() };
1742        let regs_ptr = regs.as_mut_ptr();
1743        std::mem::forget(regs);
1744        regs_ptr
1745    }
1746
1747    extern "C" fn cb_registers_system<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1748    where
1749        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1750    {
1751        let custom_arch = unsafe { &*(ctxt as *mut A) };
1752        let mut regs: Box<[_]> = custom_arch
1753            .registers_system()
1754            .iter()
1755            .map(|r| r.id().0)
1756            .collect();
1757
1758        // SAFETY: `count` is an out parameter
1759        unsafe { *count = regs.len() };
1760        let regs_ptr = regs.as_mut_ptr();
1761        std::mem::forget(regs);
1762        regs_ptr
1763    }
1764
1765    extern "C" fn cb_flags<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1766    where
1767        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1768    {
1769        let custom_arch = unsafe { &*(ctxt as *mut A) };
1770        let mut flags: Box<[_]> = custom_arch.flags().iter().map(|f| f.id().0).collect();
1771
1772        // SAFETY: `count` is an out parameter
1773        unsafe { *count = flags.len() };
1774        let flags_ptr = flags.as_mut_ptr();
1775        std::mem::forget(flags);
1776        flags_ptr
1777    }
1778
1779    extern "C" fn cb_flag_write_types<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1780    where
1781        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1782    {
1783        let custom_arch = unsafe { &*(ctxt as *mut A) };
1784        let mut flag_writes: Box<[_]> = custom_arch
1785            .flag_write_types()
1786            .iter()
1787            .map(|f| f.id().0)
1788            .collect();
1789
1790        // SAFETY: `count` is an out parameter
1791        unsafe { *count = flag_writes.len() };
1792        let flags_ptr = flag_writes.as_mut_ptr();
1793        std::mem::forget(flag_writes);
1794        flags_ptr
1795    }
1796
1797    extern "C" fn cb_semantic_flag_classes<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1798    where
1799        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1800    {
1801        let custom_arch = unsafe { &*(ctxt as *mut A) };
1802        let mut flag_classes: Box<[_]> = custom_arch
1803            .flag_classes()
1804            .iter()
1805            .map(|f| f.id().0)
1806            .collect();
1807
1808        // SAFETY: `count` is an out parameter
1809        unsafe { *count = flag_classes.len() };
1810        let flags_ptr = flag_classes.as_mut_ptr();
1811        std::mem::forget(flag_classes);
1812        flags_ptr
1813    }
1814
1815    extern "C" fn cb_semantic_flag_groups<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
1816    where
1817        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1818    {
1819        let custom_arch = unsafe { &*(ctxt as *mut A) };
1820        let mut flag_groups: Box<[_]> =
1821            custom_arch.flag_groups().iter().map(|f| f.id().0).collect();
1822
1823        // SAFETY: `count` is an out parameter
1824        unsafe { *count = flag_groups.len() };
1825        let flags_ptr = flag_groups.as_mut_ptr();
1826        std::mem::forget(flag_groups);
1827        flags_ptr
1828    }
1829
1830    extern "C" fn cb_flag_role<A>(ctxt: *mut c_void, flag: u32, class: u32) -> BNFlagRole
1831    where
1832        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1833    {
1834        let custom_arch = unsafe { &*(ctxt as *mut A) };
1835
1836        if let (Some(flag), class) = (
1837            custom_arch.flag_from_id(FlagId(flag)),
1838            custom_arch.flag_class_from_id(FlagClassId(class)),
1839        ) {
1840            flag.role(class)
1841        } else {
1842            FlagRole::SpecialFlagRole
1843        }
1844    }
1845
1846    extern "C" fn cb_flags_required_for_flag_cond<A>(
1847        ctxt: *mut c_void,
1848        cond: BNLowLevelILFlagCondition,
1849        class: u32,
1850        count: *mut usize,
1851    ) -> *mut u32
1852    where
1853        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1854    {
1855        let custom_arch = unsafe { &*(ctxt as *mut A) };
1856        let class = custom_arch.flag_class_from_id(FlagClassId(class));
1857        let mut flags: Box<[_]> = custom_arch
1858            .flags_required_for_flag_condition(cond, class)
1859            .iter()
1860            .map(|f| f.id().0)
1861            .collect();
1862
1863        // SAFETY: `count` is an out parameter
1864        unsafe { *count = flags.len() };
1865        let flags_ptr = flags.as_mut_ptr();
1866        std::mem::forget(flags);
1867        flags_ptr
1868    }
1869
1870    extern "C" fn cb_flags_required_for_semantic_flag_group<A>(
1871        ctxt: *mut c_void,
1872        group: u32,
1873        count: *mut usize,
1874    ) -> *mut u32
1875    where
1876        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1877    {
1878        let custom_arch = unsafe { &*(ctxt as *mut A) };
1879
1880        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
1881            let mut flags: Box<[_]> = group.flags_required().iter().map(|f| f.id().0).collect();
1882
1883            // SAFETY: `count` is an out parameter
1884            unsafe { *count = flags.len() };
1885            let flags_ptr = flags.as_mut_ptr();
1886            std::mem::forget(flags);
1887            flags_ptr
1888        } else {
1889            unsafe {
1890                *count = 0;
1891            }
1892            std::ptr::null_mut()
1893        }
1894    }
1895
1896    extern "C" fn cb_flag_conditions_for_semantic_flag_group<A>(
1897        ctxt: *mut c_void,
1898        group: u32,
1899        count: *mut usize,
1900    ) -> *mut BNFlagConditionForSemanticClass
1901    where
1902        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1903    {
1904        let custom_arch = unsafe { &*(ctxt as *mut A) };
1905
1906        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
1907            let flag_conditions = group.flag_conditions();
1908            let mut flags: Box<[_]> = flag_conditions
1909                .iter()
1910                .map(|(&class, &condition)| BNFlagConditionForSemanticClass {
1911                    semanticClass: class.id().0,
1912                    condition,
1913                })
1914                .collect();
1915
1916            // SAFETY: `count` is an out parameter
1917            unsafe { *count = flags.len() };
1918            let flags_ptr = flags.as_mut_ptr();
1919            std::mem::forget(flags);
1920            flags_ptr
1921        } else {
1922            unsafe {
1923                *count = 0;
1924            }
1925            std::ptr::null_mut()
1926        }
1927    }
1928
1929    extern "C" fn cb_free_flag_conditions_for_semantic_flag_group<A>(
1930        _ctxt: *mut c_void,
1931        conds: *mut BNFlagConditionForSemanticClass,
1932        count: usize,
1933    ) where
1934        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1935    {
1936        if conds.is_null() {
1937            return;
1938        }
1939
1940        unsafe {
1941            let flags_ptr = std::ptr::slice_from_raw_parts_mut(conds, count);
1942            let _flags = Box::from_raw(flags_ptr);
1943        }
1944    }
1945
1946    extern "C" fn cb_flags_written_by_write_type<A>(
1947        ctxt: *mut c_void,
1948        write_type: u32,
1949        count: *mut usize,
1950    ) -> *mut u32
1951    where
1952        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1953    {
1954        let custom_arch = unsafe { &*(ctxt as *mut A) };
1955
1956        if let Some(write_type) = custom_arch.flag_write_from_id(FlagWriteId(write_type)) {
1957            let mut flags_written: Box<[_]> = write_type
1958                .flags_written()
1959                .iter()
1960                .map(|f| f.id().0)
1961                .collect();
1962
1963            // SAFETY: `count` is an out parameter
1964            unsafe { *count = flags_written.len() };
1965            let flags_ptr = flags_written.as_mut_ptr();
1966            std::mem::forget(flags_written);
1967            flags_ptr
1968        } else {
1969            unsafe {
1970                *count = 0;
1971            }
1972            std::ptr::null_mut()
1973        }
1974    }
1975
1976    extern "C" fn cb_semantic_class_for_flag_write_type<A>(
1977        ctxt: *mut c_void,
1978        write_type: u32,
1979    ) -> u32
1980    where
1981        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
1982    {
1983        let custom_arch = unsafe { &*(ctxt as *mut A) };
1984        custom_arch
1985            .flag_write_from_id(FlagWriteId(write_type))
1986            .map(|w| w.class())
1987            .and_then(|c| c.map(|c| c.id().0))
1988            .unwrap_or(0)
1989    }
1990
    /// Lift the computation of a single flag for a flag-write side effect.
    ///
    /// Tries the architecture's own [`Architecture::flag_write_llil`] first; if the
    /// architecture declines (or the raw operation can't be unpacked into a
    /// [`LowLevelILFlagWriteOp`]), falls back to the core's default lowering based
    /// on the flag's role. Returns the LLIL expression index of the flag value.
    extern "C" fn cb_flag_write_llil<A>(
        ctxt: *mut c_void,
        op: BNLowLevelILOperation,
        size: usize,
        flag_write: u32,
        flag: u32,
        operands_raw: *mut BNRegisterOrConstant,
        operand_count: usize,
        il: *mut BNLowLevelILFunction,
    ) -> usize
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: the core guarantees `ctxt` is the architecture context registered with it.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let flag_write = custom_arch.flag_write_from_id(FlagWriteId(flag_write));
        let flag = custom_arch.flag_from_id(FlagId(flag));
        // SAFETY: the core guarantees `operands_raw` points to `operand_count` entries.
        let operands = unsafe { std::slice::from_raw_parts(operands_raw, operand_count) };
        // SAFETY: `il` is a live LLIL function handle for the duration of this call.
        let lifter = unsafe {
            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
        };

        if let (Some(flag_write), Some(flag)) = (flag_write, flag) {
            if let Some(op) = LowLevelILFlagWriteOp::from_op(custom_arch, size, op, operands) {
                // Give the architecture first refusal on lifting this flag write.
                if let Some(expr) = custom_arch.flag_write_llil(flag, flag_write, op, &lifter) {
                    // TODO verify that returned expr is a bool value
                    return expr.index.0;
                }
            } else {
                tracing::warn!(
                    "unable to unpack flag write op: {:?} with {} operands",
                    op,
                    operands.len()
                );
            }

            // Architecture declined (or the op was unrecognized): derive the flag's
            // role and let the core synthesize the default IL for it.
            let role = flag.role(flag_write.class());

            // SAFETY: all raw pointers are passed through unchanged from the core.
            unsafe {
                BNGetDefaultArchitectureFlagWriteLowLevelIL(
                    custom_arch.as_ref().handle,
                    op,
                    size,
                    role,
                    operands_raw,
                    operand_count,
                    il,
                )
            }
        } else {
            // TODO this should be impossible; requires bad flag/flag_write ids passed in;
            // explode more violently
            lifter.unimplemented().index.0
        }
    }
2045
2046    extern "C" fn cb_flag_cond_llil<A>(
2047        ctxt: *mut c_void,
2048        cond: FlagCondition,
2049        class: u32,
2050        il: *mut BNLowLevelILFunction,
2051    ) -> usize
2052    where
2053        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2054    {
2055        let custom_arch = unsafe { &*(ctxt as *mut A) };
2056        let class = custom_arch.flag_class_from_id(FlagClassId(class));
2057
2058        let lifter = unsafe {
2059            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
2060        };
2061        if let Some(expr) = custom_arch.flag_cond_llil(cond, class, &lifter) {
2062            // TODO verify that returned expr is a bool value
2063            return expr.index.0;
2064        }
2065
2066        lifter.unimplemented().index.0
2067    }
2068
2069    extern "C" fn cb_flag_group_llil<A>(
2070        ctxt: *mut c_void,
2071        group: u32,
2072        il: *mut BNLowLevelILFunction,
2073    ) -> usize
2074    where
2075        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2076    {
2077        let custom_arch = unsafe { &*(ctxt as *mut A) };
2078        let lifter = unsafe {
2079            LowLevelILMutableFunction::from_raw_with_arch(il, Some(*custom_arch.as_ref()))
2080        };
2081
2082        if let Some(group) = custom_arch.flag_group_from_id(FlagGroupId(group)) {
2083            if let Some(expr) = custom_arch.flag_group_llil(group, &lifter) {
2084                // TODO verify that returned expr is a bool value
2085                return expr.index.0;
2086            }
2087        }
2088
2089        lifter.unimplemented().index.0
2090    }
2091
2092    extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32, count: usize) {
2093        if regs.is_null() {
2094            return;
2095        }
2096
2097        unsafe {
2098            let regs_ptr = std::ptr::slice_from_raw_parts_mut(regs, count);
2099            let _regs = Box::from_raw(regs_ptr);
2100        }
2101    }
2102
2103    extern "C" fn cb_register_info<A>(ctxt: *mut c_void, reg: u32, result: *mut BNRegisterInfo)
2104    where
2105        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2106    {
2107        let custom_arch = unsafe { &*(ctxt as *mut A) };
2108        let result = unsafe { &mut *result };
2109
2110        if let Some(reg) = custom_arch.register_from_id(RegisterId(reg)) {
2111            let info = reg.info();
2112
2113            result.fullWidthRegister = match info.parent() {
2114                Some(p) => p.id().0,
2115                None => reg.id().0,
2116            };
2117
2118            result.offset = info.offset();
2119            result.size = info.size();
2120            result.extend = info.implicit_extend().into();
2121        }
2122    }
2123
2124    extern "C" fn cb_stack_pointer<A>(ctxt: *mut c_void) -> u32
2125    where
2126        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2127    {
2128        let custom_arch = unsafe { &*(ctxt as *mut A) };
2129
2130        if let Some(reg) = custom_arch.stack_pointer_reg() {
2131            reg.id().0
2132        } else {
2133            0xffff_ffff
2134        }
2135    }
2136
2137    extern "C" fn cb_link_reg<A>(ctxt: *mut c_void) -> u32
2138    where
2139        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2140    {
2141        let custom_arch = unsafe { &*(ctxt as *mut A) };
2142
2143        if let Some(reg) = custom_arch.link_reg() {
2144            reg.id().0
2145        } else {
2146            0xffff_ffff
2147        }
2148    }
2149
2150    extern "C" fn cb_reg_stack_name<A>(ctxt: *mut c_void, stack: u32) -> *mut c_char
2151    where
2152        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2153    {
2154        let custom_arch = unsafe { &*(ctxt as *mut A) };
2155
2156        match custom_arch.register_stack_from_id(RegisterStackId(stack)) {
2157            Some(stack) => BnString::into_raw(BnString::new(stack.name().as_ref())),
2158            None => BnString::into_raw(BnString::new("invalid_reg_stack")),
2159        }
2160    }
2161
2162    extern "C" fn cb_reg_stacks<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2163    where
2164        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2165    {
2166        let custom_arch = unsafe { &*(ctxt as *mut A) };
2167        let mut regs: Box<[_]> = custom_arch
2168            .register_stacks()
2169            .iter()
2170            .map(|r| r.id().0)
2171            .collect();
2172
2173        // SAFETY: Passed in to be written
2174        unsafe { *count = regs.len() };
2175        let regs_ptr = regs.as_mut_ptr();
2176        std::mem::forget(regs);
2177        regs_ptr
2178    }
2179
2180    extern "C" fn cb_reg_stack_info<A>(
2181        ctxt: *mut c_void,
2182        stack: u32,
2183        result: *mut BNRegisterStackInfo,
2184    ) where
2185        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2186    {
2187        let custom_arch = unsafe { &*(ctxt as *mut A) };
2188        let result = unsafe { &mut *result };
2189
2190        if let Some(stack) = custom_arch.register_stack_from_id(RegisterStackId(stack)) {
2191            let info = stack.info();
2192
2193            let (reg, count) = info.storage_regs();
2194            result.firstStorageReg = reg.id().0;
2195            result.storageCount = count as u32;
2196
2197            if let Some((reg, count)) = info.top_relative_regs() {
2198                result.firstTopRelativeReg = reg.id().0;
2199                result.topRelativeCount = count as u32;
2200            } else {
2201                result.firstTopRelativeReg = 0xffff_ffff;
2202                result.topRelativeCount = 0;
2203            }
2204
2205            result.stackTopReg = info.stack_top_reg().id().0;
2206        }
2207    }
2208
2209    extern "C" fn cb_intrinsic_class<A>(ctxt: *mut c_void, intrinsic: u32) -> BNIntrinsicClass
2210    where
2211        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2212    {
2213        let custom_arch = unsafe { &*(ctxt as *mut A) };
2214        match custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) {
2215            Some(intrinsic) => intrinsic.class(),
2216            // TODO: Make this unreachable?
2217            None => BNIntrinsicClass::GeneralIntrinsicClass,
2218        }
2219    }
2220
2221    extern "C" fn cb_intrinsic_name<A>(ctxt: *mut c_void, intrinsic: u32) -> *mut c_char
2222    where
2223        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2224    {
2225        let custom_arch = unsafe { &*(ctxt as *mut A) };
2226        match custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) {
2227            Some(intrinsic) => BnString::into_raw(BnString::new(intrinsic.name())),
2228            None => BnString::into_raw(BnString::new("invalid_intrinsic")),
2229        }
2230    }
2231
2232    extern "C" fn cb_intrinsics<A>(ctxt: *mut c_void, count: *mut usize) -> *mut u32
2233    where
2234        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2235    {
2236        let custom_arch = unsafe { &*(ctxt as *mut A) };
2237        let mut intrinsics: Box<[_]> = custom_arch.intrinsics().iter().map(|i| i.id().0).collect();
2238
2239        // SAFETY: Passed in to be written
2240        unsafe { *count = intrinsics.len() };
2241        let intrinsics_ptr = intrinsics.as_mut_ptr();
2242        std::mem::forget(intrinsics);
2243        intrinsics_ptr
2244    }
2245
2246    extern "C" fn cb_intrinsic_inputs<A>(
2247        ctxt: *mut c_void,
2248        intrinsic: u32,
2249        count: *mut usize,
2250    ) -> *mut BNNameAndType
2251    where
2252        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2253    {
2254        let custom_arch = unsafe { &*(ctxt as *mut A) };
2255
2256        let Some(intrinsic) = custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) else {
2257            // SAFETY: Passed in to be written
2258            unsafe {
2259                *count = 0;
2260            }
2261            return std::ptr::null_mut();
2262        };
2263
2264        let inputs = intrinsic.inputs();
2265        // NOTE: The into_raw will leak and be freed later by `cb_free_name_and_types`.
2266        let raw_inputs: Box<[_]> = inputs.into_iter().map(NameAndType::into_raw).collect();
2267
2268        // SAFETY: Passed in to be written
2269        unsafe {
2270            *count = raw_inputs.len();
2271        }
2272
2273        if raw_inputs.is_empty() {
2274            std::ptr::null_mut()
2275        } else {
2276            // Core is responsible for calling back to `cb_free_name_and_types`.
2277            Box::leak(raw_inputs).as_mut_ptr()
2278        }
2279    }
2280
2281    extern "C" fn cb_free_name_and_types<A>(
2282        _ctxt: *mut c_void,
2283        nt: *mut BNNameAndType,
2284        count: usize,
2285    ) where
2286        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2287    {
2288        if nt.is_null() {
2289            return;
2290        }
2291
2292        // Reconstruct the box and drop.
2293        let nt_ptr = std::ptr::slice_from_raw_parts_mut(nt, count);
2294        // SAFETY: nt_ptr is a pointer to a Box.
2295        let boxed_name_and_types = unsafe { Box::from_raw(nt_ptr) };
2296        for nt in boxed_name_and_types {
2297            NameAndType::free_raw(nt);
2298        }
2299    }
2300
2301    extern "C" fn cb_intrinsic_outputs<A>(
2302        ctxt: *mut c_void,
2303        intrinsic: u32,
2304        count: *mut usize,
2305    ) -> *mut BNTypeWithConfidence
2306    where
2307        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2308    {
2309        let custom_arch = unsafe { &*(ctxt as *mut A) };
2310
2311        let Some(intrinsic) = custom_arch.intrinsic_from_id(IntrinsicId(intrinsic)) else {
2312            // SAFETY: Passed in to be written
2313            unsafe {
2314                *count = 0;
2315            }
2316            return std::ptr::null_mut();
2317        };
2318
2319        let outputs = intrinsic.outputs();
2320        let raw_outputs: Box<[BNTypeWithConfidence]> = outputs
2321            .into_iter()
2322            // Leaked to be freed later by `cb_free_type_list`.
2323            .map(Conf::<Ref<Type>>::into_raw)
2324            .collect();
2325
2326        // SAFETY: Passed in to be written
2327        unsafe {
2328            *count = raw_outputs.len();
2329        }
2330
2331        if raw_outputs.is_empty() {
2332            std::ptr::null_mut()
2333        } else {
2334            // Core is responsible for calling back to `cb_free_type_list`.
2335            Box::leak(raw_outputs).as_mut_ptr()
2336        }
2337    }
2338
2339    extern "C" fn cb_free_type_list<A>(
2340        ctxt: *mut c_void,
2341        tl: *mut BNTypeWithConfidence,
2342        count: usize,
2343    ) where
2344        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2345    {
2346        let _custom_arch = unsafe { &*(ctxt as *mut A) };
2347        if !tl.is_null() {
2348            let boxed_types =
2349                unsafe { Box::from_raw(std::ptr::slice_from_raw_parts_mut(tl, count)) };
2350            for ty in boxed_types {
2351                Conf::<Ref<Type>>::free_raw(ty);
2352            }
2353        }
2354    }
2355
2356    extern "C" fn cb_can_assemble<A>(ctxt: *mut c_void) -> bool
2357    where
2358        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2359    {
2360        let custom_arch = unsafe { &*(ctxt as *mut A) };
2361        custom_arch.can_assemble()
2362    }
2363
    /// Assemble `code` at `addr`, writing the machine code into the core-owned
    /// `buffer` and an error/empty string into `errors`. Returns `true` on success.
    extern "C" fn cb_assemble<A>(
        ctxt: *mut c_void,
        code: *const c_char,
        addr: u64,
        buffer: *mut BNDataBuffer,
        errors: *mut *mut c_char,
    ) -> bool
    where
        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
    {
        // SAFETY: the core guarantees `ctxt` is the architecture context registered with it.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        // An unreadable/invalid `code` pointer degrades to an empty source string.
        let code = raw_to_string(code).unwrap_or("".into());
        // NOTE: `buffer` stays owned by the core; wrapping it here only borrows
        // it for `set_data` — the `mem::forget` below prevents a double free.
        let mut buffer = DataBuffer::from_raw(buffer);

        let result = match custom_arch.assemble(&code, addr) {
            Ok(result) => {
                buffer.set_data(&result);
                // The core expects a (possibly empty) error string it can free.
                unsafe {
                    *errors = BnString::into_raw(BnString::new(""));
                }
                true
            }
            Err(result) => {
                unsafe {
                    *errors = BnString::into_raw(BnString::new(result));
                }
                false
            }
        };

        // Caller owns the data buffer, don't free it
        std::mem::forget(buffer);

        result
    }
2399
2400    extern "C" fn cb_is_never_branch_patch_available<A>(
2401        ctxt: *mut c_void,
2402        data: *const u8,
2403        addr: u64,
2404        len: usize,
2405    ) -> bool
2406    where
2407        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2408    {
2409        let custom_arch = unsafe { &*(ctxt as *mut A) };
2410        let data = unsafe { std::slice::from_raw_parts(data, len) };
2411        custom_arch.is_never_branch_patch_available(data, addr)
2412    }
2413
2414    extern "C" fn cb_is_always_branch_patch_available<A>(
2415        ctxt: *mut c_void,
2416        data: *const u8,
2417        addr: u64,
2418        len: usize,
2419    ) -> bool
2420    where
2421        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2422    {
2423        let custom_arch = unsafe { &*(ctxt as *mut A) };
2424        let data = unsafe { std::slice::from_raw_parts(data, len) };
2425        custom_arch.is_always_branch_patch_available(data, addr)
2426    }
2427
2428    extern "C" fn cb_is_invert_branch_patch_available<A>(
2429        ctxt: *mut c_void,
2430        data: *const u8,
2431        addr: u64,
2432        len: usize,
2433    ) -> bool
2434    where
2435        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2436    {
2437        let custom_arch = unsafe { &*(ctxt as *mut A) };
2438        let data = unsafe { std::slice::from_raw_parts(data, len) };
2439        custom_arch.is_invert_branch_patch_available(data, addr)
2440    }
2441
2442    extern "C" fn cb_is_skip_and_return_zero_patch_available<A>(
2443        ctxt: *mut c_void,
2444        data: *const u8,
2445        addr: u64,
2446        len: usize,
2447    ) -> bool
2448    where
2449        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2450    {
2451        let custom_arch = unsafe { &*(ctxt as *mut A) };
2452        let data = unsafe { std::slice::from_raw_parts(data, len) };
2453        custom_arch.is_skip_and_return_zero_patch_available(data, addr)
2454    }
2455
2456    extern "C" fn cb_is_skip_and_return_value_patch_available<A>(
2457        ctxt: *mut c_void,
2458        data: *const u8,
2459        addr: u64,
2460        len: usize,
2461    ) -> bool
2462    where
2463        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2464    {
2465        let custom_arch = unsafe { &*(ctxt as *mut A) };
2466        let data = unsafe { std::slice::from_raw_parts(data, len) };
2467        custom_arch.is_skip_and_return_value_patch_available(data, addr)
2468    }
2469
2470    extern "C" fn cb_convert_to_nop<A>(
2471        ctxt: *mut c_void,
2472        data: *mut u8,
2473        addr: u64,
2474        len: usize,
2475    ) -> bool
2476    where
2477        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2478    {
2479        let custom_arch = unsafe { &*(ctxt as *mut A) };
2480        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2481        custom_arch.convert_to_nop(data, addr)
2482    }
2483
2484    extern "C" fn cb_always_branch<A>(
2485        ctxt: *mut c_void,
2486        data: *mut u8,
2487        addr: u64,
2488        len: usize,
2489    ) -> bool
2490    where
2491        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2492    {
2493        let custom_arch = unsafe { &*(ctxt as *mut A) };
2494        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2495        custom_arch.always_branch(data, addr)
2496    }
2497
2498    extern "C" fn cb_invert_branch<A>(
2499        ctxt: *mut c_void,
2500        data: *mut u8,
2501        addr: u64,
2502        len: usize,
2503    ) -> bool
2504    where
2505        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2506    {
2507        let custom_arch = unsafe { &*(ctxt as *mut A) };
2508        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2509        custom_arch.invert_branch(data, addr)
2510    }
2511
2512    extern "C" fn cb_skip_and_return_value<A>(
2513        ctxt: *mut c_void,
2514        data: *mut u8,
2515        addr: u64,
2516        len: usize,
2517        val: u64,
2518    ) -> bool
2519    where
2520        A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
2521    {
2522        let custom_arch = unsafe { &*(ctxt as *mut A) };
2523        let data = unsafe { std::slice::from_raw_parts_mut(data, len) };
2524        custom_arch.skip_and_return_value(data, addr, val)
2525    }
2526
2527    let name = name.to_cstr();
2528
2529    let uninit_arch = ArchitectureBuilder {
2530        arch: MaybeUninit::zeroed(),
2531        func: Some(func),
2532    };
2533
2534    let raw = Box::into_raw(Box::new(uninit_arch));
2535    let mut custom_arch = BNCustomArchitecture {
2536        context: raw as *mut _,
2537        init: Some(cb_init::<A, F>),
2538        getEndianness: Some(cb_endianness::<A>),
2539        getAddressSize: Some(cb_address_size::<A>),
2540        getDefaultIntegerSize: Some(cb_default_integer_size::<A>),
2541        getInstructionAlignment: Some(cb_instruction_alignment::<A>),
2542        // TODO: Make getOpcodeDisplayLength optional.
2543        getMaxInstructionLength: Some(cb_max_instr_len::<A>),
2544        // TODO: Make getOpcodeDisplayLength optional.
2545        getOpcodeDisplayLength: Some(cb_opcode_display_len::<A>),
2546        getAssociatedArchitectureByAddress: Some(cb_associated_arch_by_addr::<A>),
2547        getInstructionInfo: Some(cb_instruction_info::<A>),
2548        getInstructionText: Some(cb_get_instruction_text::<A>),
2549        getInstructionTextWithContext: Some(cb_get_instruction_text_with_context::<A>),
2550        freeInstructionText: Some(cb_free_instruction_text),
2551        getInstructionLowLevelIL: Some(cb_instruction_llil::<A>),
2552        analyzeBasicBlocks: Some(cb_analyze_basic_blocks::<A>),
2553        liftFunction: Some(cb_lift_function::<A>),
2554        freeFunctionArchContext: None,
2555
2556        getRegisterName: Some(cb_reg_name::<A>),
2557        getFlagName: Some(cb_flag_name::<A>),
2558        getFlagWriteTypeName: Some(cb_flag_write_name::<A>),
2559        getSemanticFlagClassName: Some(cb_semantic_flag_class_name::<A>),
2560        getSemanticFlagGroupName: Some(cb_semantic_flag_group_name::<A>),
2561
2562        getFullWidthRegisters: Some(cb_registers_full_width::<A>),
2563        getAllRegisters: Some(cb_registers_all::<A>),
2564        getAllFlags: Some(cb_flags::<A>),
2565        getAllFlagWriteTypes: Some(cb_flag_write_types::<A>),
2566        getAllSemanticFlagClasses: Some(cb_semantic_flag_classes::<A>),
2567        getAllSemanticFlagGroups: Some(cb_semantic_flag_groups::<A>),
2568
2569        getFlagRole: Some(cb_flag_role::<A>),
2570        getFlagsRequiredForFlagCondition: Some(cb_flags_required_for_flag_cond::<A>),
2571
2572        getFlagsRequiredForSemanticFlagGroup: Some(cb_flags_required_for_semantic_flag_group::<A>),
2573        getFlagConditionsForSemanticFlagGroup: Some(
2574            cb_flag_conditions_for_semantic_flag_group::<A>,
2575        ),
2576        freeFlagConditionsForSemanticFlagGroup: Some(
2577            cb_free_flag_conditions_for_semantic_flag_group::<A>,
2578        ),
2579
2580        getFlagsWrittenByFlagWriteType: Some(cb_flags_written_by_write_type::<A>),
2581        getSemanticClassForFlagWriteType: Some(cb_semantic_class_for_flag_write_type::<A>),
2582
2583        getFlagWriteLowLevelIL: Some(cb_flag_write_llil::<A>),
2584        getFlagConditionLowLevelIL: Some(cb_flag_cond_llil::<A>),
2585        getSemanticFlagGroupLowLevelIL: Some(cb_flag_group_llil::<A>),
2586
2587        freeRegisterList: Some(cb_free_register_list),
2588        getRegisterInfo: Some(cb_register_info::<A>),
2589        getStackPointerRegister: Some(cb_stack_pointer::<A>),
2590        getLinkRegister: Some(cb_link_reg::<A>),
2591        getGlobalRegisters: Some(cb_registers_global::<A>),
2592        getSystemRegisters: Some(cb_registers_system::<A>),
2593
2594        getRegisterStackName: Some(cb_reg_stack_name::<A>),
2595        getAllRegisterStacks: Some(cb_reg_stacks::<A>),
2596        getRegisterStackInfo: Some(cb_reg_stack_info::<A>),
2597
2598        getIntrinsicClass: Some(cb_intrinsic_class::<A>),
2599        getIntrinsicName: Some(cb_intrinsic_name::<A>),
2600        getAllIntrinsics: Some(cb_intrinsics::<A>),
2601        getIntrinsicInputs: Some(cb_intrinsic_inputs::<A>),
2602        freeNameAndTypeList: Some(cb_free_name_and_types::<A>),
2603        getIntrinsicOutputs: Some(cb_intrinsic_outputs::<A>),
2604        freeTypeList: Some(cb_free_type_list::<A>),
2605
2606        canAssemble: Some(cb_can_assemble::<A>),
2607        assemble: Some(cb_assemble::<A>),
2608
2609        isNeverBranchPatchAvailable: Some(cb_is_never_branch_patch_available::<A>),
2610        isAlwaysBranchPatchAvailable: Some(cb_is_always_branch_patch_available::<A>),
2611        isInvertBranchPatchAvailable: Some(cb_is_invert_branch_patch_available::<A>),
2612        isSkipAndReturnZeroPatchAvailable: Some(cb_is_skip_and_return_zero_patch_available::<A>),
2613        isSkipAndReturnValuePatchAvailable: Some(cb_is_skip_and_return_value_patch_available::<A>),
2614
2615        convertToNop: Some(cb_convert_to_nop::<A>),
2616        alwaysBranch: Some(cb_always_branch::<A>),
2617        invertBranch: Some(cb_invert_branch::<A>),
2618        skipAndReturnValue: Some(cb_skip_and_return_value::<A>),
2619    };
2620
2621    customize(&mut custom_arch);
2622
2623    unsafe {
2624        let res = BNRegisterArchitecture(name.as_ptr(), &mut custom_arch as *mut _);
2625
2626        assert!(!res.is_null());
2627
2628        (*raw).arch.assume_init_mut()
2629    }
2630}
2631
/// Registers a custom [`Architecture`] whose instruction text rendering can consult a
/// typed per-function context (see [`ArchitectureWithFunctionContext`]).
///
/// Delegates to `register_architecture_impl` and then overrides two callbacks on the raw
/// `BNCustomArchitecture` before it is handed to the core:
///
/// * `freeFunctionArchContext` — drops the boxed `A::FunctionArchContext` when the core
///   releases it, and
/// * `getInstructionTextWithContext` — routes disassembly-text requests through
///   [`ArchitectureWithFunctionContext::instruction_text_with_typed_context`].
///
/// `func` is called once to construct the architecture instance. The returned reference
/// is `'static` because the registered architecture is leaked into the core and lives
/// for the remainder of the process.
pub fn register_architecture_with_function_context<A, F>(name: &str, func: F) -> &'static A
where
    A: 'static
        + ArchitectureWithFunctionContext<Handle = CustomArchitectureHandle<A>>
        + Send
        + Sync
        + Sized,
    F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
{
    // Core callback: reclaim a per-function context pointer previously handed to the
    // core. A null `context` means no context was ever attached, so there is nothing
    // to free.
    unsafe extern "C" fn cb_free_function_arch_context_typed<A>(
        _ctxt: *mut c_void,
        context: *mut c_void,
    ) where
        A: 'static
            + ArchitectureWithFunctionContext<Handle = CustomArchitectureHandle<A>>
            + Send
            + Sync,
    {
        if context.is_null() {
            return;
        }
        // The context was allocated via Box::into_raw in set_function_arch_context,
        // so we reconstruct the Box here and let it drop.
        let _ = unsafe { Box::from_raw(context as *mut A::FunctionArchContext) };
    }

    // Core callback: produce instruction text for the bytes at `addr`, optionally
    // consulting the function's typed context. Returns false when the implementation
    // declines to disassemble; on success writes the token array into `result`/`count`
    // and the consumed byte length back through `len` (which is in/out: it carries the
    // available byte count on entry).
    unsafe extern "C" fn cb_get_instruction_text_with_context_typed<A>(
        ctxt: *mut c_void,
        data: *const u8,
        addr: u64,
        len: *mut usize,
        context: *mut c_void,
        result: *mut *mut BNInstructionTextToken,
        count: *mut usize,
    ) -> bool
    where
        A: 'static
            + ArchitectureWithFunctionContext<Handle = CustomArchitectureHandle<A>>
            + Send
            + Sync,
    {
        // `ctxt` is the leaked architecture instance installed at registration time.
        let custom_arch = unsafe { &*(ctxt as *mut A) };
        let data = unsafe { std::slice::from_raw_parts(data, *len) };
        let result = unsafe { &mut *result };
        // A null context pointer is surfaced to the implementation as `None` rather
        // than a dangling reference.
        let typed_context: Option<&A::FunctionArchContext> = if context.is_null() {
            None
        } else {
            Some(unsafe { &*(context as *const A::FunctionArchContext) })
        };

        let Some((res_size, res_tokens)) =
            custom_arch.instruction_text_with_typed_context(data, addr, typed_context)
        else {
            return false;
        };

        // Convert the tokens to their raw FFI representation and hand ownership to the
        // core; the leaked allocation is reclaimed later via the architecture's
        // `freeInstructionText` callback.
        let res_tokens: Box<[BNInstructionTextToken]> = res_tokens
            .into_iter()
            .map(InstructionTextToken::into_raw)
            .collect();
        unsafe {
            let res_tokens = Box::leak(res_tokens);
            *result = res_tokens.as_mut_ptr();
            *count = res_tokens.len();
            *len = res_size;
        }
        true
    }

    // Register as usual, then splice in the context-aware callbacks.
    register_architecture_impl(name, func, |custom_arch| {
        custom_arch.freeFunctionArchContext = Some(cb_free_function_arch_context_typed::<A>);
        custom_arch.getInstructionTextWithContext =
            Some(cb_get_instruction_text_with_context_typed::<A>);
    })
}
2707
/// Handle to a custom [`Architecture`] registered with the core.
///
/// Wraps a raw pointer to the architecture instance that registration leaks into the
/// core (see `register_architecture_impl`, which `Box::into_raw`s the builder and
/// returns a `'static` reference), so the pointee remains valid for the life of the
/// process.
#[derive(Debug)]
pub struct CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
{
    // Raw pointer into the leaked architecture allocation; presumably never null
    // once registration succeeds — confirm against register_architecture_impl.
    handle: *mut A,
}
2715
// SAFETY: the bound requires `A: Send + Sync`, and the pointee is leaked at
// registration and never deallocated, so moving the raw pointer to another
// thread cannot outlive or race the destruction of the architecture.
unsafe impl<A> Send for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync
{
}
2720
// SAFETY: the handle only ever hands out shared `&A` access (see the `Borrow`
// impl), and `A: Sync` is required by the bound, so sharing the handle across
// threads is sound.
unsafe impl<A> Sync for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync
{
}
2725
2726impl<A> Clone for CustomArchitectureHandle<A>
2727where
2728    A: 'static + Architecture<Handle = Self> + Send + Sync,
2729{
2730    fn clone(&self) -> Self {
2731        *self
2732    }
2733}
2734
// The handle is just a raw pointer, so bitwise copies are valid and cheap.
impl<A> Copy for CustomArchitectureHandle<A> where
    A: 'static + Architecture<Handle = Self> + Send + Sync
{
}
2739
impl<A> Borrow<A> for CustomArchitectureHandle<A>
where
    A: 'static + Architecture<Handle = Self> + Send + Sync,
{
    /// Borrows the underlying architecture instance.
    fn borrow(&self) -> &A {
        // SAFETY: `handle` points at the architecture leaked during registration,
        // which is never freed, so the pointee outlives any borrow of `self`.
        unsafe { &*self.handle }
    }
}