1use binaryninjacore_sys::*;
18use std::fmt::{Debug, Formatter};
19
20use std::ops::Range;
21
22use crate::binary_view::BinaryView;
23use crate::rc::*;
24
/// Builder used to register a new segment with a [`BinaryView`].
///
/// Construct via [`SegmentBuilder::new`] (or `Segment::builder`), chain the
/// optional setters, then the crate consumes it with `create` to call into
/// the core.
#[must_use]
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct SegmentBuilder {
    // Virtual address range the segment will occupy in the view.
    ea: Range<u64>,
    // Backing range in the parent/raw data; `None` is reported to the core
    // as offset 0, length 0 (no backing).
    parent_backing: Option<Range<u64>>,
    // Permission/content flags converted to the core's raw bitmask on create.
    flags: SegmentFlags,
    // true => added as an auto (analysis-managed) segment, false => user segment.
    is_auto: bool,
}
33
34impl SegmentBuilder {
35 pub fn new(ea: Range<u64>) -> Self {
36 SegmentBuilder {
37 ea,
38 parent_backing: None,
39 flags: Default::default(),
40 is_auto: false,
41 }
42 }
43
44 pub fn parent_backing(mut self, parent_backing: Range<u64>) -> Self {
48 self.parent_backing = Some(parent_backing);
49 self
50 }
51
52 pub fn flags(mut self, flags: SegmentFlags) -> Self {
53 self.flags = flags;
54 self
55 }
56
57 pub fn is_auto(mut self, is_auto: bool) -> Self {
58 self.is_auto = is_auto;
59 self
60 }
61
62 pub(crate) fn create(self, view: &BinaryView) {
63 let ea_start = self.ea.start;
64 let ea_len = self.ea.end.wrapping_sub(ea_start);
65 let (b_start, b_len) = self
66 .parent_backing
67 .map_or((0, 0), |s| (s.start, s.end.wrapping_sub(s.start)));
68
69 unsafe {
70 if self.is_auto {
71 BNAddAutoSegment(
72 view.handle,
73 ea_start,
74 ea_len,
75 b_start,
76 b_len,
77 self.flags.into_raw(),
78 );
79 } else {
80 BNAddUserSegment(
81 view.handle,
82 ea_start,
83 ea_len,
84 b_start,
85 b_len,
86 self.flags.into_raw(),
87 );
88 }
89 }
90 }
91}
92
/// A memory segment of a binary view, wrapping the core's `BNSegment`.
#[derive(PartialEq, Eq, Hash)]
pub struct Segment {
    // Raw core handle; non-null invariant is asserted in the constructors.
    handle: *mut BNSegment,
}
97
98impl Segment {
99 pub(crate) unsafe fn from_raw(handle: *mut BNSegment) -> Self {
100 assert!(!handle.is_null());
101 Self { handle }
102 }
103
104 pub(crate) unsafe fn ref_from_raw(handle: *mut BNSegment) -> Ref<Self> {
105 assert!(!handle.is_null());
106 Ref::new(Self { handle })
107 }
108
109 pub fn builder(ea_range: Range<u64>) -> SegmentBuilder {
118 SegmentBuilder::new(ea_range)
119 }
120
121 pub fn address_range(&self) -> Range<u64> {
122 let start = unsafe { BNSegmentGetStart(self.handle) };
123 let end = unsafe { BNSegmentGetEnd(self.handle) };
124 start..end
125 }
126
127 pub fn parent_backing(&self) -> Option<Range<u64>> {
128 let start = unsafe { BNSegmentGetDataOffset(self.handle) };
129 let end = unsafe { BNSegmentGetDataEnd(self.handle) };
130
131 if start != end {
132 Some(start..end)
133 } else {
134 None
135 }
136 }
137
138 pub fn flags(&self) -> SegmentFlags {
139 let raw_flags = unsafe { BNSegmentGetFlags(self.handle) };
140 SegmentFlags::from_raw(raw_flags)
141 }
142
143 pub fn executable(&self) -> bool {
144 self.flags().executable
145 }
146
147 pub fn writable(&self) -> bool {
148 self.flags().writable
149 }
150
151 pub fn readable(&self) -> bool {
152 self.flags().readable
153 }
154
155 pub fn contains_data(&self) -> bool {
156 self.flags().contains_data
157 }
158
159 pub fn contains_code(&self) -> bool {
160 self.flags().contains_code
161 }
162
163 pub fn deny_write(&self) -> bool {
164 self.flags().deny_write
165 }
166
167 pub fn deny_execute(&self) -> bool {
168 self.flags().deny_execute
169 }
170
171 pub fn auto_defined(&self) -> bool {
172 unsafe { BNSegmentIsAutoDefined(self.handle) }
173 }
174}
175
impl Debug for Segment {
    // Debug output is built from the core accessors rather than the raw
    // pointer, so it shows the segment's logical state.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Segment")
            .field("address_range", &self.address_range())
            .field("parent_backing", &self.parent_backing())
            .field("auto_defined", &self.auto_defined())
            .field("flags", &self.flags())
            .finish()
    }
}
186
impl ToOwned for Segment {
    type Owned = Ref<Self>;

    // Taking ownership means acquiring a new core reference (see RefCountable).
    fn to_owned(&self) -> Self::Owned {
        unsafe { RefCountable::inc_ref(self) }
    }
}
194
// Reference counting is delegated to the core: `BNNewSegmentReference`
// acquires a reference, `BNFreeSegment` releases one.
unsafe impl RefCountable for Segment {
    unsafe fn inc_ref(handle: &Self) -> Ref<Self> {
        Ref::new(Self {
            handle: BNNewSegmentReference(handle.handle),
        })
    }

    unsafe fn dec_ref(handle: &Self) {
        BNFreeSegment(handle.handle);
    }
}
206
// Allows `Segment` to be yielded from core-allocated arrays of `*mut BNSegment`.
impl CoreArrayProvider for Segment {
    type Raw = *mut BNSegment;
    type Context = ();
    type Wrapped<'a> = Guard<'a, Segment>;
}
212
unsafe impl CoreArrayProviderInner for Segment {
    // The whole array is released through the core's list-free function.
    unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
        BNFreeSegmentList(raw, count);
    }

    // Elements are wrapped in a `Guard` so they are borrowed from the array
    // rather than owning a core reference.
    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
        Guard::new(Segment::from_raw(*raw), context)
    }
}
222
/// Permission and content flags for a segment, mirroring the core's
/// seven-bit segment-flag bitmask.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct SegmentFlags {
    pub executable: bool,
    pub writable: bool,
    pub readable: bool,
    pub contains_data: bool,
    pub contains_code: bool,
    pub deny_write: bool,
    pub deny_execute: bool,
}

impl SegmentFlags {
    // Bit positions in the core's raw flag word. Kept as a single source of
    // truth so `from_raw` and `into_raw` cannot drift apart (previously one
    // used hex literals and the other used shifts).
    const EXECUTABLE: u32 = 1 << 0;
    const WRITABLE: u32 = 1 << 1;
    const READABLE: u32 = 1 << 2;
    const CONTAINS_DATA: u32 = 1 << 3;
    const CONTAINS_CODE: u32 = 1 << 4;
    const DENY_WRITE: u32 = 1 << 5;
    const DENY_EXECUTE: u32 = 1 << 6;

    /// Creates an empty flag set (all flags off).
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets or clears the executable flag.
    pub fn executable(mut self, executable: bool) -> Self {
        self.executable = executable;
        self
    }

    /// Sets or clears the writable flag.
    pub fn writable(mut self, writable: bool) -> Self {
        self.writable = writable;
        self
    }

    /// Sets or clears the readable flag.
    pub fn readable(mut self, readable: bool) -> Self {
        self.readable = readable;
        self
    }

    /// Sets or clears the contains-data flag.
    pub fn contains_data(mut self, contains_data: bool) -> Self {
        self.contains_data = contains_data;
        self
    }

    /// Sets or clears the contains-code flag.
    pub fn contains_code(mut self, contains_code: bool) -> Self {
        self.contains_code = contains_code;
        self
    }

    /// Sets or clears the deny-write flag.
    pub fn deny_write(mut self, deny_write: bool) -> Self {
        self.deny_write = deny_write;
        self
    }

    /// Sets or clears the deny-execute flag.
    pub fn deny_execute(mut self, deny_execute: bool) -> Self {
        self.deny_execute = deny_execute;
        self
    }

    /// Decodes the core's raw flag word into individual booleans.
    pub(crate) fn from_raw(flags: u32) -> Self {
        Self {
            executable: flags & Self::EXECUTABLE != 0,
            writable: flags & Self::WRITABLE != 0,
            readable: flags & Self::READABLE != 0,
            contains_data: flags & Self::CONTAINS_DATA != 0,
            contains_code: flags & Self::CONTAINS_CODE != 0,
            deny_write: flags & Self::DENY_WRITE != 0,
            deny_execute: flags & Self::DENY_EXECUTE != 0,
        }
    }

    /// Encodes the flags into the core's raw flag word. Exact inverse of
    /// [`SegmentFlags::from_raw`] for the low seven bits.
    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn into_raw(&self) -> u32 {
        let mut raw = 0;
        if self.executable {
            raw |= Self::EXECUTABLE;
        }
        if self.writable {
            raw |= Self::WRITABLE;
        }
        if self.readable {
            raw |= Self::READABLE;
        }
        if self.contains_data {
            raw |= Self::CONTAINS_DATA;
        }
        if self.contains_code {
            raw |= Self::CONTAINS_CODE;
        }
        if self.deny_write {
            raw |= Self::DENY_WRITE;
        }
        if self.deny_execute {
            raw |= Self::DENY_EXECUTE;
        }
        raw
    }
}
296}