1use crate::{
4 asm::Assembler,
5 wasm::{ToLSBytes, Type},
6 Error, Result,
7};
8use smallvec::SmallVec;
9use std::ops::{Deref, DerefMut};
10
// Instruction-family submodules extending `MacroAssembler` (presumably via
// additional `impl` blocks — TODO confirm; the module bodies are not visible here).
mod cmp;
mod embed;
mod float;
mod integer;
mod memory;
mod ret;
mod stack;
18
/// High-level wrapper around the low-level [`Assembler`].
///
/// All `Assembler` methods are reachable directly on this type through the
/// `Deref`/`DerefMut` impls below.
#[derive(Default, Debug, Clone)]
pub struct MacroAssembler {
    /// The underlying low-level assembler that owns the output buffer.
    pub(crate) asm: Assembler,
}
25
/// Immutable pass-through to the inner [`Assembler`], so `MacroAssembler`
/// exposes all of the assembler's read-only API directly.
impl Deref for MacroAssembler {
    type Target = Assembler;

    fn deref(&self) -> &Self::Target {
        &self.asm
    }
}
33
/// Mutable pass-through to the inner [`Assembler`], so emit/state-mutating
/// assembler methods can be called directly on `MacroAssembler`.
impl DerefMut for MacroAssembler {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.asm
    }
}
39
/// Description of a memory slot reserved by [`MacroAssembler::memory_write`].
pub struct MemoryInfo {
    /// Byte-encoded memory offset of the slot (as produced by `ToLSBytes`
    /// from the memory pointer at the time of the write).
    pub offset: SmallVec<[u8; 8]>,

    /// Size in bytes reserved for the slot.
    pub size: usize,
}
48
impl MacroAssembler {
    /// Reserve a memory slot sized for `ty` at the current memory pointer and
    /// emit code that stores the value on top of the stack into it.
    ///
    /// The slot size is `ty.align()`; the memory pointer (`self.mp`, provided
    /// by the inner assembler via `Deref`) is advanced by that size.
    ///
    /// Returns the offset/size of the reserved slot so callers can refer to
    /// it later.
    pub fn memory_write(&mut self, ty: impl Type) -> Result<MemoryInfo> {
        // Snapshot the offset BEFORE bumping the memory pointer: the value is
        // stored at the pre-increment position.
        let offset = self.mp.to_ls_bytes();

        let size = ty.align();
        self.increment_mp(size)?;

        self.memory_write_at(&offset)?;
        Ok(MemoryInfo { offset, size })
    }

    /// Push `bytes` onto the stack, then store them into a freshly reserved
    /// memory slot sized by `bytes.len()` (via [`Self::memory_write`]).
    pub fn memory_write_bytes(&mut self, bytes: &[u8]) -> Result<MemoryInfo> {
        let len = bytes.len();

        self.push(bytes)?;
        // `usize` is usable as a `Type` here — presumably its `align()` is the
        // byte length itself; TODO confirm against the `Type` impl for usize.
        self.memory_write(len)
    }

    /// Emit code storing the value on top of the stack at memory `offset`:
    /// pushes the offset and emits an MSTORE.
    pub fn memory_write_at(&mut self, offset: &[u8]) -> Result<()> {
        self.push(offset)?;
        self._mstore()?;

        Ok(())
    }

    /// Current program counter: the number of bytes emitted so far.
    //
    // NOTE(review): `len()` is silently truncated with `as u16` — buffers
    // longer than 0xffff would wrap. Verify an upstream limit guarantees the
    // code size fits in u16.
    pub fn pc(&self) -> u16 {
        self.asm.buffer().len() as u16
    }

    /// Number of bytes needed to push the current program counter as an
    /// immediate: 2 while `pc` fits in one byte, 3 once it needs two
    /// (presumably opcode byte + 1- or 2-byte operand — TODO confirm at the
    /// call sites).
    pub fn pc_offset(&self) -> u16 {
        if self.pc() > 0xff {
            3
        } else {
            2
        }
    }

    /// Emit a PUSH<n> instruction for `bytes`, where `n == bytes.len()`,
    /// followed by the immediate bytes themselves.
    ///
    /// # Errors
    ///
    /// Returns [`Error::StackIndexOutOfRange`] when `bytes` is longer than 32
    /// (the largest EVM push, PUSH32).
    pub fn push(&mut self, bytes: &[u8]) -> Result<()> {
        tracing::trace!("push bytes: 0x{}", hex::encode(bytes));

        let len = bytes.len();
        // Exhaustive dispatch to the fixed-width PUSH opcodes; PUSH0 emits no
        // immediate, so a zero-length slice is valid.
        match len {
            0 => self.asm._push0(),
            1 => self.asm._push1(),
            2 => self.asm._push2(),
            3 => self.asm._push3(),
            4 => self.asm._push4(),
            5 => self.asm._push5(),
            6 => self.asm._push6(),
            7 => self.asm._push7(),
            8 => self.asm._push8(),
            9 => self.asm._push9(),
            10 => self.asm._push10(),
            11 => self.asm._push11(),
            12 => self.asm._push12(),
            13 => self.asm._push13(),
            14 => self.asm._push14(),
            15 => self.asm._push15(),
            16 => self.asm._push16(),
            17 => self.asm._push17(),
            18 => self.asm._push18(),
            19 => self.asm._push19(),
            20 => self.asm._push20(),
            21 => self.asm._push21(),
            22 => self.asm._push22(),
            23 => self.asm._push23(),
            24 => self.asm._push24(),
            25 => self.asm._push25(),
            26 => self.asm._push26(),
            27 => self.asm._push27(),
            28 => self.asm._push28(),
            29 => self.asm._push29(),
            30 => self.asm._push30(),
            31 => self.asm._push31(),
            32 => self.asm._push32(),
            _ => return Err(Error::StackIndexOutOfRange(len as u16)),
        }?;

        // Immediate operand follows the opcode verbatim.
        self.asm.emitn(bytes);
        Ok(())
    }

    /// Apply `f` to the current memory pointer and return the result encoded
    /// as bytes via `ToLSBytes`. Does not modify the memory pointer.
    pub fn mp_offset<F>(&self, f: F) -> Result<SmallVec<[u8; 8]>>
    where
        F: Fn(usize) -> Result<usize>,
    {
        Ok(f(self.mp)?.to_ls_bytes())
    }

    /// Current stack pointer tracked by the inner assembler.
    pub fn sp(&self) -> u16 {
        self.asm.sp
    }

    /// Emit a SWAP<index> instruction exchanging the stack top with the item
    /// `index` slots below it. `index == 0` is a no-op (emits nothing).
    ///
    /// # Errors
    ///
    /// Returns [`Error::StackIndexOutOfRange`] for `index > 16` (SWAP16 is
    /// the deepest EVM swap).
    pub fn swap(&mut self, index: u16) -> Result<()> {
        tracing::trace!("swap index: {}", index);
        match index {
            0 => Ok(()),
            1 => self.asm._swap1(),
            2 => self.asm._swap2(),
            3 => self.asm._swap3(),
            4 => self.asm._swap4(),
            5 => self.asm._swap5(),
            6 => self.asm._swap6(),
            7 => self.asm._swap7(),
            8 => self.asm._swap8(),
            9 => self.asm._swap9(),
            10 => self.asm._swap10(),
            11 => self.asm._swap11(),
            12 => self.asm._swap12(),
            13 => self.asm._swap13(),
            14 => self.asm._swap14(),
            15 => self.asm._swap15(),
            16 => self.asm._swap16(),
            _ => Err(Error::StackIndexOutOfRange(index)),
        }
    }

    /// Emit a DUP<index> instruction duplicating the stack item `index` slots
    /// from the top. `index == 0` is a no-op (emits nothing).
    ///
    /// # Errors
    ///
    /// Returns [`Error::StackIndexOutOfRange`] for `index > 16` (DUP16 is the
    /// deepest EVM dup).
    pub fn dup(&mut self, index: u16) -> Result<()> {
        tracing::trace!("dup index: {}", index);
        match index {
            0 => Ok(()),
            1 => self.asm._dup1(),
            2 => self.asm._dup2(),
            3 => self.asm._dup3(),
            4 => self.asm._dup4(),
            5 => self.asm._dup5(),
            6 => self.asm._dup6(),
            7 => self.asm._dup7(),
            8 => self.asm._dup8(),
            9 => self.asm._dup9(),
            10 => self.asm._dup10(),
            11 => self.asm._dup11(),
            12 => self.asm._dup12(),
            13 => self.asm._dup13(),
            14 => self.asm._dup14(),
            15 => self.asm._dup15(),
            16 => self.asm._dup16(),
            _ => Err(Error::StackIndexOutOfRange(index)),
        }
    }

    /// Rotate the top `count + 1` stack items with a chain of swaps.
    ///
    /// With `from_top == true` the swaps run `count, count-1, …, 1`; otherwise
    /// they run `1, 2, …, count`. The two orders produce opposite rotation
    /// directions of the same window.
    pub fn shift_stack(&mut self, count: u16, from_top: bool) -> Result<()> {
        let mut swaps = 0;

        if from_top {
            // Descending swap chain: SWAP<count> … SWAP1.
            swaps = count;
            while swaps > 0 {
                self.swap(swaps)?;
                swaps -= 1;
            }
        } else {
            // Ascending swap chain: SWAP1 … SWAP<count>.
            while swaps < count {
                swaps += 1;
                self.swap(swaps)?;
            }
        }

        Ok(())
    }

    /// Emit a return sequence with the constant value `1` (see
    /// [`Self::emit_return_value`]).
    pub fn _return(&mut self) -> Result<()> {
        self.emit_return_value(&[1])
    }

    /// Emit code returning `value` from the contract:
    /// store `value` at memory offset 0 (MSTORE pops offset then value, so
    /// the offset is pushed last), then RETURN the 32-byte word at offset 0
    /// (RETURN pops offset then size, hence size 32 is pushed before
    /// offset 0).
    pub fn emit_return_value(&mut self, value: &[u8]) -> Result<()> {
        self.push(value)?;

        self.push(&[0])?;
        self.asm._mstore()?;

        self.push(&[32])?;
        self.push(&[0])?;
        self.asm._return()?;

        Ok(())
    }
}