@@ -5,7 +5,8 @@ use crate::jets::seam::util::get_by;
 use crate::jets::util::slot;
 use crate::jets::{Jet, JetErr::*};
 use crate::mem::{NockStack, Preserve};
-use crate::noun::{slot_pam, DirectAtom, Noun, D, NOUN_NONE, T};
+use crate::mug::mug_u32;
+use crate::noun::{slot_bar, slot_pam, DirectAtom, Noun, D, NOUN_NONE, T};
 use crate::unifying_equality::unifying_equality;
 use ares_macros::tas;
 use either::{Left, Right};
@@ -82,14 +83,14 @@ pub struct Pile {
 
 impl Pile {
     pub fn new(context: &mut Context, pile: Noun) -> Self {
-        let well = slot(pile, slot_pam(5))
+        let well = slot(pile, slot_pam(3))
             .unwrap()
             .as_atom()
             .unwrap()
             .as_u64()
             .unwrap() as usize;
         Self {
-            sans: slot(pile, slot_pam(3))
+            sans: slot(pile, slot_pam(1))
                 .unwrap()
                 .as_atom()
                 .unwrap()
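Both fields above are read the same way: `slot(pile, slot_pam(n))` followed by the same unwrap chain. A hypothetical helper that factors that chain out for the `usize` case (not part of this change; it assumes, as the calls above do, that `slot` takes a `u64` axis and that the field is a direct atom small enough for `as_u64`):

```rust
// Hypothetical helper mirroring the unwrap chain in Pile::new above.
// Panics, like the original, if the field is missing or not a small direct atom.
fn pile_field_as_usize(pile: Noun, axis: u64) -> usize {
    slot(pile, axis)
        .unwrap()
        .as_atom()
        .unwrap()
        .as_u64()
        .unwrap() as usize
}

// e.g. let well = pile_field_as_usize(pile, slot_pam(3));
```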
@@ -114,28 +115,20 @@ impl Preserve for Pile {
     }
 }
 
-// +$  pile
-//   $:  =bell
-//       walt=(list @uvre)
-//       sans=$~(0v1 @uvre)  ::  0v0 reserved for indirect
-//       will=(map @uwoo blob)
-//       well=$~(0w2 @uwoo)  ::  0w0 indirect, 0w1 direct
-//   ==
-// hill=(map @uxor pile)
-
 #[derive(Copy, Clone)]
 pub struct Hill {
     pub lent: usize,
     pub data: *mut Pile,
 }
 
 impl Hill {
-    /// Transforms the $hill from the `CgContext` into a `Hill` structure and
+    /// Transforms the $hill from the `CgContext.fuji` into a `Hill` structure and
     /// allocates it on the NockStack.
     fn new(context: &mut Context) -> Self {
         let stack = &mut context.stack;
         let fuji = context.cg_context.fuji;
-        let well = slot(fuji, slot_pam(5)) // total number of blocks
+        let mut hill = slot(fuji, slot_pam(2)).expect("Codegen fuji should have hill");
+        let next = slot(fuji, slot_pam(3)) // total number of piles
             .expect("Codegen fuji should have next")
             .as_atom()
             .unwrap()
@@ -144,24 +137,23 @@ impl Hill {
         unsafe {
             let hill_p = stack.struct_alloc::<Hill>(1);
             *hill_p = Hill {
-                data: stack.struct_alloc::<Pile>(well),
-                lent: well,
+                data: stack.struct_alloc::<Pile>(next),
+                lent: next,
             };
-            let piles: &mut [Pile] = from_raw_parts_mut((*hill_p).data, well);
-            let mut hill = slot(fuji, slot_pam(2)).expect("Codegen fuji should have hill");
+            let pils: &mut [Pile] = from_raw_parts_mut((*hill_p).data, next);
             let mut i = 0;
-            while i < well {
+            while i < next {
                 // XX walk tree manually
                 let piln = get_by(&mut context.stack, &mut hill, &mut D(i as u64))
                     .unwrap()
                     .unwrap();
                 let pile = Pile::new(context, piln);
-                piles[i] = pile;
+                pils[i] = pile;
                 i += 1;
             }
             Self {
-                data: piles.as_mut_ptr(),
-                lent: well,
+                data: pils.as_mut_ptr(),
+                lent: next,
             }
         }
     }
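With the hunks above, `Hill::new` sizes its backing array by `next`, the pile count read from `slot_pam(3)` of the fuji, and fills one `Pile` per `@uxor` by looking each index up in the `hill` map. A sketch of how code in this module could read the result back, using the public `data`/`lent` fields shown above (the `uxor` index is illustrative):

```rust
// Sketch only: a Hill is a stack-allocated array of Piles indexed by @uxor.
let hill = Hill::new(context);
let piles: &[Pile] = unsafe { from_raw_parts(hill.data as *const Pile, hill.lent) };
let pile = &piles[uxor]; // the Pile compiled for the arm with index `uxor`
```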
@@ -170,21 +162,26 @@ impl Hill {
         unsafe { from_raw_parts(self.data as *const Pile, self.lent) }
     }
 
-    fn _as_mut_slice<'a>(&mut self) -> &'a mut [Pile] {
+    fn as_mut_slice<'a>(&mut self) -> &'a mut [Pile] {
         unsafe { from_raw_parts_mut(self.data as *mut Pile, self.lent) }
     }
 }
 
 impl Preserve for Hill {
     unsafe fn preserve(&mut self, stack: &mut NockStack) {
+        // if stack.is_in_frame(self as *const Hill) {
         let dest: *mut Hill = stack.struct_alloc_in_previous_frame(1);
-        copy_nonoverlapping(self as *mut Hill, dest, 1);
+        (*dest).data = stack.struct_alloc_in_previous_frame::<Pile>(self.lent);
+        (*dest).lent = self.lent;
+        copy_nonoverlapping::<Pile>(self.data, (*dest).data, self.lent);
         let mut i = 0;
         while i < self.lent {
-            let pile = (self.data as *mut Pile).add(i).as_mut().unwrap();
+            let pile = ((*dest).data as *mut Pile).add(i).as_mut().unwrap();
             stack.preserve(pile);
             i += 1;
         }
+        (*self) = *dest;
+        // }
     }
 
     unsafe fn assert_in_stack(&self, stack: &NockStack) {
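The rewritten `preserve` no longer copies just the `Hill` header into the previous frame: it allocates a fresh `Pile` array there, copies the elements across, recursively preserves each copied element so that its own pointers are moved as well, and finally repoints `self` at the relocated storage. The same shape, written once as a hypothetical helper (not in this change; it assumes `NockStack::preserve` is generic over `Preserve`, as its use above suggests):

```rust
// Hypothetical helper: the deep-copy preserve pattern for any
// { data: *mut T, lent: usize } pair. The caller repoints its data field
// at the returned pointer.
unsafe fn preserve_array<T: Preserve>(data: *mut T, lent: usize, stack: &mut NockStack) -> *mut T {
    // 1. allocate room for the elements in the previous (caller's) frame
    let dest: *mut T = stack.struct_alloc_in_previous_frame::<T>(lent);
    // 2. copy the raw elements across
    copy_nonoverlapping::<T>(data, dest, lent);
    // 3. preserve each *copy*, so pointers inside the elements move too
    let mut i = 0;
    while i < lent {
        stack.preserve(dest.add(i).as_mut().unwrap());
        i += 1;
    }
    dest
}
```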
@@ -208,7 +205,7 @@ pub struct Blocks {
 }
 
 impl Blocks {
-    /// Transforms the $hill from the `CgContext` into a `Blocks` structure and
+    /// Transforms the $will into a `Blocks` structure and
     /// allocates it on the NockStack.
     fn new(context: &mut Context, will: &mut Noun, well: usize) -> Self {
         let stack = &mut context.stack;
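The corrected doc comment matches the signature: `Blocks::new` is handed the `$will` map and its `well` count directly rather than digging them out of the `CgContext`. A hypothetical call site (how `will` and `well` are obtained from a pile is illustrative only):

```rust
// Sketch only: flatten one pile's will, a (map @uwoo blob), into `well`
// stack-allocated blob nouns.
fn blocks_for_pile(context: &mut Context, mut will: Noun, well: usize) -> Blocks {
    Blocks::new(context, &mut will, well)
}
```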
@@ -239,21 +236,26 @@ impl Blocks {
         unsafe { from_raw_parts(self.data as *const Noun, self.lent) }
     }
 
-    fn _as_mut_slice<'a>(&mut self) -> &'a mut [Noun] {
+    fn as_mut_slice<'a>(&mut self) -> &'a mut [Noun] {
         unsafe { from_raw_parts_mut(self.data as *mut Noun, self.lent) }
     }
 }
 
 impl Preserve for Blocks {
     unsafe fn preserve(&mut self, stack: &mut NockStack) {
+        // if stack.is_in_frame(self as *const Blocks) {
         let dest: *mut Blocks = stack.struct_alloc_in_previous_frame(1);
-        copy_nonoverlapping(self as *mut Blocks, dest, 1);
+        (*dest).data = stack.struct_alloc_in_previous_frame::<Noun>(self.lent);
+        (*dest).lent = self.lent;
+        copy_nonoverlapping::<Noun>(self.data, (*dest).data, self.lent);
         let mut i = 0;
         while i < self.lent {
-            let block = (self.data as *mut Noun).add(i).as_mut().unwrap();
-            stack.preserve(block);
+            let blob = ((*dest).data as *mut Noun).add(i).as_mut().unwrap();
+            stack.preserve(blob);
             i += 1;
         }
+        (*self) = *dest;
+        // }
     }
 
     unsafe fn assert_in_stack(&self, stack: &NockStack) {
@@ -779,7 +781,6 @@ pub fn cg_interpret_with_snapshot(
             }
             tas!(b"jmp") => {
                 // [%jmp a=@uxor v=(list @uvre)]
-                eprintln!("jmp: {}", inst_cell);
                 let t_jmp = inst_cell.tail().as_cell().unwrap();
 
                 let jmp_a = t_jmp.head().as_atom().unwrap().as_u64().unwrap() as usize;