/* YAP 7.1.0 — grow.c */
1/*************************************************************************
2 * *
3 * YAP Prolog *
4 * *
5 * Yap Prolog was developed at NCCUP - Universidade do Porto *
6 * *
7 * Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
8 * *
9 **************************************************************************
10 * *
11 * File: grow.c *
12 * Last rev: Thu Feb 23 1989 vv *
13 * mods: *
14 * comments: Shifting the stacks *
15 * *
16 *************************************************************************/
17
18#include "absmi.h"
19#include "YapHeap.h"
20#include "yapio.h"
21#include "alloc.h"
22#include "sshift.h"
23#include "YapCompile.h"
24#include "attvar.h"
25#include "cut_c.h"
26#if HAVE_STRING_H
27#include <string.h>
28#endif
29#if YAPOR_THREADS
30#include "opt.mavar.h"
31#endif /* YAPOR_THREADS */
32
33#if !HAVE_STRNCAT
34#define strncat(s0,s1,sz) strcat(s0,s1)
35#endif
36
/* How the stacks are being relocated: resized/shifted in place,
   fully copied to another worker, or incrementally copied (only the
   segments that changed, for or-parallel workers).
   NOTE(review): several helpers below receive one of these values
   through a `bool` parameter and then compare it against
   STACK_INCREMENTAL_COPYING (== 2); a bool can never hold 2, so that
   comparison looks unreachable — confirm intended. */
typedef enum {
  STACK_SHIFTING = 0,
  STACK_COPYING = 1,
  STACK_INCREMENTAL_COPYING = 2
} what_stack_copying;
42
43
44
45static Int p_growheap( USES_REGS1 );
46static Int p_growstack( USES_REGS1 );
47static Int p_inform_trail_overflows( USES_REGS1 );
48static Int p_inform_heap_overflows( USES_REGS1 );
49static Int p_inform_stack_overflows( USES_REGS1 );
50
51/* #define undf7 */
52/* #define undf5 */
53
54static int growstack(size_t CACHE_TYPE);
55static void MoveGlobal( CACHE_TYPE1 );
56static void MoveLocalAndTrail( CACHE_TYPE1 );
57static void SetHeapRegs(bool CACHE_TYPE);
58static void AdjustTrail(bool, bool CACHE_TYPE);
59static void AdjustLocal(bool CACHE_TYPE);
60static void AdjustGlobal(Int, bool CACHE_TYPE);
61static void AdjustGrowStack( CACHE_TYPE1 );
62static int static_growheap(size_t,bool,struct intermediates *,tr_fr_ptr *, TokEntry **, VarEntry ** CACHE_TYPE);
63static void cpcellsd(CELL *, CELL *, CELL);
64static CELL AdjustAppl(CELL CACHE_TYPE);
65static CELL AdjustPair(CELL CACHE_TYPE);
66static void AdjustStacksAndTrail(Int, bool CACHE_TYPE);
67static void AdjustRegs(int CACHE_TYPE);
68static Term AdjustGlobTerm(Term CACHE_TYPE);
69
70static void
71LeaveGrowMode(prolog_exec_mode grow_mode)
72{
73 CACHE_REGS
74 LOCAL_PrologMode &= ~grow_mode;
75}
76
77
78static void
79cpcellsd(register CELL *Dest, register CELL *Org, CELL NOf)
80{
81#if HAVE_MEMMOVE
82 memmove((void *)Dest, (void *)Org, NOf*sizeof(CELL));
83#else
84 register Int n_of = NOf;
85 for (; n_of >= 0; n_of--)
86 *Dest++ = *Org++;
87#endif
88}
89
90
/* After the workspace has been (or is about to be) relocated,
   snapshot the old stack boundaries into LOCAL_Old* and rewrite
   every cached base pointer and abstract-machine register through
   the Adjust macros.  The shift offsets (LOCAL_TrDiff, LOCAL_LDiff,
   LOCAL_GDiff, LOCAL_BaseDiff, ...) must have been set by the caller
   before this runs.
   copying_threads: true when the stacks are being copied to another
   worker; in that case the global arena handle is left for the
   receiving thread to relocate. */
static void
SetHeapRegs(bool copying_threads USES_REGS)
{
#ifdef undf7
  Sfprintf(stderr,"HeapBase = %x\tHeapTop=%x\nGlobalBase=%x\tGlobalTop=%x\nLocalBase=%x\tLocatTop=%x\n", Yap_HeapBase, HeapTop, LOCAL_GlobalBase, H, LCL0, ASP);
#endif
  /* Save the old stack pointers first: the Adjust macros below are
     defined in terms of these LOCAL_Old* values. */
  LOCAL_OldLCL0 = LCL0;
  LOCAL_OldASP = ASP;
  LOCAL_OldGlobalBase = (CELL *)LOCAL_GlobalBase;
  LOCAL_OldH = HR;
  LOCAL_OldH0 = H0;
  LOCAL_OldTrailBase = LOCAL_TrailBase;
  LOCAL_OldTrailTop = LOCAL_TrailTop;
  LOCAL_OldTR = TR;
  LOCAL_OldHeapBase = Yap_HeapBase;
  LOCAL_OldHeapTop = HeapTop;
  /* Adjust the stack base/limit addresses. */
  LOCAL_TrailBase = TrailAddrAdjust(LOCAL_TrailBase);
  LOCAL_TrailTop = TrailAddrAdjust(LOCAL_TrailTop);
  CurrentTrailTop = (tr_fr_ptr)(LOCAL_TrailTop-MinTrailGap);
  LOCAL_GlobalBase = BaseAddrAdjust(LOCAL_GlobalBase);
  LOCAL_LocalBase = LocalAddrAdjust(LOCAL_LocalBase);
#if !USE_SYSTEM_MALLOC && !USE_DL_MALLOC
  AuxSp = PtoBaseAdjust(AuxSp);
  AuxTop = (ADDR)PtoBaseAdjust((CELL *)AuxTop);
#endif
#if !USE_SYSTEM_MALLOC
  if (HeapLim)
    HeapLim = BaseAddrAdjust(HeapLim);
#endif
  /* Registers pointing into one of the stacks; each is only
     adjusted when non-NULL. */
  if (ENV)
    ENV = PtoLocAdjust(ENV);
  if (ASP)
    ASP = PtoLocAdjust(ASP);
  if (H0)
    H0 = PtoGloAdjust(H0);
  if (LCL0)
    LCL0 = PtoLocAdjust(LCL0);
  if (HR)
    HR = PtoGloAdjust(HR);
  if (Yap_REGS.CUT_C_TOP)
    Yap_REGS.CUT_C_TOP = CutCAdjust(Yap_REGS.CUT_C_TOP);
  if (HB)
    HB = PtoGloAdjust(HB);
  if (LOCAL_OpenArray)
    LOCAL_OpenArray = PtoGloAdjust(LOCAL_OpenArray);
  if (B)
    B = ChoicePtrAdjust(B);
#ifdef YAPOR_THREADS
  {
    choiceptr cpt;
    cpt = Get_LOCAL_top_cp();
    if (cpt) {
      //      cpt = ChoicePtrAdjust( cpt );
      Set_LOCAL_top_cp( cpt );
    }
  }
#endif
#ifdef TABLING
  /* Tabling freeze registers also point into the stacks. */
  if (B_FZ)
    B_FZ = ChoicePtrAdjust(B_FZ);
  if (BB)
    BB = ChoicePtrAdjust(BB);
  if (H_FZ)
    H_FZ = PtoGloAdjust(H_FZ);
  if (TR_FZ)
    TR_FZ = PtoTRAdjust(TR_FZ);
#endif /* TABLING */
  if (TR)
    TR = PtoTRAdjust(TR);
  if (YENV)
    YENV = PtoLocAdjust(YENV);
  /* S may point either into the global or the local stack. */
  if (IsOldGlobalPtr(S))
    S = PtoGloAdjust(S);
  else if (IsOldLocalPtr(S))
    S = PtoLocAdjust(S);
  if (!copying_threads) {
    if (LOCAL_GlobalArena)
      LOCAL_GlobalArena = AbsAppl(PtoGloAdjust(RepAppl(LOCAL_GlobalArena)));
  }
  /* Term-valued registers kept as tagged appl terms in the global. */
  if (LOCAL_AttsMutableList)
    LOCAL_AttsMutableList = AbsAppl(PtoGloAdjust(RepAppl(LOCAL_AttsMutableList)));
  if (LOCAL_WokenGoals)
    LOCAL_WokenGoals = AbsAppl(PtoGloAdjust(RepAppl(LOCAL_WokenGoals)));
  LOCAL_GcGeneration = AbsAppl(PtoGloAdjust(RepAppl(LOCAL_GcGeneration)));
  LOCAL_GcPhase = AbsAppl(PtoGloAdjust(RepAppl(LOCAL_GcPhase)));
}
180
/* Shift the local stack together with the trail (they occupy one
   contiguous region from ASP up to TR) to their new location.
   Assumes SetHeapRegs already adjusted ASP; under system malloc the
   whole old block may itself have moved, so the source address is
   rebased by LOCAL_BaseDiff. */
static void
MoveLocalAndTrail( USES_REGS1 )
{
	/* cpcellsd(To,From,NOfCells) - copy the cells downwards */
#if USE_SYSTEM_MALLOC
  cpcellsd(ASP, (CELL *)((char *)LOCAL_OldASP+LOCAL_BaseDiff), (CELL *)LOCAL_OldTR - LOCAL_OldASP);
#else
  cpcellsd(ASP, LOCAL_OldASP, (CELL *)LOCAL_OldTR - LOCAL_OldASP);
#endif
}
191
192#ifdef YAPOR_THREADS
193
/* Or-parallel worker copy: duplicate the local stack plus trail
   (ASP..TR, one contiguous region) into this worker's space.
   Only compiled in the USE_SYSTEM_MALLOC configuration; otherwise
   it is a no-op — presumably the non-malloc build never takes this
   path (TODO confirm). */
static void
CopyLocalAndTrail( USES_REGS1 )
{
	/* cpcellsd(To,From,NOfCells) - copy the cells downwards */
#if USE_SYSTEM_MALLOC
  cpcellsd((void *)ASP, (void *)LOCAL_OldASP, (CELL *)LOCAL_OldTR - LOCAL_OldASP);
#endif
}
202
/* Incremental or-parallel copy: transfer only the segments of the
   global stack, local stack and trail that differ, from the source
   worker's address range (LOCAL_start_* .. LOCAL_end_*) into this
   worker's corresponding (adjusted) addresses.  The sizes are raw
   byte counts — the start/end markers are assumed to be byte
   addresses (TODO confirm against their declarations). */
static void
IncrementalCopyStacksFromWorker( USES_REGS1 )
{
  memcpy((void *) PtoGloAdjust((CELL *)LOCAL_start_global_copy),
	 (void *) (LOCAL_start_global_copy),
	 (size_t) (LOCAL_end_global_copy - LOCAL_start_global_copy));
  memcpy((void *) PtoLocAdjust((CELL *)LOCAL_start_local_copy),
	 (void *) LOCAL_start_local_copy,
	 (size_t) (LOCAL_end_local_copy - LOCAL_start_local_copy));
  memcpy((void *) PtoTRAdjust((tr_fr_ptr)LOCAL_start_trail_copy),
	 (void *) (LOCAL_start_trail_copy),
	 (size_t) (LOCAL_end_trail_copy - LOCAL_start_trail_copy));
}
216
217#ifndef TABLING
/* Fetch the binding stored at cell aux_ptr in worker worker_p's
   stacks and relocate the term into this worker's address space.
   Cells above HR are treated as local-stack slots (indexed from
   LCL0); everything else as global-stack slots (indexed from H0). */
static CELL
worker_p_binding(int worker_p, CELL *aux_ptr)
{
  CACHE_REGS
  if (aux_ptr > HR) {
    CELL reg = REMOTE_ThreadHandle(worker_p).current_yaam_regs->LCL0_[aux_ptr-LCL0];
    reg = AdjustGlobTerm(reg PASS_REGS);
    return reg;
  } else {
    CELL reg = REMOTE_ThreadHandle(worker_p).current_yaam_regs-> H0_[aux_ptr-H0];
    reg = AdjustGlobTerm(reg PASS_REGS);
    return reg;
  }
}
232#endif
233
/* After an incremental copy, replay the source worker's trail
   entries between the start and end choicepoints of the copied
   segment, reinstalling the bindings they record into this worker's
   stacks.  With TABLING the values come from TrailVal(); otherwise
   they are fetched from the source worker via worker_p_binding(). */
static void
RestoreTrail(int worker_p USES_REGS)
{
  tr_fr_ptr aux_tr;

  /* install phase --> TR and LOCAL_top_cp->cp_tr are equal */
  aux_tr = ((choiceptr) LOCAL_start_local_copy)->cp_tr;
  TR = ((choiceptr) LOCAL_end_local_copy)->cp_tr;
  if (TR == aux_tr)
    return;
  /* aux_tr is the younger (higher) mark; anything else is an
     internal inconsistency */
  if (aux_tr < TR){
    Yap_Error(SYSTEM_ERROR_INTERNAL, TermNil, "oops");
  }
  /* hash table used to detect multi-assignment variables already
     seen while walking the trail backwards */
  Yap_NEW_MAHASH((ma_h_inner_struct *)HR PASS_REGS);
  while (TR != aux_tr) {
    CELL aux_cell = TrailTerm(--aux_tr);
    if (IsVarTerm(aux_cell)) {
      /* plain binding: reinstall it if the bound cell lies outside
         the freshly copied global segment, or below the copied
         choicepoints */
      if (aux_cell < LOCAL_start_global_copy || EQUAL_OR_YOUNGER_CP((choiceptr)LOCAL_end_local_copy, (choiceptr)aux_cell)) {
        YAPOR_ERROR_CHECKING((CELL *)aux_cell < H0, "RestoreTrail: aux_cell < H0");
        YAPOR_ERROR_CHECKING((ADDR)aux_cell > LOCAL_LocalBase, "RestoreTrail: aux_cell > LocalBase");
#ifdef TABLING
	*((CELL *) aux_cell) = TrailVal(aux_tr);
#else
	*((CELL *) aux_cell) = worker_p_binding(worker_p, CellPtr(aux_cell));
#endif /* TABLING */
      }
#ifdef TABLING
    } else if (IsPairTerm(aux_cell)) {
      /* avoid frozen segments: a pair-tagged trail entry pointing
         into the trail marks a segment boundary to jump over */
      aux_cell = (CELL) RepPair(aux_cell);
      if (IN_BETWEEN(LOCAL_TrailBase, aux_cell, LOCAL_TrailTop)) {
	aux_tr = (tr_fr_ptr) aux_cell;
      }
#endif /* TABLING */
#ifdef MULTI_ASSIGNMENT_VARIABLES
    } else if (IsApplTerm(aux_cell)) {
      CELL *cell_ptr = RepAppl(aux_cell);
      if (((CELL *)aux_cell < Get_LOCAL_top_cp()->cp_h ||
          EQUAL_OR_YOUNGER_CP(Get_LOCAL_top_cp(), (choiceptr)aux_cell)) &&
	  !Yap_lookup_ma_var(cell_ptr PASS_REGS)) {
	/* first time we found the variable, let's put the new value */
#ifdef TABLING
	*cell_ptr = TrailVal(aux_tr);
#else
	*cell_ptr = worker_p_binding(worker_p, cell_ptr);
#endif /* TABLING */
      }
      /* skip the old value */
      aux_tr--;
#endif /* MULTI_ASSIGNMENT_VARIABLES */
    }
  }
}
287
288#endif /* YAPOR_THREADS */
289
/* Shift the whole global stack (old base up to the old H) to the
   new base.  Assumes SetHeapRegs already saved LOCAL_OldGlobalBase /
   LOCAL_OldH and adjusted LOCAL_GlobalBase. */
static void
MoveGlobal( USES_REGS1 )
{
  /*
   * cpcellsd(To,From,NOfCells) - copy the cells downwards - in
   * absmi.asm
   */
  cpcellsd((CELL *)LOCAL_GlobalBase, (CELL *)LOCAL_OldGlobalBase, LOCAL_OldH - (CELL *)LOCAL_OldGlobalBase);
}
299
/* Shift the global stack upward within the (already adjusted)
   workspace by the net global displacement GDiff-BaseDiff — used
   when the stacks stayed put but the global must slide to open
   room below it. */
static void
MoveExpandedGlobal( USES_REGS1 )
{
  /*
   * cpcellsd(To,From,NOfCells) - copy the cells downwards - in
   * absmi.asm
   */
  cpcellsd((CELL *)(LOCAL_GlobalBase+(LOCAL_GDiff-LOCAL_BaseDiff)), (CELL *)LOCAL_GlobalBase, LOCAL_OldH - (CELL *)LOCAL_OldGlobalBase);
}
309
/* First phase of splitting the global stack around a hole: move the
   whole global by GDiff0 (the displacement of the part below the
   split); MoveHalfGlobal later moves the upper part the rest of the
   way (GDiff).  Source base differs per allocator, as in
   MoveLocalAndTrail. */
static void
MoveGlobalWithHole( USES_REGS1 )
{
  /*
   * cpcellsd(To,From,NOfCells) - copy the cells downwards - in
   * absmi.asm
   */
#if USE_SYSTEM_MALLOC
  cpcellsd((CELL *)((char *)LOCAL_GlobalBase+(LOCAL_GDiff0-LOCAL_BaseDiff)), (CELL *)LOCAL_GlobalBase, LOCAL_OldH - (CELL *)LOCAL_OldGlobalBase);
#else
  cpcellsd((CELL *)((char *)LOCAL_OldGlobalBase+LOCAL_GDiff0), (CELL *)LOCAL_OldGlobalBase, LOCAL_OldH - (CELL *)LOCAL_OldGlobalBase);
#endif
}
323
324static void
325MoveHalfGlobal(CELL *OldPt, size_t ncells USES_REGS)
326{
327 /*
328 * cpcellsd(To,From,NOfCells) - copy the cells downwards - in
329 * absmi.asm
330 */
331 CELL *NewPt = (CELL *)((char*)OldPt+LOCAL_GDiff);
332 CELL *IntPt = (CELL *)((char*)OldPt+LOCAL_GDiff0);
333 cpcellsd(NewPt, IntPt, ncells);
334}
335
336static inline CELL
337AdjustAppl(register CELL t0 USES_REGS)
338{
339 register CELL *t = RepAppl(t0);
340
341 if (IsOldGlobalPtr(t))
342 return (AbsAppl(PtoGloAdjust(t)));
343 else if (IsOldTrailPtr(t))
344 return (AbsAppl(CellPtoTRAdjust(t)));
345 else if (IsHeapP(t))
346 return (AbsAppl(CellPtoHeapAdjust(t)));
347#ifdef DEBUG
348 else {
349 /* strange cell */
350 /* Sfprintf(stderr,"% garbage appl %lx found in stacks by stack shifter\n", t0);*/
351 }
352#endif
353 return(t0);
354}
355
356static inline CELL
357AdjustPair(register CELL t0 USES_REGS)
358{
359 register CELL *t = RepPair(t0);
360
361 if (IsOldGlobalPtr(t))
362 return (AbsPair(PtoGloAdjust(t)));
363 if (IsOldTrailPtr(t))
364 return (AbsPair(CellPtoTRAdjust(t)));
365 else if (IsHeapP(t))
366 return (AbsPair(CellPtoHeapAdjust(t)));
367#ifdef DEBUG
368 /* Sfprintf(stderr,"% garbage pair %lx found in stacks by stack shifter\n", t0);*/
369#endif
370 return(t0);
371}
372
/* Walk the trail top-down and relocate every entry: variable
   bindings pointing into the old local/global/trail areas, plus
   pair/appl-tagged entries.  With FROZEN_STACKS the associated
   TrailVal cells are relocated the same way.
   adjusting_heap is currently unused in this body.
   NOTE(review): thread_copying is declared bool but compared with
   STACK_INCREMENTAL_COPYING (== 2) below; a bool never equals 2, so
   the incremental branch looks unreachable as written — confirm. */
static void
AdjustTrail(bool adjusting_heap, bool thread_copying USES_REGS)
{
  volatile tr_fr_ptr ptt, tr_base = (tr_fr_ptr)LOCAL_TrailBase;

#if defined(YAPOR_THREADS)
  if (thread_copying == STACK_INCREMENTAL_COPYING) {
    ptt = (tr_fr_ptr)(LOCAL_end_trail_copy);
    tr_base = (tr_fr_ptr)(LOCAL_start_trail_copy);
  } else {
#endif
    ptt = TR;
#if defined(YAPOR_THREADS)
  }
#endif
  /* moving the trail is simple, yeaahhh! */
  while (ptt != tr_base) {
    register CELL reg = TrailTerm(ptt-1);
#ifdef FROZEN_STACKS
    register CELL reg2 = TrailVal(ptt-1);
#endif

    ptt--;
    if (IsVarTerm(reg)) {
      if (IsOldLocalInTR(reg))
	TrailTerm(ptt) = LocalAdjust(reg);
      else if (IsOldGlobal(reg))
	TrailTerm(ptt) = GlobalAdjust(reg);
      else if (IsOldTrail(reg))
	TrailTerm(ptt) = TrailAdjust(reg);
      else if (thread_copying) {
	/* entry does not point anywhere we know: reset it when
	   copying between threads */
	RESET_VARIABLE(&TrailTerm(ptt));
      }
    } else if (IsPairTerm(reg)) {
      TrailTerm(ptt) = AdjustPair(reg PASS_REGS);
#ifdef MULTI_ASSIGNMENT_VARIABLES /* does not work with new structures */
    } else if (IsApplTerm(reg)) {
      TrailTerm(ptt) = AdjustAppl(reg PASS_REGS);
#endif
    }
#ifdef FROZEN_STACKS
    if (IsVarTerm(reg2)) {
      if (IsOldLocal(reg2))
	TrailVal(ptt) = LocalAdjust(reg2);
      else if (IsOldGlobal(reg2))
	TrailVal(ptt) = GlobalAdjust(reg2);
      else if (IsOldTrail(reg2))
	TrailVal(ptt) = TrailAdjust(reg2);
    } else if (IsApplTerm(reg2)) {
      TrailVal(ptt) = AdjustAppl(reg2 PASS_REGS);
    } else if (IsPairTerm(reg2)) {
      TrailVal(ptt) = AdjustPair(reg2 PASS_REGS);
    }
#endif
  }

}
430
431static void
432fixPointerCells(CELL *pt, CELL *pt_bot, bool thread_copying USES_REGS)
433{
434 while (pt > pt_bot) {
435 CELL reg = *--pt;
436 if (IsVarTerm(reg)) {
437 if (IsOldLocal(reg))
438 *pt = LocalAdjust(reg);
439 else if (IsOldGlobal(reg))
440 *pt = GlobalAdjust(reg);
441 else if (IsOldTrail(reg))
442 *pt = TrailAdjust(reg);
443 else if (IsOldCode(reg))
444 *pt = CodeAdjust(reg);
445 } else if (IsApplTerm(reg)) {
446 *pt = AdjustAppl(reg PASS_REGS);
447 } else if (IsPairTerm(reg)) {
448 *pt = AdjustPair(reg PASS_REGS);
449 }
450 }
451}
452
453
454#ifdef TABLING
/* Relocate the choicepoint pointers stored in the tabling data
   structures (dependency frames and subgoal frames), which point
   back into the local stack being shifted. */
static void
fix_tabling_info( USES_REGS1 )
{
  /* we must fix the dependency frames and the subgoal frames, as they are
     pointing back to the global stack. */
  struct dependency_frame *df;
  struct subgoal_frame *sg;

  df = LOCAL_top_dep_fr;
  while (df) {
    if (DepFr_backchain_cp(df))
      DepFr_backchain_cp(df) = ChoicePtrAdjust(DepFr_backchain_cp(df));
    if (DepFr_leader_cp(df))
      DepFr_leader_cp(df) = ChoicePtrAdjust(DepFr_leader_cp(df));
    if (DepFr_cons_cp(df))
      DepFr_cons_cp(df) = ConsumerChoicePtrAdjust(DepFr_cons_cp(df));
    df = DepFr_next(df);
  }
  sg = LOCAL_top_sg_fr;
  while (sg) {
    if (SgFr_gen_cp(sg))
      SgFr_gen_cp(sg) = GeneratorChoicePtrAdjust(SgFr_gen_cp(sg));
    sg = SgFr_next(sg);
  }
}
480#endif /* TABLING */
481
482
483static void
484AdjustSlots(bool thread_copying USES_REGS)
485{
486 CELL *pt = LOCAL_SlotBase+LOCAL_CurSlot;
487 CELL *pt_bot = LOCAL_SlotBase;
488 fixPointerCells( pt, pt_bot, thread_copying PASS_REGS);
489}
490
/* Relocate the local stack (ASP..LCL0), the tabling frames that
   point into it, and the slot array.
   NOTE(review): thread_copying is a bool compared against
   STACK_INCREMENTAL_COPYING (== 2) — see the enum note; the
   incremental branch looks unreachable as written. */
static void
AdjustLocal(bool thread_copying USES_REGS)
{
  register CELL *pt, *pt_bot;

  /* Adjusting the local */
#if defined(YAPOR_THREADS)
  if (thread_copying == STACK_INCREMENTAL_COPYING) {
    pt = (CELL *) (LOCAL_end_local_copy);
    pt_bot = (CELL *) (LOCAL_start_local_copy);
  } else {
#endif
    pt = LCL0;
    pt_bot = ASP;
#if defined(YAPOR_THREADS)
  }
#endif
#ifdef TABLING
  fix_tabling_info( PASS_REGS1 );
#endif /* TABLING */
  fixPointerCells( pt, pt_bot, thread_copying PASS_REGS);
  AdjustSlots( thread_copying PASS_REGS);
}
514
/* Relocate a single term stored outside the stacks (registers copied
   between workers, global variables, array elements).
   NOTE(review): a variable term matching none of the old-area tests
   falls through the if-chain to the final AtomTermAdjust — presumably
   such cells actually hold atom/code-area addresses; confirm before
   restructuring this fallthrough. */
static Term
AdjustGlobTerm(Term reg USES_REGS)
{
  if (IsVarTerm(reg)) {
    if (IsOldGlobal(reg))
      return GlobalAdjust(reg);
    else if (IsOldLocal(reg))
      return LocalAdjust(reg);
#ifdef MULTI_ASSIGNMENT_VARIABLES
    else if (IsOldTrail(reg))
      return TrailAdjust(reg);
#endif
  } else if (IsApplTerm(reg))
    return AdjustAppl(reg PASS_REGS);
  else if (IsPairTerm(reg))
    return AdjustPair(reg PASS_REGS);
  return AtomTermAdjust(reg);
}
533
/* Relocate everything reachable from the global stack: dynamic
   arrays, global variables, non-backtrackable static arrays, then a
   linear sweep of the global stack itself (H0..HR) that fixes
   tagged pointers and steps over opaque/blob payloads in place.
   sz is currently unused in this body.
   NOTE(review): several oddities preserved as-is —
   - the YAPOR_THREADS incremental branch assigns `hpt`/`hpt_max`
     before `hpt` is declared (declaration sits in the else-arm), so
     that configuration looks uncompilable; confirm.
   - `CELL *hpt = LOCAL_OldH0;` is immediately overwritten by
     `hpt = H0;`, making the initializer a dead store.
   - `(f2 = Yap_blob_gc_relocate_handler(t)) < 0` orders a function
     pointer against 0; presumably a non-NULL test was intended.
   - the inner `YAP_Opaque_CallOnGCMark f` shadows the outer
     `Functor f`. */
static void
AdjustGlobal(Int sz, bool thread_copying USES_REGS)
{
  ArrayEntry *al = LOCAL_DynamicArrays;
  StaticArrayEntry *sal = LOCAL_StaticArrays;
  GlobalEntry *gl = LOCAL_GlobalVariables;

  /* dynamic arrays hold a single term value each */
  while (al) {
    al->ValueOfVE = AdjustGlobTerm(al->ValueOfVE PASS_REGS);
    al = al->NextAE;
  }
  /* global variables: atoms/small ints need no relocation */
  while (gl) {
    if (IsVarTerm(gl->global) ||
	!IsAtomOrIntTerm(gl->global)) {
      gl->global = AdjustGlobTerm(gl->global PASS_REGS);
    }
    gl = gl->NextGE;
  }
  /* static arrays of non-backtrackable terms store a negative arity */
  while (sal) {
    if (sal->ArrayType == array_of_nb_terms) {
      UInt arity = -sal->ArrayEArity, i;
      for (i=0; i < arity; i++) {
	Term tlive = sal->ValueOfVE.lterms[i].tlive;
	if (!IsVarTerm(tlive) || !IsUnboundVar(&sal->ValueOfVE.lterms[i].tlive)) {
	  sal->ValueOfVE.lterms[i].tlive = AdjustGlobTerm(sal->ValueOfVE.lterms[i].tlive PASS_REGS);
	}
      }
    }
    sal = sal->NextAE;
  }

  /*
   * to clean the global now that functors are just variables pointing to
   * the code
   */
#if defined(YAPOR_THREADS)
  if (thread_copying == STACK_INCREMENTAL_COPYING) {
    hpt = (CELL *) (LOCAL_start_global_copy);
    hpt_max = (CELL *) (LOCAL_end_global_copy);
  } else {
#endif
    CELL *hpt = LOCAL_OldH0;
#if defined(YAPOR_THREADS)
  }
#endif
  hpt = H0;

  while (hpt < HR) {
    CELL reg;
    reg = *hpt;
    if (IsVarTerm(reg)) {
      if (IsOldGlobal(reg))
	*hpt = GlobalAdjust(reg);
      else if (IsOldLocal(reg))
	*hpt = LocalAdjust(reg);
      else if (IsOldTrail(reg)) {
	*hpt = TrailAdjust(reg);
      } else if ( IsExtensionFunctor((Functor)reg) && reg > 0 && reg % sizeof(CELL)==0 ) {
	/* start of an opaque payload (float/bigint/string/blob);
	   validate its size and closing atom before skipping it */
	Functor f;
	ssize_t bigsz = SizeOfOpaqueTerm(hpt,reg);
	if (bigsz <= 0 || hpt + bigsz > HR ||!IsAtomTerm(hpt[bigsz-1])) {
	  /* malformed/not really an opaque term: leave cell as-is */
	  *hpt++ = reg;
	  continue;
	}
	f = (Functor)reg;
	CELL end = CloseExtension(hpt);
	if (f==FunctorBlob) {
	  /* give the blob's GC hooks a chance to relocate its
	     internal term references */
	  YAP_Opaque_CallOnGCMark f;
	  YAP_Opaque_CallOnGCRelocate f2;
	  Term t = AbsAppl(hpt);
	  CELL ar[256];

	  if( (f = Yap_blob_gc_mark_handler(t)) ) {
	    Int n = (f)(Yap_BlobTag(t), Yap_BlobInfo(t), ar, 256);
	    if ( (f2 = Yap_blob_gc_relocate_handler(t)) < 0 ) {
	      int out = (f2)(Yap_BlobTag(t), Yap_BlobInfo(t), ar, n);
	      if (out < 0) {
		Yap_Error(RESOURCE_ERROR_HEAP,TermNil,"bad restore of slot internal variables");
		return;
	      }
	    }
	  }

	}
	/* jump to the payload's closing cell and rewrite it */
	hpt += bigsz-1;
	*hpt=end;

      }else {
	*hpt = CodeAdjust(reg);
      }
    } else if (IsApplTerm(reg)){
      *hpt = AdjustAppl(reg PASS_REGS);
    }else if (IsPairTerm(reg)){
      *hpt = AdjustPair(reg PASS_REGS);
    }else if (IsAtomTerm(reg)) {
      *hpt = AtomTermAdjust(reg);
    }
    hpt++;
  }
}
635
636/*
637 * When growing the stack we need to adjust: the local stack cells pointing
638 * to the local; the local stack cells and the X terms pointing to the global
639 * (just once) the trail cells pointing both to the global and to the local
640 */
/* Relocate all three areas after a shift: trail first, then local
   stack (incl. slots and tabling frames), then the global stack
   (incl. arrays and global variables).  sz is forwarded to
   AdjustGlobal; copying_threads selects thread-copy handling. */
static void
AdjustStacksAndTrail(Int sz, bool copying_threads USES_REGS)
{
  AdjustTrail(true, copying_threads PASS_REGS);
  AdjustLocal(copying_threads PASS_REGS);
  AdjustGlobal(sz, copying_threads PASS_REGS);
}
648
/* Public entry point: relocate trail, local and global stacks with
   no extra global growth and no thread copying. */
void
Yap_AdjustStacksAndTrail(void)
{
  CACHE_REGS
  AdjustStacksAndTrail(0, FALSE PASS_REGS);
}
655
656/*
657 * When growing the stack we need to adjust: the local cells pointing to the
658 * local; the trail cells pointing to the local
659 */
/* Relocation for a plain local-stack/trail grow: the global stack
   did not move, so only the trail and local area are adjusted.
   NOTE(review): STACK_SHIFTING (enum value 0) is passed where the
   prototypes declare bool parameters — works because it is 0, but
   the types disagree; see the enum note. */
static void
AdjustGrowStack( USES_REGS1 )
{
  AdjustTrail(FALSE, STACK_SHIFTING PASS_REGS);
  AdjustLocal(STACK_SHIFTING PASS_REGS);
}
666
667static void
668AdjustRegs(int n USES_REGS)
669{
670 int i;
671 CELL reg;
672
673 for (i = 1; i < n; ++i) {
674 reg = (CELL) XREGS[i];
675 if (IsVarTerm(reg)) {
676 if (IsOldLocal(reg))
677 reg = LocalAdjust(reg);
678 else if (IsOldGlobal(reg))
679 reg = GlobalAdjust(reg);
680 else if (IsOldTrail(reg))
681 reg = TrailAdjust(reg);
682 else if (IsOldCode(reg))
683 reg = CodeAdjust(reg);
684 } else if (IsApplTerm(reg))
685 reg = AdjustAppl(reg PASS_REGS);
686 else if (IsPairTerm(reg))
687 reg = AdjustPair(reg PASS_REGS);
688 XREGS[i] = (Term) reg;
689 }
690}
691
692static void
693AdjustVarTable(VarEntry *ves USES_REGS)
694{
695 ves->VarAdr = TermNil;
696 if (ves->VarRight != NULL) {
697 if (IsOldVarTableTrailPtr(ves->VarRight)) {
698 ves->VarRight = (VarEntry *)TrailAddrAdjust((ADDR)(ves->VarRight));
699 }
700 AdjustVarTable(ves->VarRight PASS_REGS);
701 }
702 if (ves->VarLeft != NULL) {
703 if (IsOldVarTableTrailPtr(ves->VarLeft)) {
704 ves->VarLeft = (VarEntry *)TrailAddrAdjust((ADDR)(ves->VarLeft));
705 }
706 AdjustVarTable(ves->VarLeft PASS_REGS);
707 }
708}
709
710/*
711 If we have to shift while we are scanning we need to adjust all
712 pointers created by the scanner (Tokens and Variables)
713*/
/* If a shift happens while the scanner is running, relocate every
   pointer the scanner created on the trail: the token list (*tksp),
   the named-variable tree (*vep), and the anonymous-variable list.
   Both in/out pointers are updated in place. */
static void
AdjustScannerStacks(TokEntry **tksp, VarEntry **vep USES_REGS)
{
  TokEntry *tks = *tksp;
  VarEntry *ves = *vep;

  if (tks != NULL) {
    if (IsOldTokenTrailPtr(tks)) {
      tks = *tksp = TokEntryAdjust(tks);
    }
  }
  while (tks != NULL) {
    TokEntry *tktmp;

    /* per-token payload relocation depends on the token kind */
    switch (tks->Tok) {
    case Number_tok:
      if (IsApplTerm(tks->TokInfo)) {
	tks->TokInfo = AdjustAppl(tks->TokInfo PASS_REGS);
      }
      break;
    case Var_tok:
    case String_tok:
    case BQString_tok:
      if (IsOldTrail(tks->TokInfo))
	tks->TokInfo = TrailAdjust(tks->TokInfo);
      break;
    case Name_tok:
      tks->TokInfo = (Term)AtomAdjust((Atom)(tks->TokInfo));
      break;
    default:
      break;
    }
    tktmp = tks->TokNext;
    if (tktmp != NULL) {
      if (IsOldTokenTrailPtr(tktmp)) {
	tktmp = TokEntryAdjust(tktmp);
	tks->TokNext = tktmp;
      }
    }
    tks = tktmp;
  }
  if (ves != NULL) {
    if (IsOldVarTableTrailPtr(ves))
      ves = *vep = (VarEntry *)TrailAddrAdjust((ADDR)ves);
    AdjustVarTable(ves PASS_REGS);
  }
  /* anonymous variables form a left-linked list; only the links and
     cached addresses need fixing */
  ves = LOCAL_AnonVarTable;
  if (ves != NULL) {
    if (IsOldVarTableTrailPtr(ves))
      ves = LOCAL_AnonVarTable = VarEntryAdjust(ves);
  }
  while (ves != NULL) {
    VarEntry *vetmp = ves->VarLeft;
    if (vetmp != NULL) {
      if (IsOldVarTableTrailPtr(vetmp)) {
	vetmp = VarEntryAdjust(vetmp);
      }
      ves->VarLeft = vetmp;
    }
    ves->VarAdr = TermNil;
    ves = vetmp;
  }
}
777
/* Public entry point: relocate the first n argument registers. */
void
Yap_AdjustRegs(int n)
{
  CACHE_REGS
  AdjustRegs(n PASS_REGS);
}
784
785/* Used by do_goal() when we're short of heap space */
/* Grow the code/database heap by (at least) esize bytes, shifting
   the stacks upward to make room.  Returns TRUE on success, FALSE
   with LOCAL_ErrorMessage set on failure.
   fix_code: the caller is mid-compilation, so the "global" to move
     ends at cip->freep instead of the saved H.
   old_trp/tksp/vep: non-NULL when called from the scanner; the
     scanner's trail segment and token/variable tables are relocated
     too, temporarily treating *old_trp as the trail top. */
static int
static_growheap(size_t esize, bool fix_code, struct intermediates *cip, tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep USES_REGS)
{
  Int size = esize;
  UInt start_growth_time, growth_time;
  int gc_verbose;
  size_t minimal_request = 0L;

  /* adjust to a multiple of 256) */
  if (size < YAP_ALLOC_SIZE)
    size = YAP_ALLOC_SIZE;
  size = AdjustPageSize(size);
  LOCAL_ErrorMessage = NULL;
  if (!Yap_ExtendWorkSpace(size)) {
    /* contiguous extension failed: fall back to allocating across a
       hole, asking for at least enough to re-site the stacks */
    Int min_size = AdjustPageSize(((CELL)LOCAL_TrailTop-(CELL)LOCAL_GlobalBase)+MinHeapGap);

    LOCAL_ErrorMessage = NULL;
    if (size < min_size) size = min_size;
    minimal_request = size;
    size = Yap_ExtendWorkSpaceThroughHole(size);
    if (size < 0) {
      LOCAL_ErrorMessage = "Database crashed against Stacks";
      return FALSE;
    }
  }
  start_growth_time = Yap_cputime();
  gc_verbose = Yap_is_gc_verbose();
  LOCAL_heap_overflows++;
  if (gc_verbose) {
#if  defined(YAPOR_THREADS)
    fprintf( stderr, "%% Worker Id %d:\n", worker_id);
#endif
    fprintf( stderr, "%% Database Overflow %d\n", LOCAL_heap_overflows);
    fprintf( stderr, "%% growing the heap " Int_FORMAT " bytes\n", size);
  }
  /* CreepFlag is set to force heap expansion */
  if ( Yap_only_has_signal( YAP_CDOVF_SIGNAL) ) {
    CalculateStackGap( PASS_REGS1 );
  }
  /* everything above the heap shifts by the full grow size */
  LOCAL_TrDiff = LOCAL_LDiff = LOCAL_GDiff = LOCAL_GDiff0 = LOCAL_DelayDiff = LOCAL_BaseDiff = size;
  LOCAL_XDiff = LOCAL_HDiff = 0;
  LOCAL_GSplit = NULL;
  YAPEnterCriticalSection();
  SetHeapRegs(FALSE PASS_REGS);
  MoveLocalAndTrail( PASS_REGS1 );
  if (fix_code) {
    /* during compilation the code area extends to cip->freep */
    CELL *SaveOldH = LOCAL_OldH;
    LOCAL_OldH = (CELL *)cip->freep;
    MoveGlobal( PASS_REGS1 );
    LOCAL_OldH = SaveOldH;
  } else {
    MoveGlobal( PASS_REGS1 );
  }
  if (old_trp) {
    tr_fr_ptr nTR;

    AdjustScannerStacks(tksp, vep PASS_REGS);
    nTR = TR;
    *old_trp = PtoTRAdjust(*old_trp);
    TR = *old_trp;
    AdjustStacksAndTrail(0, FALSE PASS_REGS);
    TR = nTR;
  } else {
    AdjustStacksAndTrail(0, FALSE PASS_REGS);
  }
  AdjustRegs(MaxTemps PASS_REGS);
  if (minimal_request)
    Yap_AllocHole(minimal_request, size);
  YAPLeaveCriticalSection();
  growth_time = Yap_cputime()-start_growth_time;
  LOCAL_total_heap_overflow_time += growth_time;
  if (gc_verbose) {
    fprintf(stderr, "%% took %g sec\n", (double)growth_time/1000);
    fprintf(stderr, "%% Total of %g sec expanding Database\n", (double)LOCAL_total_heap_overflow_time/1000);
  }
  return(TRUE);
}
863
864/* Used when we're short of heap, usually because of an overflow in
865 the attributed stack, but also because we allocated a zone */
/* Grow the global (and delay/attributed-variable) stack by request
   bytes, optionally opening a hole at hsplit so the region above it
   can be moved out of the way.  *ptr, if given, is a stack pointer
   the caller needs relocated.  Returns the number of bytes actually
   made available (request when splitting), or 0 on failure with
   LOCAL_ErrorMessage set.
   NOTE(review): cells_to_move is computed as HR-hsplit before the
   hsplit==NULL checks — harmless since it is only used when hsplit
   survives them, but fragile. */
static int
static_growglobal(size_t request, CELL **ptr, CELL *hsplit USES_REGS)
{
  size_t cells_to_move = HR-hsplit;
  UInt start_growth_time, growth_time;
  int gc_verbose;
  char *omax = (char *)H0;
  ADDR old_GlobalBase = LOCAL_GlobalBase;
  UInt minimal_request = 0L;
  Int size = request;
  char vb_msg1 = '\0', *vb_msg2;
  bool do_grow = true;
  /*
    request is the amount of memory we requested, in bytes;
    base_move is the shift in global stacks we had to do
    size is how much space we allocate: it's negative if we just expand
    the delay stack.
    do_grow is whether we expand stacks
  */

  if (hsplit) {
    /* just a little bit of sanity checking */
    if (hsplit < H0 && hsplit > (CELL *)LOCAL_GlobalBase) {
      /* expanding attributed variables */
      if (omax - size > LOCAL_GlobalBase+4096*sizeof(CELL)) {
	/* we can just ask for more room */
	size = 0;
	do_grow = FALSE;
      }
    } else if (hsplit < (CELL*)omax ||
	       hsplit > HR) {
      //Yap_ThrowError(INTERNAL_SYSTEM_ERROR, MkAddressTerm(hsplit), NULL );
    }
    else if (hsplit == (CELL *)omax)
      hsplit = NULL;
    if (size < 0 ||
	(Unsigned(HR)+size < Unsigned(ASP)-StackGap( PASS_REGS1 ) &&
	 hsplit > H0)) {
      /* don't need to expand stacks */
      do_grow = FALSE;
    }
  } else {
    if (Unsigned(HR)+size < Unsigned(ASP)-CreepFlag) {
      /* we can just ask for more room */
      return size; }
  }
  if (do_grow) {
    if (size < YAP_ALLOC_SIZE)
      size = YAP_ALLOC_SIZE;
    size = AdjustPageSize(size);
  }
  /* adjust to a multiple of 256) */
  LOCAL_ErrorMessage = NULL;
  LOCAL_PrologMode |= GrowStackMode;
  start_growth_time = Yap_cputime();
  if (do_grow) {
    if (!GLOBAL_AllowGlobalExpansion) {
      LOCAL_ErrorMessage = "Global Stack crashed against Local Stack";
      LeaveGrowMode(GrowStackMode);
      return 0;
    }
    if (!GLOBAL_AllowGlobalExpansion || !Yap_ExtendWorkSpace(size)) {
      /* always fails when using malloc */
      LOCAL_ErrorMessage = NULL;
      size += AdjustPageSize(((CELL)LOCAL_TrailTop-(CELL)LOCAL_GlobalBase)+MinHeapGap);
      minimal_request = size;
      size = Yap_ExtendWorkSpaceThroughHole(size);
      if (size < 0) {
	LOCAL_ErrorMessage = "Global Stack crashed against Local Stack";
	LeaveGrowMode(GrowStackMode);
	return 0;
      }
    }
  }
  gc_verbose = Yap_is_gc_verbose();
  LOCAL_delay_overflows++;
  if (gc_verbose) {
    if (hsplit) {
      if (hsplit > H0) {
	vb_msg1 = 'H';
	vb_msg2 = "Global Variable Space";
      } else {
	vb_msg1 = 'D';
	vb_msg2 = "Global Variable Delay Space";
      }
    } else {
      vb_msg1 = 'D';
      vb_msg2 = "Delay";
    }
#if defined(YAPOR_THREADS)
    fprintf(stderr, "%% Worker Id %d:\n", worker_id);
#endif
    fprintf(stderr, "%% %cO %s Overflow %d\n", vb_msg1, vb_msg2, LOCAL_delay_overflows);
    fprintf(stderr, "%% %cO growing the stacks " UInt_FORMAT " bytes\n", vb_msg1, size);
  }
  YAPEnterCriticalSection();
  /* we always shift the local and the stack by the same amount */
  if (do_grow) {
    /* we got over a hole */
    if (minimal_request) {
      /* we went over a hole */
      LOCAL_BaseDiff = size+((CELL)LOCAL_TrailTop-(CELL)LOCAL_GlobalBase)-minimal_request;
      LOCAL_LDiff = LOCAL_TrDiff = size;
    } else {
      /* we may still have an overflow */
      LOCAL_BaseDiff = LOCAL_GlobalBase - old_GlobalBase;
      /* if we grow, we need to move the stacks */
      LOCAL_LDiff = LOCAL_TrDiff = LOCAL_BaseDiff+size;
    }
  } else {
    /* stay still */
    LOCAL_LDiff = LOCAL_TrDiff = 0;
    LOCAL_BaseDiff = 0;
  }
  /* now, remember we have delay -- global with a hole in delay or a
     hole in global */
  if (!hsplit) {
    if (!do_grow) {
      LOCAL_DelayDiff = LOCAL_GDiff = LOCAL_GDiff0 = size;
      request = 0L;
    } else {
      /* expand delay stack */
      LOCAL_DelayDiff = LOCAL_GDiff = LOCAL_GDiff0 = LOCAL_LDiff;
    }
  } else {
    /* we want to expand a hole for the delay stack */
    LOCAL_GDiff0 = LOCAL_DelayDiff = LOCAL_BaseDiff;
    LOCAL_GDiff = LOCAL_BaseDiff+request;
  }
  LOCAL_XDiff = LOCAL_HDiff = 0;
  LOCAL_GSplit = hsplit;
  LOCAL_GlobalBase = old_GlobalBase;
  SetHeapRegs(FALSE PASS_REGS);
  if (do_grow) {
    MoveLocalAndTrail( PASS_REGS1 );
    if (hsplit) {
      MoveGlobalWithHole( PASS_REGS1 );
    } else {
      MoveExpandedGlobal( PASS_REGS1 );
    }
  } else if (!hsplit) {
    MoveExpandedGlobal( PASS_REGS1 );
  }
  /* don't run through garbage */
  if (hsplit && (LOCAL_OldH != hsplit)) {
    AdjustStacksAndTrail(request, FALSE PASS_REGS);
  } else {
    AdjustStacksAndTrail(0, FALSE PASS_REGS);
  }
  AdjustRegs(MaxTemps PASS_REGS);
  if (ptr) {

    if (IsOldGlobalPtr(*ptr))
      *ptr = PtoGloAdjust(*ptr);
    else
      *ptr = PtoLocAdjust(*ptr);
  }
  if (hsplit) {
    /* second phase of the split: slide the upper half to its final
       position and report the relocated split point */
    MoveHalfGlobal(hsplit, cells_to_move PASS_REGS);
    LOCAL_GSplit = hsplit+LOCAL_GDiff0/sizeof(CELL);
  }
  YAPLeaveCriticalSection();
  if (minimal_request) {
    Yap_AllocHole(minimal_request, size);
  }
  growth_time = Yap_cputime()-start_growth_time;
  LOCAL_total_delay_overflow_time += growth_time;
  if (gc_verbose) {
    fprintf(stderr, "%% %cO took %g sec\n", vb_msg1, (double)growth_time/1000);
    fprintf(stderr, "%% %cO Total of %g sec expanding stacks \n", vb_msg1, (double)LOCAL_total_delay_overflow_time/1000);
  }
  LeaveGrowMode(GrowStackMode);
  if (hsplit) {
    return request;
  } else
    return LOCAL_GDiff-LOCAL_BaseDiff;
}
1043
/* Walk a chain of compiler intermediate-code instructions after the
   global (heap) area has been shifted, relocating every operand that
   may point into the moved area, as well as the nextInst links that
   thread the chain itself. */
static void
fix_compiler_instructions(PInstr *pcpc USES_REGS)
{
  while (pcpc != NULL) {
    PInstr *ncpc = pcpc->nextInst;

    switch(pcpc->op) {
      /* check c_var for functions that point at variables */
    case get_var_op:
    case get_val_op:
    case unify_var_op:
    case unify_last_var_op:
    case unify_val_op:
    case unify_local_op:
    case unify_last_val_op:
    case unify_last_local_op:
    case put_var_op:
    case put_val_op:
    case put_unsafe_op:
    case write_unsafe_op:
    case write_var_op:
    case write_val_op:
    case write_local_op:
    case f_var_op:
    case f_val_op:
    case save_pair_op:
    case save_appl_op:
    case save_b_op:
    case commit_b_op:
    case fetch_args_vv_op:
    case fetch_args_cv_op:
    case fetch_args_vc_op:
      /* single operand holding a cell address in the global area */
      pcpc->rnd1 = GlobalAdjust(pcpc->rnd1);
      break;
    case bccall_op:
      /* inlined binary comparison: two variable operands to relocate */
      pcpc->rnd1 = GlobalAdjust(pcpc->rnd1);
      pcpc->rnd3 = GlobalAdjust(pcpc->rnd3);
      break;
    case get_float_op:
    case put_float_op:
    case get_longint_op:
    case get_string_op:
    case put_longint_op:
    case put_string_op:
    case unify_float_op:
    case unify_last_float_op:
    case write_float_op:
      /* floats might be in the global */
      pcpc->rnd1 = AdjustAppl(pcpc->rnd1 PASS_REGS);
      break;
      /* hopefully nothing to do */
    case nop_op:
    case ensure_space_op:
    case get_atom_op:
    case put_atom_op:
    case get_num_op:
    case put_num_op:
    case align_float_op:
    case get_bigint_op:
    case put_bigint_op:
    case get_dbterm_op:
    case put_dbterm_op:
    case get_list_op:
    case put_list_op:
    case get_struct_op:
    case put_struct_op:
    case unify_atom_op:
    case unify_last_atom_op:
    case write_atom_op:
    case unify_num_op:
    case unify_last_num_op:
    case write_num_op:
    case unify_longint_op:
    case unify_string_op:
    case unify_last_longint_op:
    case unify_last_string_op:
    case write_longint_op:
    case write_string_op:
    case unify_bigint_op:
    case unify_last_bigint_op:
    case unify_dbterm_op:
    case unify_last_dbterm_op:
    case write_bigint_op:
    case write_dbterm_op:
    case unify_list_op:
    case write_list_op:
    case unify_struct_op:
    case write_struct_op:
    case fail_op:
    case cut_op:
    case cutexit_op:
    case allocate_op:
    case deallocate_op:
    case tryme_op:
    case jump_op:
    case jumpi_op:
    case procceed_op:
    case call_op:
    case execute_op:
    case safe_call_op:
    case label_op:
    case name_op:
    case pop_op:
    case retryme_op:
    case trustme_op:
    case either_op:
    case orelse_op:
    case orlast_op:
    case push_or_op:
    case pushpop_or_op:
    case pop_or_op:
    case patch_b_op:
    case try_op:
    case retry_op:
    case trust_op:
    case try_in_op:
    case jump_v_op:
    case jump_nv_op:
    case cache_arg_op:
    case cache_sub_arg_op:
    case user_switch_op:
    case switch_on_type_op:
    case switch_c_op:
    case if_c_op:
    case switch_f_op:
    case if_f_op:
    case if_not_op:
    case index_dbref_op:
    case index_blob_op:
    case index_long_op:
    case index_string_op:
    case if_nonvar_op:
    case unify_last_list_op:
    case write_last_list_op:
    case unify_last_struct_op:
    case write_last_struct_op:
    case mark_initialized_pvars_op:
    case mark_live_regs_op:
    case enter_profiling_op:
    case retry_profiled_op:
    case count_call_op:
    case count_retry_op:
    case restore_tmps_op:
    case restore_tmps_and_skip_op:
    case enter_lu_op:
    case empty_call_op:
    case blob_op:
    case string_op:
    case fetch_args_vi_op:
    case fetch_args_iv_op:
    case label_ctl_op:
    case f_0_op:
    case native_op:
#ifdef TABLING
    case table_new_answer_op:
    case table_try_single_op:
#endif /* TABLING */
#ifdef YAPOR
    case sync_op:
#endif
#ifdef BEAM
    case run_op:
    case body_op:
    case endgoal_op:
    case try_me_op:
    case retry_me_op:
    case trust_me_op:
    case only_1_clause_op:
    case create_first_box_op:
    case create_box_op:
    case create_last_box_op:
    case remove_box_op:
    case remove_last_box_op:
    case prepare_tries:
    case std_base_op:
    case direct_safe_call_op:
    case commit_op:
    case skip_while_var_op:
    case wait_while_var_op:
    case force_wait_op:
    case write_op:
    case is_op:
    case equal_op:
    case exit_op:
#endif
      break;
    }
    /* the chain itself lives in the moved area: relocate the link
       before following it */
    if (ncpc != NULL) {
      ncpc = (PInstr *)GlobalAddrAdjust((ADDR)(pcpc->nextInst));
      pcpc->nextInst = ncpc;
    }
    pcpc = ncpc;
  }
}
/* Grow the code heap (database area).  The request starts at 16K cells
   and is scaled up exponentially with the number of previous heap
   overflows (capped at a shift of 8), never dropping below in_size.
   On success a CDOVF signal is posted so Prolog level code can react.
   fix_code asks for the compiler's intermediate-code chains in cip to
   be relocated as well; old_trp/tksp/vep are scanner structures to fix
   when called from the parser. */
static int
do_growheap(int fix_code, UInt in_size, struct intermediates *cip, tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep USES_REGS)
{
  unsigned long size = sizeof(CELL) * K16;
  int shift_factor = (LOCAL_heap_overflows > 8 ? 8 : LOCAL_heap_overflows);
  unsigned long sz = size << shift_factor;

  if (sz < in_size) {
    sz = in_size;
  }
#ifdef YAPOR
  /* NOTE(review): under YAPOR this returns unconditionally, making the
     rest of the function dead code -- heap growth is not supported with
     multiple workers. */
  Yap_Error(RESOURCE_ERROR_HEAP,TermNil,"cannot grow Heap: more than a worker/thread running");
  return FALSE;
#endif
  if (GLOBAL_SizeOfOverflow > sz) {
    if (size < YAP_ALLOC_SIZE)
      size = YAP_ALLOC_SIZE;
    sz = AdjustPageSize(GLOBAL_SizeOfOverflow);
  }
  /* keep halving the request until static_growheap succeeds or the
     request can no longer satisfy in_size */
  while(sz >= sizeof(CELL) * K16 && !static_growheap(sz, fix_code, cip, old_trp, tksp, vep PASS_REGS)) {
    size = size/2;
    sz = size << shift_factor;
    if (sz < in_size) {
      return FALSE;
    }
  }
  /* we must fix an instruction chain */
  if (fix_code) {
    PInstr *pcpc = cip->CodeStart;
    if (pcpc != NULL) {
      cip->CodeStart = pcpc = (PInstr *)GlobalAddrAdjust((ADDR)pcpc);
    }
    fix_compiler_instructions(pcpc PASS_REGS);
    pcpc = cip->BlobsStart;
    if (pcpc != NULL) {
      cip->BlobsStart = pcpc = (PInstr *)GlobalAddrAdjust((ADDR)pcpc);
    }
    fix_compiler_instructions(pcpc PASS_REGS);
    cip->freep = (char *)GlobalAddrAdjust((ADDR)cip->freep);
    cip->label_offset = (Int *)GlobalAddrAdjust((ADDR)cip->label_offset);
  }
  /* sz still at or above the minimum means the loop exited by success */
  if (sz >= sizeof(CELL) * K16) {
    Yap_get_signal( YAP_CDOVF_SIGNAL );
    return TRUE;
  }
  /* failed */
  return FALSE;
}
1286
1287static void
1288init_new_table(AtomHashEntry *ntb, UInt nsize)
1289{
1290 UInt i;
1291
1292 for (i = 0; i < nsize; i++) {
1293 INIT_RWLOCK(ntb[i].AERWLock);
1294 ntb[i].Entry = NIL;
1295 }
1296}
1297
1298static void
1299cp_atom_table(AtomHashEntry *ntb, UInt nsize)
1300{
1301 UInt i;
1302
1303 for (i = 0; i < AtomHashTableSize; i++) {
1304 Atom catom;
1305
1306 READ_LOCK(HashChain[i].AERWLock);
1307 catom = HashChain[i].Entry;
1308 while (catom != NIL) {
1309 AtomEntry *ap = RepAtom(catom);
1310 Atom natom;
1311 CELL hash;
1312
1313 hash = HashFunction(ap->UStrOfAE) % nsize;
1314 natom = ap->NextOfAE;
1315 ap->NextOfAE = ntb[hash].Entry;
1316 ntb[hash].Entry = catom;
1317 catom = natom;
1318 }
1319 READ_UNLOCK(HashChain[i].AERWLock);
1320 }
1321}
1322
1323static int
1324growatomtable( USES_REGS1 )
1325{
1326 AtomHashEntry *ntb;
1327 UInt nsize = 3*AtomHashTableSize-1;
1328 UInt start_growth_time = Yap_cputime(), growth_time;
1329 int gc_verbose = Yap_is_gc_verbose();
1330 if (nsize -AtomHashTableSize > 4*1024*1024)
1331 nsize = AtomHashTableSize+4*1024*1024+7919;
1332
1333 Yap_get_signal( YAP_CDOVF_SIGNAL );
1334 while ((ntb = (AtomHashEntry *)Yap_AllocCodeSpace(nsize*sizeof(AtomHashEntry))) == NULL) {
1335 /* leave for next time */
1336#if !USE_SYSTEM_MALLOC
1337 if (!do_growheap(FALSE, nsize*sizeof(AtomHashEntry), NULL, NULL, NULL, NULL))
1338#endif
1339 return FALSE;
1340 }
1341 LOCAL_atom_table_overflows ++;
1342 if (gc_verbose) {
1343#if defined(YAPOR_THREADS)
1344 fprintf(stderr, "%% Worker Id %d:\n", worker_id);
1345#endif
1346 fprintf(stderr, "%% Atom Table Overflow %d\n", LOCAL_atom_table_overflows );
1347 fprintf(stderr, "%% growing the atom table to %ld entries\n", (long int)(nsize));
1348 }
1349 YAPEnterCriticalSection();
1350 init_new_table(ntb, nsize);
1351 cp_atom_table(ntb, nsize);
1352 Yap_FreeCodeSpace((char *)HashChain);
1353 HashChain = ntb;
1354 AtomHashTableSize = nsize;
1355 YAPLeaveCriticalSection();
1356 growth_time = Yap_cputime()-start_growth_time;
1357 LOCAL_total_atom_table_overflow_time += growth_time;
1358 if (gc_verbose) {
1359 fprintf(stderr, "%% took %g sec\n", (double)growth_time/1000);
1360 fprintf(stderr, "%% Total of %g sec expanding atom table \n", (double)LOCAL_total_atom_table_overflow_time/1000);
1361 }
1362#if USE_SYSTEM_MALLOC
1363 return TRUE;
1364#else
1365 if (HeapTop + sizeof(YAP_SEG_SIZE) > HeapLim - MinHeapGap) {
1366 /* make sure there is no heap overflow */
1367 int res;
1368
1369 res = do_growheap(FALSE, 0, NULL, NULL, NULL, NULL PASS_REGS);
1370 return res;
1371 } else {
1372 return TRUE;
1373 }
1374#endif
1375}
1376
1377
/* Top-level heap growth entry point, taken while holding the big
   global lock under THREADS.  First tries atom GC / atom-table growth
   when the atom table or blob area overflowed; only then falls back to
   growing the code heap itself. */
int
Yap_locked_growheap(bool fix_code, size_t in_size, void *cip)
{
  CACHE_REGS
  int res;
  bool blob_overflow = (NOfBlobs > NOfBlobsMax);

#ifdef THREADS
  LOCK(GLOBAL_BGL);
#endif
  // make sure that we cannot have more than a thread life
  if (Yap_NOfThreads() > 1) {
#ifdef THREADS
    UNLOCK(GLOBAL_BGL);
#endif
    res = FALSE;
    if (NOfAtoms > 2*AtomHashTableSize || blob_overflow) {
      /* defer to the signal handler rather than growing here */
      Yap_get_signal( YAP_CDOVF_SIGNAL );
      return TRUE;
    }
    /* NOTE(review): when there is no atom/blob overflow, control falls
       through with GLOBAL_BGL already released, yet every exit path
       below unlocks it again -- this looks like a double-unlock;
       confirm intended behaviour. */
  }
  // don't release the MTHREAD lock in case we're running from the C-interface.
  if (NOfAtoms > 2*AtomHashTableSize || blob_overflow) {
    UInt n = NOfAtoms;
    if (GLOBAL_AGcThreshold)
      Yap_atom_gc( PASS_REGS1 );
    /* check if we have a significant improvement from agc */
    if (!blob_overflow &&
	(n > NOfAtoms+ NOfAtoms/10 ||
	 /* +1 = make sure we didn't lose the current atom */
	 NOfAtoms+1 > 2*AtomHashTableSize)) {
      res = growatomtable( PASS_REGS1 );
    } else {
      /* atom GC recovered enough space: nothing left to do */
#ifdef THREADS
      UNLOCK(GLOBAL_BGL);
#endif
      return TRUE;
    }
    LeaveGrowMode(GrowHeapMode);
    if (res) {
#ifdef THREADS
      UNLOCK(GLOBAL_BGL);
#endif
      return res;
    }
  }
#if USE_SYSTEM_MALLOC
  /* with system malloc the heap cannot be shifted: report failure */
  P = Yap_Error(RESOURCE_ERROR_HEAP,TermNil,"malloc failed");
  res = FALSE;
#else
  res=do_growheap(fix_code, in_size, (struct intermediates *)cip, NULL, NULL, NULL PASS_REGS);
#endif
  LeaveGrowMode(GrowHeapMode);
#ifdef THREADS
  UNLOCK(GLOBAL_BGL);
#endif
  return res;
}
1436
1437int
1438Yap_growheap(bool fix_code, size_t in_size, void *cip)
1439{
1440 int rc;
1441 rc = Yap_locked_growheap(fix_code, in_size, cip);
1442 return rc;
1443}
1444
1445int
1446Yap_growheap_in_parser(tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep)
1447{
1448 CACHE_REGS
1449 int res;
1450
1451 res=do_growheap(FALSE, 0L, NULL, old_trp, tksp, vep PASS_REGS);
1452 LeaveGrowMode(GrowHeapMode);
1453 return res;
1454}
1455
1456int
1457Yap_locked_growglobal(CELL **ptr)
1458{
1459 CACHE_REGS
1460 unsigned long sz = sizeof(CELL) * K16;
1461
1462#if defined(YAPOR_THREADS)
1463 if (GLOBAL_number_workers != 1) {
1464 Yap_Error(RESOURCE_ERROR_STACK,TermNil,"cannot grow Global: more than a worker/thread running");
1465 return(FALSE);
1466 }
1467#elif defined(THREADS)
1468 if (GLOBAL_NOfThreads != 1) {
1469 Yap_Error(RESOURCE_ERROR_STACK,TermNil,"cannot grow Global: more than a worker/thread running");
1470 return(FALSE);
1471 }
1472#endif
1473 if ( static_growglobal(sz, ptr, NULL PASS_REGS) == 0)
1474 return FALSE;
1475 return TRUE;
1476}
1477
1478int
1479Yap_growglobal(CELL **ptr)
1480{
1481 int rc;
1482 rc = Yap_locked_growglobal(ptr);
1483 return rc;
1484}
1485
1486UInt
1487Yap_InsertInGlobal(CELL *where, size_t howmuch, CELL **at)
1488{
1489 CACHE_REGS
1490 bool gc_verbose = Yap_is_gc_verbose();
1491 howmuch = static_growglobal(howmuch, NULL, where PASS_REGS);
1492 if (gc_verbose) {
1493#if defined(YAPOR) || defined(THREADS)
1494 fprintf(stderr, "%% Worker Id %d:\n", worker_id);
1495#endif
1496 fprintf(stderr, "%% Insert %ldB at %p in global\n", howmuch,where);
1497 fprintf(stderr, "%% Global: %8ld cells (%p-%p)\n", (unsigned long int)(HR-(CELL *)LOCAL_GlobalBase),LOCAL_GlobalBase,HR);
1498 fprintf(stderr, "%% Local:%8ld cells (%p-%p)\n", (unsigned long int)(LCL0-ASP),LCL0,ASP);
1499 fprintf(stderr, "%% Trail:%8ld cells (%p-%p)\n",
1500 (unsigned long int)(TR-(tr_fr_ptr)LOCAL_TrailBase),LOCAL_TrailBase,TR);
1501 fprintf(stderr, "%% Growing the stacks " UInt_FORMAT " bytes\n",howmuch);
1502 }
1503 if (at) {
1504 if (LOCAL_GSplit)
1505 *at = LOCAL_GSplit;
1506 else
1507 *at = HR+howmuch/sizeof(CELL);
1508 }
1509 return howmuch;
1510}
1511
1512
1513int
1514Yap_locked_growstack(size_t size)
1515{
1516 CACHE_REGS
1517 int res;
1518
1519 LOCAL_PrologMode |= GrowStackMode;
1520 res=growstack(size PASS_REGS);
1521 LeaveGrowMode(GrowStackMode);
1522 return res;
1523}
1524
1525int
1526Yap_growstack(size_t size)
1527{
1528 CACHE_REGS
1529 int res;
1530
1531 LOCAL_PrologMode |= GrowStackMode;
1532 res=growstack(size PASS_REGS);
1533 LeaveGrowMode(GrowStackMode);
1534 return res;
1535}
1536
/* Core of stack expansion: extend the workspace by esize0 bytes (or
   relocate the whole workspace through a memory hole when a contiguous
   extension fails), compute the shift offsets for every region, and
   relocate trail, local and global stacks plus the registers.
   from_trail means the extra space goes to the trail; in_parser means
   scanner structures (old_trp/tksp/vep) must be relocated too. */
static int
execute_growstack(size_t esize0, bool from_trail, bool in_parser, tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep USES_REGS)
{
  UInt minimal_request = 0L;
  Int size0 = esize0;
  Int size = size0;
  ADDR old_LOCAL_GlobalBase = LOCAL_GlobalBase;

  if (!GLOBAL_AllowGlobalExpansion) {
    LOCAL_ErrorMessage = "Database crashed against stacks";
    return FALSE;
  }
  if (!Yap_ExtendWorkSpace(size)) {
    /* make sure stacks and trail are contiguous */

    LOCAL_ErrorMessage = NULL;
    /* request enough for the whole current area plus slack */
    minimal_request = AdjustPageSize(((CELL)LOCAL_TrailTop-(CELL)LOCAL_GlobalBase)+4*MinHeapGap+size0);

    size = Yap_ExtendWorkSpaceThroughHole(minimal_request);
    if (size < 0) {
      LOCAL_ErrorMessage = "Database crashed against stacks";
      return FALSE;
    }
    YAPEnterCriticalSection();
    /* everything moved by the difference between what we got and what
       we asked for */
    LOCAL_GDiff = LOCAL_DelayDiff = LOCAL_BaseDiff = size-size0;
  } else {
    YAPEnterCriticalSection();
    /* Yap_ExtendWorkSpace may have relocated the base itself */
    LOCAL_GDiff = LOCAL_BaseDiff = LOCAL_DelayDiff = LOCAL_GlobalBase-old_LOCAL_GlobalBase;
    LOCAL_GlobalBase=old_LOCAL_GlobalBase;
  }
  LOCAL_XDiff = LOCAL_HDiff = 0;
  LOCAL_GDiff0=LOCAL_GDiff;
#if USE_SYSTEM_MALLOC
  if (from_trail) {
    LOCAL_TrDiff = LOCAL_LDiff = LOCAL_GDiff;
  } else {
    LOCAL_TrDiff = LOCAL_LDiff = size+LOCAL_GDiff;
  }
#else
  if (from_trail) {
    LOCAL_TrDiff = LOCAL_LDiff = size-size0;
  } else {
    LOCAL_TrDiff = LOCAL_LDiff = size;
  }
#endif
  SetHeapRegs(FALSE PASS_REGS);
  if (from_trail) {
    /* the new space belongs to the trail */
    LOCAL_TrailTop += size0;

  }
  if (LOCAL_LDiff) {
    MoveLocalAndTrail( PASS_REGS1 );
  }
  if (LOCAL_GDiff) {
#if !USE_SYSTEM_MALLOC
    /* That is done by realloc */
    MoveGlobal( PASS_REGS1 );
#endif
    if (in_parser) {
      tr_fr_ptr nTR;

      /* relocate scanner structures, then walk the trail as it was
	 when the parser saved it, restoring TR afterwards */
      AdjustScannerStacks(tksp, vep PASS_REGS);
      nTR = TR;
      *old_trp = PtoTRAdjust(*old_trp);
      TR = *old_trp;
      AdjustStacksAndTrail(0, FALSE PASS_REGS);
      TR = nTR;
    } else {
      AdjustStacksAndTrail(0, FALSE PASS_REGS);
    }
    AdjustRegs(MaxTemps PASS_REGS);
  } else if (LOCAL_LDiff) {
    /* only local stack and trail moved: cheaper adjustment */
    if (in_parser) {
      tr_fr_ptr nTR;

      AdjustScannerStacks(tksp, vep PASS_REGS);
      nTR = TR;
      *old_trp = PtoTRAdjust(*old_trp);
      TR = *old_trp;
      AdjustGrowStack( PASS_REGS1 );
      TR = nTR;
    } else {
      AdjustGrowStack( PASS_REGS1 );
    }
    AdjustRegs(MaxTemps PASS_REGS);
  }
  YAPLeaveCriticalSection();
  if (minimal_request)
    Yap_AllocHole(minimal_request, size);
  return TRUE;
}
1628
1629/* Used by do_goal() when we're short of stack space */
1630static int
1631growstack(size_t size USES_REGS)
1632{
1633 UInt start_growth_time, growth_time;
1634 int gc_verbose;
1635
1636 /* adjust to a multiple of 256) */
1637 if (size < YAP_ALLOC_SIZE)
1638 size = YAP_ALLOC_SIZE;
1639 size = AdjustPageSize(size);
1640 LOCAL_ErrorMessage = NULL;
1641 start_growth_time = Yap_cputime();
1642 gc_verbose = Yap_is_gc_verbose();
1643 LOCAL_stack_overflows++;
1644 if (gc_verbose) {
1645#if defined(YAPOR) || defined(THREADS)
1646 fprintf(stderr, "%% Worker Id %d:\n", worker_id);
1647#endif
1648 fprintf(stderr, "%% Stack Overflow %d\n", LOCAL_stack_overflows);
1649 fprintf(stderr, "%% Global: %8ld cells (%p-%p)\n", (unsigned long int)(HR-(CELL *)LOCAL_GlobalBase),LOCAL_GlobalBase,HR);
1650 fprintf(stderr, "%% Local:%8ld cells (%p-%p)\n", (unsigned long int)(LCL0-ASP),LCL0,ASP);
1651 fprintf(stderr, "%% Trail:%8ld cells (%p-%p)\n",
1652 (unsigned long int)(TR-(tr_fr_ptr)LOCAL_TrailBase),LOCAL_TrailBase,TR);
1653 fprintf(stderr, "%% Growing the stacks " UInt_FORMAT " bytes\n", (UInt) size);
1654 }
1655 if (!execute_growstack(size, FALSE, FALSE, NULL, NULL, NULL PASS_REGS))
1656 return FALSE;
1657 growth_time = Yap_cputime()-start_growth_time;
1658 LOCAL_total_stack_overflow_time += growth_time;
1659 if (gc_verbose) {
1660 fprintf(stderr, "%% took %g sec\n", (double)growth_time/1000);
1661 fprintf(stderr, "%% Total of %g sec expanding stacks \n", (double)LOCAL_total_stack_overflow_time/1000);
1662 }
1663 return TRUE;
1664}
1665
/* Used by parser when we're short of stack space */
int
Yap_growstack_in_parser(tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep)
{
  CACHE_REGS
  UInt size;
  UInt start_growth_time, growth_time;
  bool gc_verbose;

  LOCAL_PrologMode |= GrowStackMode;
  /* adjust to a multiple of 256) */
  /* request as much as the stacks currently occupy */
  size = AdjustPageSize((ADDR)LCL0-LOCAL_GlobalBase);
  LOCAL_ErrorMessage = NULL;
  start_growth_time = Yap_cputime();
  gc_verbose = Yap_is_gc_verbose();
  LOCAL_stack_overflows++;
  if (gc_verbose) {
#if defined(YAPOR) || defined(THREADS)
    fprintf(stderr, "%% Worker Id %d:\n", worker_id);
#endif
    fprintf(stderr, "%% Stack Overflow %d\n", LOCAL_stack_overflows);
    fprintf(stderr, "%% Global: %8ld cells (%p-%p)\n", (unsigned long int)(HR-(CELL *)LOCAL_GlobalBase),LOCAL_GlobalBase,HR);
    fprintf(stderr, "%% Local:%8ld cells (%p-%p)\n", (unsigned long int)(LCL0-ASP),LCL0,ASP);
    fprintf(stderr, "%% Trail:%8ld cells (%p-%p)\n",
	    (unsigned long int)(TR-(tr_fr_ptr)LOCAL_TrailBase),LOCAL_TrailBase,TR);
    fprintf(stderr, "%% Growing the stacks %ld bytes\n", (unsigned long int)size);
  }
  /* in_parser=TRUE: scanner structures must be relocated as well */
  if (!execute_growstack(size, FALSE, TRUE, old_trp, tksp, vep PASS_REGS)) {
    LeaveGrowMode(GrowStackMode);
    return FALSE;
  }
  growth_time = Yap_cputime()-start_growth_time;
  LOCAL_total_stack_overflow_time += growth_time;
  if (gc_verbose) {
    fprintf(stderr, "%% took %g sec\n", (double)growth_time/1000);
    fprintf(stderr, "%% Total of %g sec expanding stacks \n", (double)LOCAL_total_stack_overflow_time/1000);
  }
  LeaveGrowMode(GrowStackMode);
  return TRUE;
}
1706
/* Expand the trail.  When esize is 0 the trail doubles (clamped to
   [YAP_ALLOC_SIZE, M2], but never below the explicit request).  With
   system malloc a contiguous-only expansion is impossible, so the whole
   workspace is moved via execute_growstack; otherwise an in-place
   extension is attempted first.  Posts a TROVF signal on success. */
static int do_growtrail(size_t esize, bool contiguous_only, bool in_parser, tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep USES_REGS)
{
  UInt start_growth_time = Yap_cputime(), growth_time;
  int gc_verbose = Yap_is_gc_verbose();
  Int size0 = esize;
  Int size = esize;

#if USE_SYSTEM_MALLOC
  /* with system malloc the trail cannot be extended in place */
  if (contiguous_only)
    return FALSE;
#endif
  /* at least 64K for trail */
  if (!size)
    size = LOCAL_TrailTop-LOCAL_TrailBase;
  size *= 2;
  if (size < YAP_ALLOC_SIZE)
    size = YAP_ALLOC_SIZE;
  if (size > M2)
    size = M2;
  if (size < size0)
    size=size0;
  /* adjust to a multiple of 256) */
  size = AdjustPageSize(size);
  LOCAL_trail_overflows++;
  if (gc_verbose) {
#if defined(YAPOR) || defined(THREADS)
    fprintf(stderr, "%% Worker Id %d:\n", worker_id);
#endif
    fprintf(stderr, "%% Trail Overflow %d\n", LOCAL_trail_overflows);
#if USE_SYSTEM_MALLOC
    fprintf(stderr, "%% Heap: %8ld cells (%p-%p)\n", (unsigned long int)(HR-(CELL *)LOCAL_GlobalBase),(CELL *)LOCAL_GlobalBase,HR);
    fprintf(stderr, "%% Local:%8ld cells (%p-%p)\n", (unsigned long int)(LCL0-ASP),LCL0,ASP);
    fprintf(stderr, "%% Trail:%8ld cells (%p-%p)\n",
	    (unsigned long int)(TR-(tr_fr_ptr)LOCAL_TrailBase),LOCAL_TrailBase,TR);
#endif
    fprintf(stderr, "%% growing the trail " UInt_FORMAT " bytes\n", size);
  }
  LOCAL_ErrorMessage = NULL;
  if (!GLOBAL_AllowTrailExpansion) {
    LOCAL_ErrorMessage = "Trail Overflow";
    return FALSE;
  }
#if USE_SYSTEM_MALLOC
  execute_growstack(size, TRUE, in_parser, old_trp, tksp, vep PASS_REGS);
#else
  YAPEnterCriticalSection();
  if (!Yap_ExtendWorkSpace(size)) {
    /* could not extend in place: fall back to the full move */
    YAPLeaveCriticalSection();
    LOCAL_ErrorMessage = NULL;
    if (contiguous_only) {
      /* I can't expand in this case */
      LOCAL_trail_overflows--;
      return FALSE;
    }
    execute_growstack(size, TRUE, in_parser, old_trp, tksp, vep PASS_REGS);
  } else {
    if (in_parser) {
      /* nothing moved, but scanner structures still expect a pass */
      LOCAL_TrDiff = LOCAL_LDiff = LOCAL_GDiff = LOCAL_BaseDiff = LOCAL_DelayDiff = LOCAL_XDiff = LOCAL_HDiff = LOCAL_GDiff0 = 0;
      AdjustScannerStacks(tksp, vep PASS_REGS);
    }
    LOCAL_TrailTop += size;
    CurrentTrailTop = (tr_fr_ptr)(LOCAL_TrailTop-MinTrailGap);
    YAPLeaveCriticalSection();
  }
#endif
  growth_time = Yap_cputime()-start_growth_time;
  LOCAL_total_trail_overflow_time += growth_time;
  if (gc_verbose) {
    fprintf(stderr, "%% took %g sec\n", (double)growth_time/1000);
    fprintf(stderr, "%% Total of %g sec expanding trail \n", (double)LOCAL_total_trail_overflow_time/1000);
  }
  Yap_get_signal( YAP_TROVF_SIGNAL );
  return TRUE;
}
1781
1782
1783/* Used by do_goal() when we're short of stack space */
1784int
1785Yap_growtrail(size_t size, bool contiguous_only)
1786{
1787 int rc;
1788 CACHE_REGS
1789 rc = do_growtrail(size, contiguous_only, FALSE, NULL, NULL, NULL PASS_REGS);
1790 return rc;
1791}
1792
1793/* Used by do_goal() when we're short of stack space */
1794int
1795Yap_locked_growtrail(size_t size, bool contiguous_only)
1796{
1797 CACHE_REGS
1798 return do_growtrail(size, contiguous_only, FALSE, NULL, NULL, NULL PASS_REGS);
1799}
1800
1801int
1802Yap_growtrail_in_parser(tr_fr_ptr *old_trp, TokEntry **tksp, VarEntry **vep)
1803{
1804 CACHE_REGS
1805 return do_growtrail(0, FALSE, TRUE, old_trp, tksp, vep PASS_REGS);
1806}
1807
/* Expand the auxiliary area that holds the two "to visit" stacks used
   by term traversal: a stack growing up from AuxBase (bottom) and one
   growing down from AuxTop (top, delimited by tovisit_max).  After the
   area is reallocated, the top stack is copied to the end of the new
   area and the caller's pointers are rebased.  On allocation failure
   an error is raised and the original pointer returned unchanged. */
CELL **
Yap_shift_visit(CELL **tovisit, CELL ***tovisit_maxp, CELL ***tovisit_base)
{
  CACHE_REGS
  CELL **tovisit_max = *tovisit_maxp;
  /* relative position of top of stack */
  Int off = (ADDR)tovisit-AuxBase;
  /* how much space the top stack was using */
  Int sz = AuxTop - (ADDR)tovisit_max;
  /* how much space the bottom stack was using */
  Int szlow = (ADDR)tovisit_max-AuxBase;
  /* original size for AuxSpace */
  Int totalsz0 = AuxTop - AuxBase; /* totalsz0 == szlow+sz */
  /* new size for AuxSpace */
  Int totalsz;
  /* how much we grow */
  Int dsz; /* totalsz == szlow+dsz+sz */
  char *newb = Yap_ExpandPreAllocCodeSpace(0, NULL, FALSE);

  if (newb == NULL) {
    Yap_Error(RESOURCE_ERROR_HEAP,TermNil,"cannot allocate temporary space for unification (%p)", tovisit);
    return tovisit;
  }
  /* check new size */
  totalsz = AuxTop-AuxBase;
  /* how much we grew */
  dsz = totalsz-totalsz0;
  if (dsz == 0) {
    /* expansion produced no extra room: treat as failure */
    Yap_Error(RESOURCE_ERROR_HEAP,TermNil,"cannot allocate temporary space for unification (%p)", tovisit);
    return tovisit;
  }
  /* copy whole block to end */
  cpcellsd((CELL *)(newb+(dsz+szlow)), (CELL *)(newb+szlow), sz/sizeof(CELL));
  /* base pointer is block start */
  *tovisit_maxp = (CELL **)(newb+szlow);
  /* NOTE(review): unlike tovisit_maxp above, the base is reset from
     AuxSp rather than newb -- presumably AuxSp was updated by
     Yap_ExpandPreAllocCodeSpace; confirm. */
  if (tovisit_base)
    *tovisit_base = (CELL **)AuxSp;
  /* current top is originall diff + diff size */
  return (CELL **)(newb+(off+dsz));
}
1849
1850static Int
1851p_inform_trail_overflows( USES_REGS1 )
1852{
1853 Term tn = MkIntTerm(LOCAL_trail_overflows);
1854 Term tt = MkIntegerTerm(LOCAL_total_trail_overflow_time);
1855
1856 return(Yap_unify(tn, ARG1) && Yap_unify(tt, ARG2));
1857}
1858
1859/* :- grow_heap(Size) */
1860static Int
1861p_growheap( USES_REGS1 )
1862{
1863 Int diff;
1864 Term t1 = Deref(ARG1);
1865
1866 if (IsVarTerm(t1)) {
1867 Yap_Error(INSTANTIATION_ERROR, t1, "grow_heap/1");
1868 return(FALSE);
1869 } else if (!IsIntTerm(t1)) {
1870 Yap_Error(TYPE_ERROR_INTEGER, t1, "grow_heap/1");
1871 return(FALSE);
1872 }
1873 diff = IntOfTerm(t1);
1874 if (diff < 0) {
1875 Yap_Error(DOMAIN_ERROR_NOT_LESS_THAN_ZERO, t1, "grow_heap/1");
1876 }
1877 return(static_growheap(diff, FALSE, NULL, NULL, NULL, NULL PASS_REGS));
1878}
1879
1880static Int
1881p_inform_heap_overflows( USES_REGS1 )
1882{
1883 Term tn = MkIntTerm(LOCAL_heap_overflows);
1884 Term tt = MkIntegerTerm(LOCAL_total_heap_overflow_time);
1885
1886 return(Yap_unify(tn, ARG1) && Yap_unify(tt, ARG2));
1887}
1888
#if defined(YAPOR_THREADS)
/* Copy worker_p's stacks into worker_q, first resizing worker_q's
   stack area to match, then rebasing every register and relocating the
   copied data.  With incremental==TRUE only the slices recorded in the
   LOCAL_start/end_*_copy markers are copied. */
void
Yap_CopyThreadStacks(int worker_q, int worker_p, bool incremental)
{
  CACHE_REGS
  Int size;

  /* make sure both stacks have same size */
  Int p_size = REMOTE_ThreadHandle(worker_p).ssize+REMOTE_ThreadHandle(worker_p).tsize;
  Int q_size = REMOTE_ThreadHandle(worker_q).ssize+REMOTE_ThreadHandle(worker_q).tsize;
  if (p_size != q_size) {
    UInt start_growth_time, growth_time;
    int gc_verbose;
    size_t ssiz = REMOTE_ThreadHandle(worker_q).ssize*K1;
    size_t tsiz = REMOTE_ThreadHandle(worker_q).tsize*K1;
    size_t diff = (REMOTE_ThreadHandle(worker_p).ssize-REMOTE_ThreadHandle(worker_q).ssize)*K1;
    char *oldq = (char *)REMOTE_ThreadHandle(worker_q).stack_address, *newq;

    /* grow worker_q's block to the master's size */
    if (!(newq = REMOTE_ThreadHandle(worker_q).stack_address = realloc(REMOTE_ThreadHandle(worker_q).stack_address,p_size*K1))) {
      Yap_Error(RESOURCE_ERROR_STACK,TermNil,"cannot expand slave thread to match master thread");
    }
    start_growth_time = Yap_cputime();
    gc_verbose = Yap_is_gc_verbose();
    LOCAL_stack_overflows++;
    if (gc_verbose) {
#if defined(YAPOR) || defined(THREADS)
      fprintf(stderr, "%% Worker Id %d:\n", worker_id);
#endif
      fprintf(stderr, "%% Stack Overflow %d\n", LOCAL_stack_overflows);
      fprintf(stderr, "%% Stack: %8ld cells (%p-%p)\n", (unsigned long int)(LCL0-(CELL *)LOCAL_GlobalBase),LOCAL_GlobalBase,LCL0);
      fprintf(stderr, "%% Trail:%8ld cells (%p-%p)\n",
	      (unsigned long int)(TR-(tr_fr_ptr)LOCAL_TrailBase),LOCAL_TrailBase,TR);
      fprintf(stderr, "%% Growing the stacks %ld bytes\n", diff);
    }
    /* offsets: the realloc move plus the size difference for the
       regions that sit above the global area */
    LOCAL_GDiff = LOCAL_GDiff0 = LOCAL_DelayDiff = LOCAL_BaseDiff = (newq-oldq);
    LOCAL_TrDiff = LOCAL_LDiff = diff + LOCAL_GDiff;
    LOCAL_XDiff = LOCAL_HDiff = 0;
    LOCAL_GSplit = NULL;
    YAPEnterCriticalSection();
    SetHeapRegs(FALSE PASS_REGS);
    {
      choiceptr imageB;

      /* compute worker_q's root TR from worker 0's image of B */
      LOCAL_OldLCL0 = LCL0;
      LCL0 = REMOTE_ThreadHandle(0).current_yaam_regs->LCL0_;
      imageB = Get_GLOBAL_root_cp();
      /* we know B */
      B->cp_tr = TR =
	(tr_fr_ptr)((CELL)(imageB->cp_tr)+((CELL)LOCAL_OldLCL0-(CELL)LCL0));
      LCL0 = LOCAL_OldLCL0;
      B->cp_h = H0;
      B->cp_ap = GETWORK;
      B->cp_or_fr = GLOBAL_root_or_fr;
    }
    YAPLeaveCriticalSection();
    growth_time = Yap_cputime()-start_growth_time;
    LOCAL_total_stack_overflow_time += growth_time;
    if (gc_verbose) {
      fprintf(stderr, "%% took %g sec\n", (double)growth_time/1000);
      fprintf(stderr, "%% Total of %g sec expanding stacks \n", (double)LOCAL_total_stack_overflow_time/1000);
    }
  }

  REMOTE_ThreadHandle(worker_q).ssize = REMOTE_ThreadHandle(worker_p).ssize;
  REMOTE_ThreadHandle(worker_q).tsize = REMOTE_ThreadHandle(worker_p).tsize;
  /* compute offset indicators */
  LOCAL_GlobalBase = REMOTE_GlobalBase(worker_p);
  LOCAL_LocalBase = REMOTE_LocalBase(worker_p);
  LOCAL_TrailBase = REMOTE_TrailBase(worker_p);
  LOCAL_TrailTop = REMOTE_TrailTop(worker_p);
  CurrentTrailTop = (tr_fr_ptr)(LOCAL_TrailTop-MinTrailGap);
  size = REMOTE_ThreadHandle(worker_q).stack_address-REMOTE_ThreadHandle(worker_p).stack_address;
  LOCAL_TrDiff = LOCAL_LDiff = LOCAL_GDiff = LOCAL_GDiff0 = LOCAL_DelayDiff = LOCAL_BaseDiff = size;
  LOCAL_XDiff = LOCAL_HDiff = 0;
  LOCAL_GSplit = NULL;
  /* start from the master's registers and shift them to worker_q */
  HR = REMOTE_ThreadHandle(worker_p).current_yaam_regs->H_;
  H0 = REMOTE_ThreadHandle(worker_p).current_yaam_regs->H0_;
  B = REMOTE_ThreadHandle(worker_p).current_yaam_regs->B_;
  ENV = REMOTE_ThreadHandle(worker_p).current_yaam_regs->ENV_;
  YENV = REMOTE_ThreadHandle(worker_p).current_yaam_regs->YENV_;
  ASP = REMOTE_ThreadHandle(worker_p).current_yaam_regs->ASP_;
  TR = REMOTE_ThreadHandle(worker_p).current_yaam_regs->TR_;
  if (ASP > CellPtr(B))
    ASP = CellPtr(B);
  LCL0 = REMOTE_ThreadHandle(worker_p).current_yaam_regs->LCL0_;
  Yap_REGS.CUT_C_TOP = REMOTE_ThreadHandle(worker_p).current_yaam_regs->CUT_C_TOP;
  LOCAL_DynamicArrays = NULL;
  LOCAL_StaticArrays = NULL;
  LOCAL_GlobalVariables = NULL;
  SetHeapRegs(TRUE PASS_REGS);
  if (incremental) {
    IncrementalCopyStacksFromWorker( PASS_REGS1 );
    LOCAL_start_global_copy =
      (CELL)PtoGloAdjust((CELL *)LOCAL_start_global_copy);
    LOCAL_end_global_copy =
      (CELL)PtoGloAdjust((CELL *)LOCAL_end_global_copy);
    LOCAL_start_local_copy =
      (CELL)PtoLocAdjust((CELL *)LOCAL_start_local_copy);
    LOCAL_end_local_copy =
      (CELL)PtoLocAdjust((CELL *)LOCAL_end_local_copy);
    LOCAL_start_trail_copy =
      (CELL)PtoTRAdjust((tr_fr_ptr)LOCAL_start_trail_copy);
    LOCAL_end_trail_copy =
      (CELL)PtoTRAdjust((tr_fr_ptr)LOCAL_end_trail_copy);
    /* NOTE(review): the second parameter is declared bool, so
       STACK_INCREMENTAL_COPYING (2) arrives as plain true -- confirm
       the enum value is not meant to be distinguished inside. */
    AdjustStacksAndTrail(0, STACK_INCREMENTAL_COPYING PASS_REGS);
    RestoreTrail(worker_p PASS_REGS);
    TR = (tr_fr_ptr) LOCAL_end_trail_copy;
  } else {
    CopyLocalAndTrail( PASS_REGS1 );
    MoveGlobal( PASS_REGS1 );
    AdjustStacksAndTrail(0, STACK_COPYING PASS_REGS);
  }
}
#endif
2003
2004/* :- grow_stack(Size) */
2005static Int
2006p_growstack( USES_REGS1 )
2007{
2008 Int diff;
2009 Term t1 = Deref(ARG1);
2010
2011 if (IsVarTerm(t1)) {
2012 Yap_Error(INSTANTIATION_ERROR, t1, "grow_stack/1");
2013 return(FALSE);
2014 } else if (!IsIntTerm(t1)) {
2015 Yap_Error(TYPE_ERROR_INTEGER, t1, "grow_stack/1");
2016 return(FALSE);
2017 }
2018 diff = IntOfTerm(t1);
2019 if (diff < 0) {
2020 Yap_Error(DOMAIN_ERROR_NOT_LESS_THAN_ZERO, t1, "grow_stack/1");
2021 }
2022 return(growstack(diff PASS_REGS));
2023}
2024
2025static Int
2026p_inform_stack_overflows( USES_REGS1 )
2027{ /* */
2028 Term tn = MkIntTerm(LOCAL_stack_overflows);
2029 Term tt = MkIntegerTerm(LOCAL_total_stack_overflow_time);
2030
2031 return(Yap_unify(tn, ARG1) && Yap_unify(tt, ARG2));
2032
2033}
2034
2035Int
2036Yap_total_stack_shift_time(void)
2037{
2038 CACHE_REGS
2039 return(LOCAL_total_heap_overflow_time+
2040 LOCAL_total_stack_overflow_time+
2041 LOCAL_total_trail_overflow_time);
2042}
2043
/* Register the built-in predicates that expose stack/heap/trail
   management to Prolog, and initialise the garbage collectors. */
void
Yap_InitGrowPreds(void)
{
  Yap_InitCPred("$grow_heap", 1, p_growheap, SafePredFlag);
  Yap_InitCPred("$grow_stack", 1, p_growstack, SafePredFlag);
  Yap_InitCPred("$inform_trail_overflows", 2, p_inform_trail_overflows, SafePredFlag);
  Yap_InitCPred("$inform_heap_overflows", 2, p_inform_heap_overflows, SafePredFlag);
  Yap_InitCPred("$inform_stack_overflows", 2, p_inform_stack_overflows, SafePredFlag);
  Yap_init_gc();
  Yap_init_agc();
}
CELL YAP_SEG_SIZE
definitions required by saver/restorer and memory manager
Definition: alloc.h:61
Definition: arrays.h:92
Definition: YapHeap.h:81
Definition: Yatom.h:151
Definition: arrays.h:76