/* relocate a chunk pointer by LOCAL_HDiff (presumably the offset between
   the saved and the current heap base; see Yap_RestoreDLMalloc() below) */
static struct malloc_chunk *
ChunkPtrAdjust (struct malloc_chunk *ptr)
{
  return (struct malloc_chunk *) ((char *) (ptr) + LOCAL_HDiff);
}

void
Yap_add_memory_hole(ADDR start, ADDR end)
{
  if (Yap_NOfMemoryHoles == MAX_DLMALLOC_HOLES) {
    Yap_Error(SYSTEM_ERROR_OPERATING_SYSTEM, 0L,
              "Unexpected Too Much Memory Fragmentation: please contact YAP maintainers");
    return;
  }
  Yap_MemoryHoles[Yap_NOfMemoryHoles].start = start;
  Yap_MemoryHoles[Yap_NOfMemoryHoles].end = end;
  Yap_HoleSize += (UInt)(end-start);
  Yap_NOfMemoryHoles++;
}

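/*
  A "memory hole" is an address range inside YAP's heap that dlmalloc must
  never hand out (presumably space already occupied, for instance by a
  restored saved state).  Yap_add_memory_hole() only records the range and
  its size; the sbrk-style growth code below skips HeapTop over any
  registered hole before extending the heap.
*/
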
  ADDR newHeapTop = HeapTop, oldHeapTop = HeapTop;

  newHeapTop = HeapTop+size;
  /* skip over any registered memory holes */
  while (Yap_NOfMemoryHoles && newHeapTop > Yap_MemoryHoles[0].start) {
    HeapTop = oldHeapTop = Yap_MemoryHoles[0].end;
    newHeapTop = oldHeapTop+size;
    Yap_NOfMemoryHoles--;
    for (i=0; i < Yap_NOfMemoryHoles; i++) {
      Yap_MemoryHoles[i].start = Yap_MemoryHoles[i+1].start;
      Yap_MemoryHoles[i].end = Yap_MemoryHoles[i+1].end;
    }
  }
  if (newHeapTop > HeapLim - MinHeapGap) {
    if (HeapTop + size < HeapLim) {
      /* heap is getting full: ask for an expansion, but keep going */
      Yap_signal(YAP_CDOVF_SIGNAL);
    } else {
      /* no room left: report failure back to dlmalloc */
      if (size > GLOBAL_SizeOfOverflow)
        GLOBAL_SizeOfOverflow = size;
      UNLOCK(HeapUsedLock);
      return (void *)MORECORE_FAILURE;
    }
  }
  HeapTop = newHeapTop;

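/*
  The fragment above is the heart of YAP's MORECORE replacement: rather
  than calling sbrk(), dlmalloc grows into YAP's own heap by advancing
  HeapTop.  When the new top would come within MinHeapGap of HeapLim the
  code raises YAP_CDOVF_SIGNAL so the engine can schedule a heap
  expansion; when the request cannot be satisfied at all it records the
  size in GLOBAL_SizeOfOverflow and returns MORECORE_FAILURE to dlmalloc.
*/
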
static int largebin_index(unsigned int sz) {
  unsigned int x = sz >> SMALLBIN_WIDTH;
  unsigned int m;            /* bit position of highest set bit of x */

  if (x >= 0x10000)
    return NBINS-1;

  /* On intel, use the BSRL instruction to find the highest bit */
#if defined(__GNUC__) && defined(i386)
  __asm__("bsrl %1,%0\n\t"
          : "=r" (m)
          : "g"  (x));
#else
  {
    /* branch-free computation of the highest set bit */
    unsigned int n = ((x - 0x100) >> 16) & 8;
    x <<= n;
    m = ((x - 0x1000) >> 16) & 4;
    n += m;
    x <<= m;
    m = ((x - 0x4000) >> 16) & 2;
    n += m;
    x = (x << m) >> 14;
    m = 13 - n + (x & ~(x>>1));
  }
#endif

  /* Use next 2 bits to create finer-granularity bins */
  return NSMALLBINS + (m << 2) + ((sz >> (m + 6)) & 3);
}

#define bin_index(sz) \
 ((in_smallbin_range(sz)) ? smallbin_index(sz) : largebin_index(sz))

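/*
  bin_index() is the single size-to-bin mapping used by the rest of the
  allocator: requests inside the smallbin range get an exact-size bin
  (one bin per chunk size), everything else is mapped by largebin_index()
  above onto logarithmically spaced bins, with two extra bits of the size
  providing finer spacing inside each power-of-two range.
*/
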
#define FIRST_SORTED_BIN_SIZE 2056

#define unsorted_chunks(M)   (bin_at(M, 1))

#define initial_top(M)       (unsorted_chunks(M))

#define idx2block(i)         ((i) >> BINMAPSHIFT)
#define idx2bit(i)           ((1U << ((i) & ((1U << BINMAPSHIFT)-1))))

#define mark_bin(m,i)        ((m)->binmap[idx2block(i)] |=  idx2bit(i))
#define unmark_bin(m,i)      ((m)->binmap[idx2block(i)] &= ~(idx2bit(i)))
#define get_binmap(m,i)      ((m)->binmap[idx2block(i)] &   idx2bit(i))

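/*
  The binmap is a bitvector with one bit per bin.  mark_bin() sets a bit
  eagerly whenever a chunk is placed in a bin; bits are cleared only
  lazily, in mALLOc()'s scan loop, when a bin turns out to be empty.  A
  set bit therefore means "possibly non-empty" and a clear bit means
  "definitely empty", which is all the scan needs in order to skip whole
  blocks of bins cheaply, e.g.

      if (!get_binmap(av, i))
        continue;   /* bin i is known to be empty, skip it */
*/
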
#define FASTBIN_CONSOLIDATION_THRESHOLD \
  ((unsigned long)(DEFAULT_TRIM_THRESHOLD) >> 1)

#define ANYCHUNKS_BIT        (1U)

#define have_anychunks(M)    (((M)->max_fast &  ANYCHUNKS_BIT))
#define set_anychunks(M)     ((M)->max_fast |=  ANYCHUNKS_BIT)
#define clear_anychunks(M)   ((M)->max_fast &= ~ANYCHUNKS_BIT)

#define FASTCHUNKS_BIT       (2U)

#define have_fastchunks(M)   (((M)->max_fast &  FASTCHUNKS_BIT))
#define set_fastchunks(M)    ((M)->max_fast |=  (FASTCHUNKS_BIT|ANYCHUNKS_BIT))
#define clear_fastchunks(M)  ((M)->max_fast &= ~(FASTCHUNKS_BIT))

#define set_max_fast(M, s) \
  (M)->max_fast = (((s) == 0)? SMALLBIN_WIDTH: request2size(s)) | \
  ((M)->max_fast & (FASTCHUNKS_BIT|ANYCHUNKS_BIT))

#define get_max_fast(M) \
  ((M)->max_fast & ~(FASTCHUNKS_BIT | ANYCHUNKS_BIT))

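/*
  max_fast does double duty: its two low bits are the ANYCHUNKS/FASTCHUNKS
  flags above, and the remaining bits hold the size limit below which
  requests are served from fastbins.  That is why set_max_fast() ors the
  old flag bits back in, get_max_fast() masks them out, and a max_fast of
  exactly 0 can be used as an "arena not yet initialized" test elsewhere
  in this file.
*/
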
#define MORECORE_CONTIGUOUS_BIT  (1U)

#define contiguous(M) \
        (((M)->morecore_properties &  MORECORE_CONTIGUOUS_BIT))
#define noncontiguous(M) \
        (((M)->morecore_properties &  MORECORE_CONTIGUOUS_BIT) == 0)
#define set_contiguous(M) \
        ((M)->morecore_properties |=  MORECORE_CONTIGUOUS_BIT)
#define set_noncontiguous(M) \
        ((M)->morecore_properties &= ~MORECORE_CONTIGUOUS_BIT)

#define get_malloc_state() Yap_av

#if __STD_C
static void malloc_init_state(mstate av)
#else
static void malloc_init_state(av) mstate av;
#endif
{
  int i;
  /* establish circular links for normal bins */
  for (i = 1; i < NBINS; ++i) {
    mbinptr bin = bin_at(av,i);
    bin->fd = bin->bk = bin;
  }
  av->top_pad = DEFAULT_TOP_PAD;
  av->trim_threshold = DEFAULT_TRIM_THRESHOLD;
#if MORECORE_CONTIGUOUS
  set_contiguous(av);
#else
  set_noncontiguous(av);
#endif
  set_max_fast(av, DEFAULT_MXFAST);
  av->top = initial_top(av);
  av->pagesize = malloc_getpagesize;
}

#if __STD_C
static Void_t*  sYSMALLOc(INTERNAL_SIZE_T, mstate);
static int      sYSTRIm(size_t, mstate);
static void     malloc_consolidate(mstate);
static Void_t** iALLOc(size_t, size_t*, int, Void_t**);
#else
static Void_t*  sYSMALLOc();
static int      sYSTRIm();
static void     malloc_consolidate();
static Void_t** iALLOc();
#endif

#if ! DEBUG

#define check_chunk(P)
#define check_free_chunk(P)
#define check_inuse_chunk(P)
#define check_remalloced_chunk(P,N)
#define check_malloced_chunk(P,N)
#define check_malloc_state()

#else
#define check_chunk(P)              do_check_chunk(P)
#define check_free_chunk(P)         do_check_free_chunk(P)
#define check_inuse_chunk(P)        do_check_inuse_chunk(P)
#define check_remalloced_chunk(P,N) do_check_remalloced_chunk(P,N)
#define check_malloced_chunk(P,N)   do_check_malloced_chunk(P,N)
#define check_malloc_state()        do_check_malloc_state()
#endif

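/*
  With DEBUG unset the check_*() macros compile away completely; with
  DEBUG set they expand to the do_check_*() routines below, which walk
  chunks, bins and fastbins and assert() the allocator's invariants.
  They are meant for debugging builds only: do_check_malloc_state() is
  roughly linear in the number of free chunks.
*/
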
#if __STD_C
static void do_check_chunk(mchunkptr p)
#else
static void do_check_chunk(p) mchunkptr p;
#endif
{
  mstate av = get_malloc_state();
  /* min and max possible addresses assuming contiguous allocation */
  char* max_address = (char*)(av->top) + chunksize(av->top);
  CHUNK_SIZE_T sz = chunksize(p);
  char* min_address = max_address - av->sbrked_mem;

  if (!chunk_is_mmapped(p)) {
    /* has legal address ... */
    if (p != av->top) {
      if (contiguous(av)) {
        assert(((char*)p) >= min_address);
        assert(((char*)p + sz) <= ((char*)(av->top)));
      }
    } else {
      /* top size is always at least MINSIZE */
      assert((CHUNK_SIZE_T)(sz) >= MINSIZE);
      /* top predecessor always marked inuse */
      assert(prev_inuse(p));
    }
  } else {
#if HAVE_MMAP
    /* address is outside main heap */
    if (contiguous(av) && av->top != initial_top(av)) {
      assert(((char*)p) < min_address || ((char*)p) > max_address);
    }
    /* chunk is page-aligned */
    assert(((p->prev_size + sz) & (av->pagesize-1)) == 0);
    /* mem is aligned */
    assert(aligned_OK(chunk2mem(p)));
#else
    /* force an assert failure if DEBUG is set but mmap is not supported */
    assert(!chunk_is_mmapped(p));
#endif
  }
}

#if __STD_C
static void do_check_free_chunk(mchunkptr p)
#else
static void do_check_free_chunk(p) mchunkptr p;
#endif
{
  mstate av = get_malloc_state();
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
  mchunkptr next = chunk_at_offset(p, sz);

  do_check_chunk(p);
  /* chunk must claim to be free and must not be mmapped */
  assert(!inuse(p));
  assert(!chunk_is_mmapped(p));

  /* unless a special marker, must have OK fields */
  if ((CHUNK_SIZE_T)(sz) >= MINSIZE) {
    assert((sz & MALLOC_ALIGN_MASK) == 0);
    assert(aligned_OK(chunk2mem(p)));
    /* ... matching footer field */
    assert(next->prev_size == sz);
    /* ... and is fully consolidated */
    assert(prev_inuse(p));
    assert(next == av->top || inuse(next));
    assert(p->fd->bk == p);
    assert(p->bk->fd == p);
  } else
    /* markers are always of size SIZE_SZ */
    assert(sz == SIZE_SZ);
}

#if __STD_C
static void do_check_inuse_chunk(mchunkptr p)
#else
static void do_check_inuse_chunk(p) mchunkptr p;
#endif
{
  mstate av = get_malloc_state();
  mchunkptr next;

  do_check_chunk(p);
  if (chunk_is_mmapped(p))
    return; /* mmapped chunks have no next/prev */

  next = next_chunk(p);
  /* if the previous neighbour is free, it must be consistent with p */
  if (!prev_inuse(p)) {
    mchunkptr prv = prev_chunk(p);
    assert(next_chunk(prv) == p);
    do_check_free_chunk(prv);
  }
  if (next == av->top) {
    assert(prev_inuse(next));
    assert(chunksize(next) >= MINSIZE);
  }
  else if (!inuse(next))
    do_check_free_chunk(next);
}

#if __STD_C
static void do_check_remalloced_chunk(mchunkptr p, INTERNAL_SIZE_T s)
#else
static void do_check_remalloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;

  do_check_inuse_chunk(p);

  /* legal size and alignment ... */
  assert((sz & MALLOC_ALIGN_MASK) == 0);
  assert((CHUNK_SIZE_T)(sz) >= MINSIZE);
  assert(aligned_OK(chunk2mem(p)));
  /* ... and the chunk is less than MINSIZE more than the request */
  assert((long)(sz) - (long)(s) >= 0);
  assert((long)(sz) - (long)(s + MINSIZE) < 0);
}

#if __STD_C
static void do_check_malloced_chunk(mchunkptr p, INTERNAL_SIZE_T s)
#else
static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
#endif
{
  /* same as recycled case ... */
  do_check_remalloced_chunk(p, s);
  /* ... plus, any freshly allocated chunk must have prev_inuse set */
  assert(prev_inuse(p));
}

static void do_check_malloc_state(void)
{
  mstate av = get_malloc_state();
  INTERNAL_SIZE_T size;
  CHUNK_SIZE_T total = 0;

  /* internal size_t must be no wider than pointer type */
  assert(sizeof(INTERNAL_SIZE_T) <= sizeof(char*));
  /* alignment is a power of 2 */
  assert((MALLOC_ALIGNMENT & (MALLOC_ALIGNMENT-1)) == 0);
  /* cannot run remaining checks until fully initialized */
  if (av->top == 0 || av->top == initial_top(av))
    return;
  /* pagesize is a power of 2 */
  assert((av->pagesize & (av->pagesize-1)) == 0);

  /* properties of fastbins */
  assert(get_max_fast(av) <= request2size(MAX_FAST_SIZE));
  max_fast_bin = fastbin_index(av->max_fast);

  for (i = 0; i < NFASTBINS; ++i) {
    /* all bins past max_fast are empty */
    if (i > max_fast_bin)
      assert(p == 0);
    /* each chunk claims to be inuse and belongs in this bin */
    do_check_inuse_chunk(p);
    total += chunksize(p);
    assert(fastbin_index(chunksize(p)) == i);
  }

  if (total != 0)
    assert(have_fastchunks(av));
  else if (!have_fastchunks(av))
    assert(total == 0);

  /* check normal bins */
  for (i = 1; i < NBINS; ++i) {
    binbit = get_binmap(av,i);
    empty = last(b) == b;

    for (p = last(b); p != b; p = p->bk) {
      /* each chunk claims to be free */
      do_check_free_chunk(p);
      size = chunksize(p);
      idx = bin_index(size);
      /* lists are kept sorted above FIRST_SORTED_BIN_SIZE */
      if ((CHUNK_SIZE_T) size >= (CHUNK_SIZE_T)(FIRST_SORTED_BIN_SIZE)) {
        assert(p->bk == b ||
               (CHUNK_SIZE_T)chunksize(p->bk) >=
               (CHUNK_SIZE_T)chunksize(p));
      }
      /* chunk is followed by a legal chain of inuse chunks */
      for (q = next_chunk(p);
           (q != av->top && inuse(q) &&
            (CHUNK_SIZE_T)(chunksize(q)) >= MINSIZE);
           q = next_chunk(q))
        do_check_inuse_chunk(q);
    }
  }

  /* top chunk is OK */
  check_chunk(av->top);

  /* sanity checks for statistics */
  assert(total <= (CHUNK_SIZE_T)(av->max_total_mem));
  assert((CHUNK_SIZE_T)(av->sbrked_mem) <=
         (CHUNK_SIZE_T)(av->max_sbrked_mem));
  assert((CHUNK_SIZE_T)(av->max_total_mem) >=
         (CHUNK_SIZE_T)(av->sbrked_mem));
}

#if __STD_C
static Void_t* sYSMALLOc(INTERNAL_SIZE_T nb, mstate av)
#else
static Void_t* sYSMALLOc(nb, av) INTERNAL_SIZE_T nb; mstate av;
#endif
{
  INTERNAL_SIZE_T old_size;
  INTERNAL_SIZE_T front_misalign;
  INTERNAL_SIZE_T end_misalign;
  CHUNK_SIZE_T remainder_size;
  size_t pagemask = av->pagesize - 1;

  /*
    If there are fastbins, consolidate and retry malloc from scratch:
    consolidation may well free up enough space.
  */
  if (have_fastchunks(av)) {
    assert(in_smallbin_range(nb));
    malloc_consolidate(av);
    return mALLOc(nb - MALLOC_ALIGN_MASK);
  }

  /* record incoming configuration of top */
  old_size = chunksize(old_top);
  old_end = (char*)(chunk_at_offset(old_top, old_size));
  brk = snd_brk = (char*)(MORECORE_FAILURE);

  /* old top is either empty, or big enough, in use and fenceposted */
  assert((old_top == initial_top(av) && old_size == 0) ||
         ((CHUNK_SIZE_T) (old_size) >= MINSIZE &&
          prev_inuse(old_top)));
  /* precondition: not enough current space to satisfy nb request */
  assert((CHUNK_SIZE_T)(old_size) < (CHUNK_SIZE_T)(nb + MINSIZE));
  /* precondition: all fastbins are consolidated */
  assert(!have_fastchunks(av));

  /* request enough space for nb + pad + overhead, rounded to a page */
  size = nb + av->top_pad + MINSIZE;
  size = (size + pagemask) & ~pagemask;

  brk = (char*)(MORECORE(size));

  if (brk != (char*)(MORECORE_FAILURE)) {
    av->sbrked_mem += size;

    /* if MORECORE extends previous space, just extend top */
    if (brk == old_end && snd_brk == (char*)(MORECORE_FAILURE)) {
      set_head(old_top, (size + old_size) | PREV_INUSE);
    }
    /* otherwise, make adjustments */
    else {
      front_misalign = 0;
      end_misalign = 0;
      correction = 0;
      aligned_brk = brk;

      /* a lower brk than before means MORECORE is not really contiguous */
      if (contiguous(av) && old_size != 0 && brk < old_end) {
        set_noncontiguous(av);
      }

      /* handle contiguous cases */
      if (contiguous(av)) {
        av->sbrked_mem += brk - old_end;

        /* guarantee alignment of the first new chunk */
        front_misalign = (INTERNAL_SIZE_T)chunk2mem(brk) & MALLOC_ALIGN_MASK;
        if (front_misalign > 0) {
          correction = MALLOC_ALIGNMENT - front_misalign;
          aligned_brk += correction;
        }
        /* if not adjacent to old space, we cannot merge with old top */
        correction += old_size;

        /* extend the end of this space to a page boundary */
        end_misalign = (INTERNAL_SIZE_T)(brk + size + correction);
        correction += ((end_misalign + pagemask) & ~pagemask) - end_misalign;
        assert(correction >= 0);
        snd_brk = (char*)(MORECORE(correction));

        if (snd_brk == (char*)(MORECORE_FAILURE)) {
          /* cannot get the correction: find out where we actually are */
          correction = 0;
          snd_brk = (char*)(MORECORE(0));
        }
        else if (snd_brk < brk) {
          /* a foreign sbrk intervened: fall back to a conservative guess */
          snd_brk = brk + size;
          correction = 0;
          set_noncontiguous(av);
        }
      }
      /* handle non-contiguous cases */
      else {
        /* MORECORE must itself return aligned chunks here */
        assert(aligned_OK(chunk2mem(brk)));
        if (snd_brk == (char*)(MORECORE_FAILURE)) {
          snd_brk = (char*)(MORECORE(0));
          av->sbrked_mem += snd_brk - brk - size;
        }
      }

      /* adjust top based on results of the second MORECORE call */
      if (snd_brk != (char*)(MORECORE_FAILURE)) {
        av->top = (mchunkptr)aligned_brk;
        set_head(av->top, (snd_brk - aligned_brk + correction) | PREV_INUSE);
        av->sbrked_mem += correction;

        /* fencepost the old top so it cannot be consolidated with space
           we do not own, then release what is left of it */
        if (old_size != 0) {
          old_size = (old_size - 3*SIZE_SZ) & ~MALLOC_ALIGN_MASK;
          set_head(old_top, old_size | PREV_INUSE);
          chunk_at_offset(old_top, old_size)->size =
            SIZE_SZ|PREV_INUSE;
          chunk_at_offset(old_top, old_size + SIZE_SZ)->size =
            SIZE_SZ|PREV_INUSE;
          if (old_size >= MINSIZE) {
            INTERNAL_SIZE_T tt = av->trim_threshold;
            av->trim_threshold = (INTERNAL_SIZE_T)(-1);
            fREe(chunk2mem(old_top));
            av->trim_threshold = tt;
          }
        }
      }
    }

    /* update statistics */
    sum = av->sbrked_mem;
    if (sum > (CHUNK_SIZE_T)(av->max_sbrked_mem))
      av->max_sbrked_mem = sum;
    sum += av->mmapped_mem;
    if (sum > (CHUNK_SIZE_T)(av->max_total_mem))
      av->max_total_mem = sum;

    check_malloc_state();

    /* finally, do the allocation from the (new) top chunk */
    p = av->top;
    size = chunksize(p);
    if ((CHUNK_SIZE_T)(size) >= (CHUNK_SIZE_T)(nb + MINSIZE)) {
      remainder_size = size - nb;
      remainder = chunk_at_offset(p, nb);
      av->top = remainder;
      set_head(p, nb | PREV_INUSE);
      set_head(remainder, remainder_size | PREV_INUSE);
      check_malloced_chunk(p, nb);
      return chunk2mem(p);
    }
  }

  /* catch all failure paths */
  MALLOC_FAILURE_ACTION;
  return 0;
}

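/*
  Summary of sYSMALLOc(): it is called when the top chunk cannot satisfy
  a request.  It consolidates fastbins if necessary, asks MORECORE (the
  HeapTop-based routine above) for at least nb + top_pad + MINSIZE bytes
  rounded up to a page, splices the new space onto top (fenceposting any
  old top it cannot merge with so it can be freed safely), and then
  carves the request out of the new top chunk.
*/
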
#if __STD_C
static int sYSTRIm(size_t pad, mstate av)
#else
static int sYSTRIm(pad, av) size_t pad; mstate av;
#endif
{
  pagesz = av->pagesize;
  top_size = chunksize(av->top);

  /* release in pagesize units, keeping at least one page */
  extra = ((top_size - pad - MINSIZE + (pagesz-1)) / pagesz - 1) * pagesz;

  if (extra > 0) {
    /* only proceed if the end of memory is where we last set it */
    current_brk = (char*)(MORECORE(0));
    if (current_brk == (char*)(av->top) + top_size) {
      MORECORE(-extra);
      new_brk = (char*)(MORECORE(0));

      if (new_brk != (char*)MORECORE_FAILURE) {
        released = (long)(current_brk - new_brk);
        if (released != 0) {
          /* success: adjust top and the statistics */
          av->sbrked_mem -= released;
          set_head(av->top, (top_size - released) | PREV_INUSE);
          check_malloc_state();
          return 1;
        }
      }
    }
  }
  return 0;
}

#if __STD_C
Void_t* mALLOc(size_t bytes)
#else
Void_t* mALLOc(bytes) size_t bytes;
#endif
{
  mstate av = get_malloc_state();
  INTERNAL_SIZE_T size;
  mchunkptr remainder;
  CHUNK_SIZE_T remainder_size;

  /* convert the request into a usable chunk size */
  checked_request2size(bytes, nb);

  /*
    Bypass search if no chunks have been freed yet; this also handles
    the first call, when the arena is still uninitialized.
  */
  if (!have_anychunks(av)) {
    if (av->max_fast == 0) /* initialization check */
      malloc_consolidate(av);
    goto use_top;
  }

  /* fastbins: exact fit for small requests */
  if ((CHUNK_SIZE_T)(nb) <= (CHUNK_SIZE_T)(av->max_fast)) {
    fb = &(av->fastbins[(fastbin_index(nb))]);
    if ( (victim = *fb) != 0) {
      *fb = victim->fd;
      check_remalloced_chunk(victim, nb);
      return chunk2mem(victim);
    }
  }

  /* smallbins: exact-size bins for small requests */
  if (in_smallbin_range(nb)) {
    idx = smallbin_index(nb);
    bin = bin_at(av,idx);

    if ( (victim = last(bin)) != bin) {
      set_inuse_bit_at_offset(victim, nb);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }
  }
  /* large request: consolidate fastbins before searching */
  else {
    idx = largebin_index(nb);
    if (have_fastchunks(av))
      malloc_consolidate(av);
  }

  /*
    Process recently freed chunks from the unsorted list: take one only
    if it is an exact fit or, for a small request, if it is the
    remainder of the most recent split; bin everything else.
  */
  while ( (victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
    bck = victim->bk;
    size = chunksize(victim);

    /* small request: reuse the last remainder if it is big enough */
    if (in_smallbin_range(nb) &&
        bck == unsorted_chunks(av) &&
        victim == av->last_remainder &&
        (CHUNK_SIZE_T)(size) > (CHUNK_SIZE_T)(nb + MINSIZE)) {

      /* split and reattach the remainder */
      remainder_size = size - nb;
      remainder = chunk_at_offset(victim, nb);
      unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
      av->last_remainder = remainder;
      remainder->bk = remainder->fd = unsorted_chunks(av);

      set_head(victim, nb | PREV_INUSE);
      set_head(remainder, remainder_size | PREV_INUSE);
      set_foot(remainder, remainder_size);

      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    /* remove from unsorted list */
    unsorted_chunks(av)->bk = bck;
    bck->fd = unsorted_chunks(av);

    /* take now instead of binning if exact fit */
    if (size == nb) {
      set_inuse_bit_at_offset(victim, size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    /* place chunk in its bin */
    if (in_smallbin_range(size)) {
      victim_index = smallbin_index(size);
      bck = bin_at(av, victim_index);
    }
    else {
      victim_index = largebin_index(size);
      bck = bin_at(av, victim_index);

      /* if smaller than smallest, place first */
      if ((CHUNK_SIZE_T)(size) < (CHUNK_SIZE_T)(bck->bk->size)) {
        fwd = bck;
        bck = bck->bk;
      }
      /* keep large bins in sorted order */
      else if ((CHUNK_SIZE_T)(size) >=
               (CHUNK_SIZE_T)(FIRST_SORTED_BIN_SIZE)) {
        while ((CHUNK_SIZE_T)(size) < (CHUNK_SIZE_T)(fwd->size))
          fwd = fwd->fd;
        bck = fwd->bk;
      }
    }

    mark_bin(av, victim_index);
  }

  /*
    Large request: search the corresponding bin, smallest first, for a
    chunk that fits.
  */
  if (!in_smallbin_range(nb)) {
    bin = bin_at(av, idx);

    for (victim = last(bin); victim != bin; victim = victim->bk) {
      size = chunksize(victim);

      if ((CHUNK_SIZE_T)(size) >= (CHUNK_SIZE_T)(nb)) {
        remainder_size = size - nb;
        dl_unlink(victim, bck, fwd);

        /* exhaust */
        if (remainder_size < MINSIZE) {
          set_inuse_bit_at_offset(victim, size);
          check_malloced_chunk(victim, nb);
          return chunk2mem(victim);
        }
        /* split */
        remainder = chunk_at_offset(victim, nb);
        unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
        remainder->bk = remainder->fd = unsorted_chunks(av);
        set_head(victim, nb | PREV_INUSE);
        set_head(remainder, remainder_size | PREV_INUSE);
        set_foot(remainder, remainder_size);
        check_malloced_chunk(victim, nb);
        return chunk2mem(victim);
      }
    }
  }

  /*
    Otherwise scan the binmap for a non-empty bin that can satisfy the
    request.
  */
  bin = bin_at(av,idx);
  block = idx2block(idx);
  map = av->binmap[block];
  bit = idx2bit(idx);

  for (;;) {
    /* skip rest of block if there are no more set bits in this block */
    if (bit > map || bit == 0) {
      do {
        if (++block >= BINMAPSIZE) /* out of bins */
          goto use_top;
      } while ( (map = av->binmap[block]) == 0);

      bin = bin_at(av, (block << BINMAPSHIFT));
      bit = 1;
    }

    /* advance to bin with set bit; there must be one */
    while ((bit & map) == 0) {
      bin = next_bin(bin);
      bit <<= 1;
    }

    victim = last(bin);

    /* if a false alarm (empty bin), clear the bit */
    if (victim == bin) {
      av->binmap[block] = map &= ~bit;
      bin = next_bin(bin);
      bit <<= 1;
    }
    else {
      size = chunksize(victim);

      /* the first chunk in this bin is known to be big enough */
      assert((CHUNK_SIZE_T)(size) >= (CHUNK_SIZE_T)(nb));

      remainder_size = size - nb;

      /* exhaust */
      if (remainder_size < MINSIZE) {
        set_inuse_bit_at_offset(victim, size);
        check_malloced_chunk(victim, nb);
        return chunk2mem(victim);
      }

      /* split */
      remainder = chunk_at_offset(victim, nb);

      unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
      remainder->bk = remainder->fd = unsorted_chunks(av);

      /* advertise as last remainder */
      if (in_smallbin_range(nb))
        av->last_remainder = remainder;

      set_head(victim, nb | PREV_INUSE);
      set_head(remainder, remainder_size | PREV_INUSE);
      set_foot(remainder, remainder_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }
  }

  /*
    Finally, carve the request out of the top chunk, or grow the heap
    via sYSMALLOc().
  */
use_top:
  victim = av->top;
  size = chunksize(victim);

  if ((CHUNK_SIZE_T)(size) >= (CHUNK_SIZE_T)(nb + MINSIZE)) {
    remainder_size = size - nb;
    remainder = chunk_at_offset(victim, nb);
    av->top = remainder;
    set_head(victim, nb | PREV_INUSE);
    set_head(remainder, remainder_size | PREV_INUSE);

    check_malloced_chunk(victim, nb);
    return chunk2mem(victim);
  }

  return sYSMALLOc(nb, av);
}

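/*
  Overall search order in mALLOc(), as seen above: an exact-size fastbin,
  then the exact smallbin, then the unsorted list (binning whatever does
  not fit), then the large bins and the binmap scan for the smallest
  chunk that fits (splitting off a remainder of at least MINSIZE), and
  finally the top chunk or, failing that, sYSMALLOc().
*/
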
#if __STD_C
void fREe(Void_t* mem)
#else
void fREe(mem) Void_t* mem;
#endif
{
  mstate av = get_malloc_state();
  INTERNAL_SIZE_T size;
  mchunkptr nextchunk;
  INTERNAL_SIZE_T nextsize;
  INTERNAL_SIZE_T prevsize;

  size = chunksize(p);
  check_inuse_chunk(p);

  /* if eligible, place the chunk on a fastbin */
  if ((CHUNK_SIZE_T)(size) <= (CHUNK_SIZE_T)(av->max_fast)
      && (chunk_at_offset(p, size) != av->top)) {
    fb = &(av->fastbins[fastbin_index(size)]);
  }

  /* consolidate other non-mmapped chunks as they arrive */
  else if (!chunk_is_mmapped(p)) {
    nextchunk = chunk_at_offset(p, size);
    nextsize = chunksize(nextchunk);

    /* consolidate backward */
    if (!prev_inuse(p)) {
      prevsize = p->prev_size;
      p = chunk_at_offset(p, -((long) prevsize));
      dl_unlink(p, bck, fwd);
    }

    if (nextchunk != av->top) {
      /* get and clear the inuse bit, consolidating forward if free */
      nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
      set_head(nextchunk, nextsize);
      if (!nextinuse)
        dl_unlink(nextchunk, bck, fwd);

      /* place the chunk in the unsorted list */
      bck = unsorted_chunks(av);
      set_head(p, size | PREV_INUSE);
      check_free_chunk(p);
    }
    /* if the chunk borders top, consolidate into top instead */
    else {
      set_head(p, size | PREV_INUSE);
    }

    /* when freeing a large space, consolidate fastbins and maybe trim top */
    if ((CHUNK_SIZE_T)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD) {
      if (have_fastchunks(av))
        malloc_consolidate(av);
#ifndef MORECORE_CANNOT_TRIM
      if ((CHUNK_SIZE_T)(chunksize(av->top)) >=
          (CHUNK_SIZE_T)(av->trim_threshold))
        sYSTRIm(av->top_pad, av);
#endif
    }
  }
}

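/*
  fREe() places small chunks on a fastbin without coalescing; other
  chunks are coalesced with free neighbours and parked on the unsorted
  list, or merged into top when they border it.  A free that produces a
  chunk of FASTBIN_CONSOLIDATION_THRESHOLD bytes or more also triggers
  malloc_consolidate(), and trims top via sYSTRIm() once it exceeds
  trim_threshold.
*/
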
#if __STD_C
static void malloc_consolidate(mstate av)
#else
static void malloc_consolidate(av) mstate av;
#endif
{
  mchunkptr unsorted_bin;
  mchunkptr first_unsorted;
  mchunkptr nextchunk;
  INTERNAL_SIZE_T size;
  INTERNAL_SIZE_T nextsize;
  INTERNAL_SIZE_T prevsize;

  /*
    If max_fast is 0, av has not yet been initialized, so do that
    instead of consolidating.
  */
  if (av->max_fast != 0) {
    clear_fastchunks(av);
    unsorted_bin = unsorted_chunks(av);

    /*
      Remove each chunk from its fast bin, consolidate it with its
      neighbours, and place the result in the unsorted bin.
    */
    maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
    fb = &(av->fastbins[0]);
    do {
      if ( (p = *fb) != 0) {
        do {
          check_inuse_chunk(p);

          /* slightly streamlined version of the consolidation in free() */
          size = p->size & ~PREV_INUSE;
          nextchunk = chunk_at_offset(p, size);
          nextsize = chunksize(nextchunk);

          if (!prev_inuse(p)) {
            prevsize = p->prev_size;
            p = chunk_at_offset(p, -((long) prevsize));
            dl_unlink(p, bck, fwd);
          }

          if (nextchunk != av->top) {
            nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
            set_head(nextchunk, nextsize);
            if (!nextinuse)
              dl_unlink(nextchunk, bck, fwd);

            first_unsorted = unsorted_bin->fd;
            unsorted_bin->fd = p;
            first_unsorted->bk = p;

            set_head(p, size | PREV_INUSE);
            p->bk = unsorted_bin;
            p->fd = first_unsorted;
          }
          else {
            set_head(p, size | PREV_INUSE);
          }
        } while ( (p = nextp) != 0);
      }
    } while (fb++ != maxfb);
  }
  else {
    malloc_init_state(av);
    check_malloc_state();
  }
}

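/*
  malloc_consolidate() empties every fastbin, coalescing each chunk with
  its neighbours and moving the result to the unsorted list (or into
  top).  It doubles as the lazy initializer: a max_fast of 0 means the
  arena was never set up, so malloc_init_state() is called instead.
*/
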
#if __STD_C
Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
#else
Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
#endif
{
  mstate av = get_malloc_state();
  INTERNAL_SIZE_T oldsize;
  INTERNAL_SIZE_T newsize;
  mchunkptr remainder;
  CHUNK_SIZE_T remainder_size;
  CHUNK_SIZE_T copysize;
  unsigned int ncopies;

#ifdef REALLOC_ZERO_BYTES_FREES
  if (bytes == 0) {
    fREe(oldmem);
    return 0;
  }
#endif

  /* realloc of a null pointer is the same as malloc */
  if (oldmem == 0)
    return mALLOc(bytes);

  checked_request2size(bytes, nb);

  oldp = mem2chunk(oldmem);
  oldsize = chunksize(oldp);

  check_inuse_chunk(oldp);

  if (!chunk_is_mmapped(oldp)) {

    if ((CHUNK_SIZE_T)(oldsize) >= (CHUNK_SIZE_T)(nb)) {
      /* already big enough; split off the remainder below */
      newp = oldp;
      newsize = oldsize;
    }
    else {
      next = chunk_at_offset(oldp, oldsize);

      /* try to expand forward into top */
      if (next == av->top &&
          (CHUNK_SIZE_T)(newsize = oldsize + chunksize(next)) >=
          (CHUNK_SIZE_T)(nb + MINSIZE)) {
        set_head_size(oldp, nb);
        av->top = chunk_at_offset(oldp, nb);
        set_head(av->top, (newsize - nb) | PREV_INUSE);
        return chunk2mem(oldp);
      }

      /* try to expand forward into the next (free) chunk */
      else if (next != av->top &&
               !inuse(next) &&
               (CHUNK_SIZE_T)(newsize = oldsize + chunksize(next)) >=
               (CHUNK_SIZE_T)(nb)) {
        newp = oldp;
        dl_unlink(next, bck, fwd);
      }

      /* otherwise allocate, copy, free */
      else {
        newmem = mALLOc(nb - MALLOC_ALIGN_MASK);

        newp = mem2chunk(newmem);
        newsize = chunksize(newp);

        copysize = oldsize - SIZE_SZ;
        s = (INTERNAL_SIZE_T*)(oldmem);
        d = (INTERNAL_SIZE_T*)(newmem);
        ncopies = copysize / sizeof(INTERNAL_SIZE_T);
        assert(ncopies >= 3);
        memcpy(d, s, copysize);

        check_inuse_chunk(newp);
        return chunk2mem(newp);
      }
    }

    /* if possible, free extra space in the old or extended chunk */
    assert((CHUNK_SIZE_T)(newsize) >= (CHUNK_SIZE_T)(nb));

    remainder_size = newsize - nb;

    if (remainder_size < MINSIZE) { /* not enough extra to split off */
      set_head_size(newp, newsize);
      set_inuse_bit_at_offset(newp, newsize);
    }
    else { /* split the remainder */
      remainder = chunk_at_offset(newp, nb);
      set_head_size(newp, nb);
      set_head(remainder, remainder_size | PREV_INUSE);
      /* mark the remainder as inuse so fREe() won't complain */
      set_inuse_bit_at_offset(remainder, remainder_size);
      fREe(chunk2mem(remainder));
    }

    check_inuse_chunk(newp);
    return chunk2mem(newp);
  }

  /* handle chunks that were allocated via mmap */
  else {
    INTERNAL_SIZE_T offset = oldp->prev_size;
    size_t pagemask = av->pagesize - 1;

    /* note the extra SIZE_SZ overhead */
    newsize = (nb + offset + SIZE_SZ + pagemask) & ~pagemask;

    /* no need to remap if still within the same page */
    if (oldsize == newsize - offset)
      return oldmem;

    cp = (char*)mremap((char*)oldp - offset, oldsize + offset, newsize, 1);

    if (cp != (char*)MORECORE_FAILURE) {
      newp = (mchunkptr)(cp + offset);
      set_head(newp, (newsize - offset)|IS_MMAPPED);

      assert(aligned_OK(chunk2mem(newp)));
      assert((newp->prev_size == offset));

      /* update statistics */
      sum = av->mmapped_mem += newsize - oldsize;
      if (sum > (CHUNK_SIZE_T)(av->max_mmapped_mem))
        av->max_mmapped_mem = sum;
      sum += av->sbrked_mem;
      if (sum > (CHUNK_SIZE_T)(av->max_total_mem))
        av->max_total_mem = sum;

      return chunk2mem(newp);
    }

    /* can still reuse the old block if it is big enough */
    if ((CHUNK_SIZE_T)(oldsize) >= (CHUNK_SIZE_T)(nb + SIZE_SZ))
      return oldmem;

    /* must alloc, copy, free */
    newmem = mALLOc(nb - MALLOC_ALIGN_MASK);
    memcpy(newmem, oldmem, oldsize - 2*SIZE_SZ);

    check_malloc_state();
    MALLOC_FAILURE_ACTION;
  }
}

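/*
  rEALLOc() tries to satisfy the new size in place first: reuse the old
  chunk if it is already large enough, grow into top, or absorb a free
  successor chunk.  Only if none of that works does it malloc a new
  block, copy the old contents and free the original; mmapped chunks get
  their own path (mremap() where available, otherwise reuse-or-copy).
*/
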
#if __STD_C
Void_t* mEMALIGn(size_t alignment, size_t bytes)
#else
Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
#endif
{
  INTERNAL_SIZE_T newsize;
  INTERNAL_SIZE_T leadsize;
  mchunkptr remainder;
  CHUNK_SIZE_T remainder_size;
  INTERNAL_SIZE_T size;

  /* if alignment is no stricter than the default, just use malloc */
  if (alignment <= MALLOC_ALIGNMENT)
    return mALLOc(bytes);

  /* otherwise, ensure that it is at least a minimum chunk size */
  if (alignment < MINSIZE) alignment = MINSIZE;

  /* make sure alignment is a power of two */
  if ((alignment & (alignment - 1)) != 0) {
    size_t a = MALLOC_ALIGNMENT * 2;
    while ((CHUNK_SIZE_T)a < (CHUNK_SIZE_T)alignment) a <<= 1;
    alignment = a;
  }

  checked_request2size(bytes, nb);

  /* over-allocate so that an aligned chunk can be carved out */
  m = (char*)(mALLOc(nb + alignment + MINSIZE));

  if (m == 0)
    return 0; /* propagate failure */

  if ((((PTR_UINT)(m)) % alignment) != 0) { /* misaligned */

    /* find an aligned spot inside the chunk */
    brk = (char*)mem2chunk((PTR_UINT)(((PTR_UINT)(m + alignment - 1)) &
                                      -((signed long) alignment)));
    if ((CHUNK_SIZE_T)(brk - (char*)(p)) < MINSIZE)
      brk += alignment;

    newp = (mchunkptr)brk;
    leadsize = brk - (char*)(p);
    newsize = chunksize(p) - leadsize;

    /* for mmapped chunks, just adjust the offset */
    if (chunk_is_mmapped(p)) {
      newp->prev_size = p->prev_size + leadsize;
      set_head(newp, newsize|IS_MMAPPED);
      return chunk2mem(newp);
    }

    /* otherwise, give back the leader and use the rest */
    set_head(newp, newsize | PREV_INUSE);
    set_inuse_bit_at_offset(newp, newsize);
    set_head_size(p, leadsize);

    assert(newsize >= nb &&
           (((PTR_UINT)(chunk2mem(p))) % alignment) == 0);
  }

  /* also give back spare room at the end */
  if (!chunk_is_mmapped(p)) {
    size = chunksize(p);
    if ((CHUNK_SIZE_T)(size) > (CHUNK_SIZE_T)(nb + MINSIZE)) {
      remainder_size = size - nb;
      remainder = chunk_at_offset(p, nb);
      set_head(remainder, remainder_size | PREV_INUSE);
      set_head_size(p, nb);
      fREe(chunk2mem(remainder));
    }
  }

  check_inuse_chunk(p);
  return chunk2mem(p);
}

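/*
  mEMALIGn() over-allocates by alignment + MINSIZE, locates the first
  correctly aligned chunk boundary inside the block, gives the misaligned
  leading part (and, for ordinary chunks, any oversized tail) back via
  fREe(), and returns the aligned chunk.
*/
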
#if __STD_C
Void_t* cALLOc(size_t n_elements, size_t elem_size)
#else
Void_t* cALLOc(n_elements, elem_size) size_t n_elements; size_t elem_size;
#endif
{
  CHUNK_SIZE_T clearsize;
  CHUNK_SIZE_T nclears;
  Void_t* mem = mALLOc(n_elements * elem_size);

  if (mem != 0) {
    p = mem2chunk(mem);
    /* mmapped memory is already zeroed */
    if (!chunk_is_mmapped(p)) {
      d = (INTERNAL_SIZE_T*)mem;
      clearsize = chunksize(p) - SIZE_SZ;
      nclears = clearsize / sizeof(INTERNAL_SIZE_T);
      assert(nclears >= 3);
      memset(d, 0, clearsize);
    }
  }
  return mem;
}

#if __STD_C
void cFREe(Void_t *mem)
#else
void cFREe(mem) Void_t *mem;
#endif
{
  fREe(mem);
}

#if __STD_C
Void_t** iCALLOc(size_t n_elements, size_t elem_size, Void_t* chunks[])
#else
Void_t** iCALLOc(n_elements, elem_size, chunks)
size_t n_elements; size_t elem_size; Void_t* chunks[];
#endif
{
  size_t sz = elem_size; /* serves as a one-element sizes array */
  /* opts of 3 means: all elements the same size, and clear them */
  return iALLOc(n_elements, &sz, 3, chunks);
}

#if __STD_C
Void_t** iCOMALLOc(size_t n_elements, size_t sizes[], Void_t* chunks[])
#else
Void_t** iCOMALLOc(n_elements, sizes, chunks)
size_t n_elements; size_t sizes[]; Void_t* chunks[];
#endif
{
  return iALLOc(n_elements, sizes, 0, chunks);
}

#if __STD_C
static Void_t** iALLOc(size_t n_elements, size_t* sizes, int opts,
                       Void_t* chunks[])
#else
static Void_t** iALLOc(n_elements, sizes, opts, chunks)
size_t n_elements; size_t* sizes; int opts; Void_t* chunks[];
#endif
{
  mstate av = get_malloc_state();
  INTERNAL_SIZE_T element_size;
  INTERNAL_SIZE_T contents_size;
  INTERNAL_SIZE_T array_size;
  INTERNAL_SIZE_T remainder_size;
  mchunkptr array_chunk;
  INTERNAL_SIZE_T size;

  /* ensure initialization */
  if (av->max_fast == 0) malloc_consolidate(av);

  /* compute array length, if needed */
  if (chunks != 0) {
    if (n_elements == 0)
      return chunks; /* nothing to do */
    marray = chunks;
    array_size = 0;
  }
  else {
    /* if empty req, must still return chunk representing empty array */
    if (n_elements == 0)
      return (Void_t**) mALLOc(0);
    marray = 0;
    array_size = request2size(n_elements * (sizeof(Void_t*)));
  }

  /* compute total element size */
  if (opts & 0x1) { /* all elements the same size */
    element_size = request2size(*sizes);
    contents_size = n_elements * element_size;
  }
  else { /* add up all the sizes */
    element_size = 0;
    contents_size = 0;
    for (i = 0; i != n_elements; ++i)
      contents_size += request2size(sizes[i]);
  }

  /* subtract out alignment bytes to minimize overallocation */
  size = contents_size + array_size - MALLOC_ALIGN_MASK;

  assert(!chunk_is_mmapped(p));
  remainder_size = chunksize(p);

  if (opts & 0x2) { /* optionally clear the elements */
    memset(mem, 0, remainder_size - SIZE_SZ - array_size);
  }

  /* if not provided, allocate the pointer array as final part of chunk */
  if (marray == 0) {
    array_chunk = chunk_at_offset(p, contents_size);
    marray = (Void_t**) (chunk2mem(array_chunk));
    set_head(array_chunk, (remainder_size - contents_size) | PREV_INUSE);
    remainder_size = contents_size;
  }

  /* split out the elements */
  for (i = 0; ; ++i) {
    marray[i] = chunk2mem(p);
    if (i != n_elements-1) {
      if (element_size != 0)
        size = element_size;
      else
        size = request2size(sizes[i]);
      remainder_size -= size;
      set_head(p, size | PREV_INUSE);
      p = chunk_at_offset(p, size);
    }
    else { /* the final element absorbs any overallocation slop */
      set_head(p, remainder_size | PREV_INUSE);
      break;
    }
  }

#if DEBUG
  if (marray != chunks) {
    /* the final element must have exactly exhausted the chunk */
    if (element_size != 0)
      assert(remainder_size == element_size);
    else
      assert(remainder_size == request2size(sizes[i]));
    check_inuse_chunk(mem2chunk(marray));
  }
  for (i = 0; i != n_elements; ++i)
    check_inuse_chunk(mem2chunk(marray[i]));
#endif

  return marray;
}

#if __STD_C
Void_t* vALLOc(size_t bytes)
#else
Void_t* vALLOc(bytes) size_t bytes;
#endif
{
  /* ensure initialization */
  mstate av = get_malloc_state();
  if (av->max_fast == 0) malloc_consolidate(av);
  return mEMALIGn(av->pagesize, bytes);
}

#if __STD_C
Void_t* pVALLOc(size_t bytes)
#else
Void_t* pVALLOc(bytes) size_t bytes;
#endif
{
  mstate av = get_malloc_state();
  size_t pagesz;

  /* ensure initialization */
  if (av->max_fast == 0) malloc_consolidate(av);
  pagesz = av->pagesize;
  return mEMALIGn(pagesz, (bytes + pagesz - 1) & ~(pagesz - 1));
}

#if __STD_C
int mTRIm(size_t pad)
#else
int mTRIm(pad) size_t pad;
#endif
{
  mstate av = get_malloc_state();
  /* ensure initialization/consolidation */
  malloc_consolidate(av);

#ifndef MORECORE_CANNOT_TRIM
  return sYSTRIm(pad, av);
#endif
}

#if __STD_C
size_t mUSABLe(Void_t* mem)
#else
size_t mUSABLe(mem) Void_t* mem;
#endif
{
  if (chunk_is_mmapped(p))
    return chunksize(p) - 2*SIZE_SZ;
  return chunksize(p) - SIZE_SZ;
}

struct mallinfo mALLINFo()
{
  mstate av = get_malloc_state();
  struct mallinfo mi;
  INTERNAL_SIZE_T avail;
  INTERNAL_SIZE_T fastavail;

  /* ensure initialization */
  if (av->top == 0) malloc_consolidate(av);

  check_malloc_state();

  /* account for top */
  avail = chunksize(av->top);

  /* traverse fastbins */
  for (i = 0; i < NFASTBINS; ++i) {
    for (p = av->fastbins[i]; p != 0; p = p->fd) {
      fastavail += chunksize(p);
    }
  }

  /* traverse regular bins */
  for (i = 1; i < NBINS; ++i) {
    for (p = last(b); p != b; p = p->bk) {
      avail += chunksize(p);
    }
  }

  mi.smblks = nfastblocks;
  mi.ordblks = nblocks;
  mi.fordblks = avail;
  mi.uordblks = av->sbrked_mem - avail;
  mi.arena = av->sbrked_mem;
  mi.fsmblks = fastavail;
  mi.keepcost = chunksize(av->top);
  mi.usmblks = av->max_total_mem;
  return mi;
}

Yap_givemallinfo(void)
{
  struct mallinfo mi = mALLINFo();

  struct mallinfo mi = mALLINFo();

  fprintf(stderr, "max system bytes = %10lu\n",
          (CHUNK_SIZE_T)(mi.usmblks));
  fprintf(stderr, "system bytes     = %10lu\n",
          (CHUNK_SIZE_T)(mi.arena + mi.hblkhd));
  fprintf(stderr, "in use bytes     = %10lu\n",
          (CHUNK_SIZE_T)(mi.uordblks + mi.hblkhd));

#if __STD_C
int mALLOPt(int param_number, int value)
#else
int mALLOPt(param_number, value) int param_number; int value;
#endif
{
  mstate av = get_malloc_state();
  /* ensure initialization/consolidation */
  malloc_consolidate(av);

  switch(param_number) {
  case M_MXFAST:
    if (value >= 0 && value <= MAX_FAST_SIZE) {
      set_max_fast(av, value);
      return 1;
    }
    return 0;

  case M_TRIM_THRESHOLD:
    av->trim_threshold = value;
    return 1;

  case M_TOP_PAD:
    av->top_pad = value;
    return 1;

void
Yap_initdlmalloc(void)
{
  HeapTop = (ADDR)ALIGN_SIZE(HeapTop,16);
  Yap_NOfMemoryHoles = 0;
  Yap_av = (struct malloc_state *)HeapTop;
  memset((void *)Yap_av, 0, sizeof(struct malloc_state));
  HeapTop += sizeof(struct malloc_state);
  HeapTop = (ADDR)ALIGN_SIZE(HeapTop,2*SIZEOF_LONG_LONG_INT);
  HeapMax = HeapTop-Yap_HeapBase;
}

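/*
  Yap_initdlmalloc() carves the malloc_state itself out of the base of
  YAP's heap area: Yap_av sits at the (16-byte aligned) HeapTop, HeapTop
  is then bumped past it and re-aligned, and everything above it is what
  the MORECORE replacement will hand to dlmalloc.  HeapMax records the
  current offset of HeapTop from Yap_HeapBase.
*/
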
void Yap_RestoreDLMalloc(void)
{
  mstate av = get_malloc_state();
  unsigned int binbit;
  INTERNAL_SIZE_T size;
  CHUNK_SIZE_T total = 0;

  /* same consistency checks as do_check_malloc_state() */
  assert(sizeof(INTERNAL_SIZE_T) <= sizeof(char*));
  assert((MALLOC_ALIGNMENT & (MALLOC_ALIGNMENT-1)) == 0);
  if (av->top == 0 || av->top == initial_top(av))
    return;
  assert((av->pagesize & (av->pagesize-1)) == 0);
  assert(get_max_fast(av) <= request2size(MAX_FAST_SIZE));
  max_fast_bin = fastbin_index(av->max_fast);

  /* relocate the chunk pointers stored in the arena itself */
  av->top = ChunkPtrAdjust(av->top);
  if (av->last_remainder) {
    av->last_remainder = ChunkPtrAdjust(av->last_remainder);
  }

  /* relocate and re-check the fastbins */
  for (i = 0; i < NFASTBINS; ++i) {
    if (av->fastbins[i]) {
      av->fastbins[i] = ChunkPtrAdjust(av->fastbins[i]);
    }
    p = av->fastbins[i];
    /* all bins past max_fast are empty */
    if (i > max_fast_bin)
      assert(p == 0);
    check_inuse_chunk(p);
    total += chunksize(p);
    assert(fastbin_index(chunksize(p)) == i);
    p->fd = ChunkPtrAdjust(p->fd);
    p->bk = ChunkPtrAdjust(p->bk);
  }

  if (total != 0)
    assert(have_fastchunks(av));
  else if (!have_fastchunks(av))
    assert(total == 0);

  /* relocate the bin headers */
  for (i = 0; i < NBINS*2; i++) {
    av->bins[i] = ChunkPtrAdjust(av->bins[i]);
  }

  /* relocate and re-check the normal bins */
  for (i = 1; i < NBINS; ++i) {
    binbit = get_binmap(av,i);
    empty = last(b) == b;

    for (p = last(b); p != b; p = p->bk) {
      check_free_chunk(p);
      p->fd = ChunkPtrAdjust(p->fd);
      p->bk = ChunkPtrAdjust(p->bk);
      size = chunksize(p);
      idx = bin_index(size);
      if ((CHUNK_SIZE_T) size >= (CHUNK_SIZE_T)(FIRST_SORTED_BIN_SIZE)) {
        assert(p->bk == b ||
               (CHUNK_SIZE_T)chunksize(p->bk) >=
               (CHUNK_SIZE_T)chunksize(p));
      }
      for (q = next_chunk(p);
           (q != av->top && inuse(q) &&
            (CHUNK_SIZE_T)(chunksize(q)) >= MINSIZE);
           q = next_chunk(q)) {
        check_inuse_chunk(q);
      }
    }
  }

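/*
  Yap_RestoreDLMalloc() is the saved-state counterpart of
  do_check_malloc_state(): after a saved state is mapped at a possibly
  different address, every chunk pointer stored inside the arena (top,
  last_remainder, the fastbin heads and their fd chains, the bin headers
  and the fd/bk links of every free chunk) is rebased with
  ChunkPtrAdjust(), which simply adds LOCAL_HDiff (presumably the offset
  between the saved and the current heap base) while re-running the same
  consistency checks.
*/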