/*
  This is a version (aka dlmalloc) of malloc/free/realloc written by
  Doug Lea and released to the public domain.  Use, modify, and
  redistribute this code without permission or acknowledgement in any
  way you wish.  Send questions, comments, complaints, performance
  data, etc to dl@cs.oswego.edu

  VERSION 2.7.2 Sat Aug 17 09:07:30 2002  Doug Lea  (dl at gee)

  Note: There may be an updated version of this malloc obtainable at
           ftp://gee.cs.oswego.edu/pub/misc/malloc.c
  Check before installing!

  Hacked up for uClibc by Erik Andersen <andersen@codepoet.org>
*/

#include <features.h>
#include <stddef.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include "malloc.h"


/* ------------------------------ memalign ------------------------------ */
void* memalign(size_t alignment, size_t bytes)
{
    size_t nb;             /* padded request size */
    char*           m;              /* memory returned by malloc call */
    mchunkptr       p;              /* corresponding chunk */
    char*           _brk;           /* alignment point within p */
    mchunkptr       newp;           /* chunk to return */
    size_t newsize;        /* its size */
    size_t leadsize;       /* leading space before alignment point */
    mchunkptr       remainder;      /* spare room at end to split off */
    unsigned long   remainder_size; /* its size */
    size_t size;
    void *retval;

    /* If less alignment is needed than we provide anyway, just relay to malloc */

    if (alignment <= MALLOC_ALIGNMENT) return malloc(bytes);

    /* Otherwise, ensure that it is at least a minimum chunk size */

    if (alignment < MINSIZE) alignment = MINSIZE;

    /* Make sure alignment is a power of 2 (in case MINSIZE is not). */
    if ((alignment & (alignment - 1)) != 0) {
	size_t a = MALLOC_ALIGNMENT * 2;
	while ((unsigned long)a < (unsigned long)alignment) a <<= 1;
	alignment = a;
    }
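    /* For example, with a typical MALLOC_ALIGNMENT of 8, a non-power-of-two
     * request such as 24 enters the loop above at 16 and leaves as 32. */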

    checked_request2size(bytes, nb);
    __MALLOC_LOCK;

    /* Strategy: find a spot within that chunk that meets the alignment
     * request, and then possibly free the leading and trailing space.  */


    /* Call malloc with worst case padding to hit alignment. */
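    /* The leading gap given back below is always smaller than
     * alignment + MINSIZE, so requesting that much extra space guarantees
     * that an aligned chunk of at least nb bytes remains. */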

    m = (char*)(malloc(nb + alignment + MINSIZE));

    if (m == 0) {
	retval = 0; /* propagate failure */
	goto DONE;
    }

    p = mem2chunk(m);

    if ((((unsigned long)(m)) % alignment) != 0) { /* misaligned */

	/*
	   Find an aligned spot inside chunk.  Since we need to give back
	   leading space in a chunk of at least MINSIZE, if the first
	   calculation places us at a spot with less than MINSIZE leader,
	   we can move to the next aligned spot -- we've allocated enough
	   total room so that this is always possible.
	   */

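	/* Round the user pointer m up to the next multiple of alignment
	 * (alignment is a power of two, so -alignment is the needed mask),
	 * then step back to the corresponding chunk header. */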
	_brk = (char*)mem2chunk((unsigned long)(((unsigned long)(m + alignment - 1)) &
		    -((signed long) alignment)));
	if ((unsigned long)(_brk - (char*)(p)) < MINSIZE)
	    _brk += alignment;

	newp = (mchunkptr)_brk;
	leadsize = _brk - (char*)(p);
	newsize = chunksize(p) - leadsize;

	/* For mmapped chunks, just adjust offset */
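	/* (prev_size of an mmapped chunk holds its offset from the start of
	 * the mapping, so free() can still locate and munmap the whole
	 * region after the chunk header moves forward by leadsize.) */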
	if (chunk_is_mmapped(p)) {
	    newp->prev_size = p->prev_size + leadsize;
	    set_head(newp, newsize|IS_MMAPPED);
	    retval = chunk2mem(newp);
	    goto DONE;
	}

	/* Otherwise, give back leader, use the rest */
	set_head(newp, newsize | PREV_INUSE);
	set_inuse_bit_at_offset(newp, newsize);
	set_head_size(p, leadsize);
	free(chunk2mem(p));
	p = newp;

	assert (newsize >= nb &&
		(((unsigned long)(chunk2mem(p))) % alignment) == 0);
    }

    /* Also give back spare room at the end */
    if (!chunk_is_mmapped(p)) {
	size = chunksize(p);
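	/* Split off a trailing remainder only if it is large enough to
	 * stand on its own as a free chunk of at least MINSIZE. */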
	if ((unsigned long)(size) > (unsigned long)(nb + MINSIZE)) {
	    remainder_size = size - nb;
	    remainder = chunk_at_offset(p, nb);
	    set_head(remainder, remainder_size | PREV_INUSE);
	    set_head_size(p, nb);
	    free(chunk2mem(remainder));
	}
    }

    check_inuse_chunk(p);
    retval = chunk2mem(p);

 DONE:
    __MALLOC_UNLOCK;
    return retval;
}
weak_alias(memalign, aligned_alloc)
libc_hidden_def(memalign)
/* glibc compatibility */
weak_alias(memalign, __libc_memalign)