#include <fixed_size_allocator.hpp>
#include <common.hpp>

#include <algorithm>
#include <cassert>
#include <new>

#include <boost/bind.hpp>
00006
00007 using namespace GCpp;
00008 using namespace std;
00009
namespace
{

// Guards FixedSizeAllocatorManager::f_allocators; currently taken by
// forget_allocator (see below).
GCPP_MUTEX global_allocators_mutex;

}

// Global registry of every FixedSizeAllocator constructed with
// p_make_known == true (see register_allocator / forget_allocator).
IntrusiveList<FixedSizeAllocator> FixedSizeAllocatorManager::f_allocators;
00018
// True when p_block lies inside the storage of any registered allocator.
// Linear scan delegating to FixedSizeAllocator::contains per allocator.
// NOTE(review): reads f_allocators WITHOUT global_allocators_mutex, although
// forget_allocator locks that mutex to mutate the list -- presumably callers
// quiesce other threads first (GC stop-the-world?); confirm before relying on
// this concurrently with allocator destruction.
bool FixedSizeAllocatorManager::contains (void* p_block)
{
return find_if (f_allocators.begin (), f_allocators.end (), boost::bind (&FixedSizeAllocator::contains, _1, p_block)) != f_allocators.end ();
}
00023
00024 void FixedSizeAllocatorManager::register_allocator (FixedSizeAllocator* p_item)
00025 {
00026 f_allocators.push_back (p_item);
00027 }
00028
// Removes p_item from the global registry under global_allocators_mutex.
// Called from the allocator's destructor -- including for allocators built
// with p_make_known == false (assumes IntrusiveList::remove tolerates
// never-linked nodes with next/prior == 0; TODO confirm).
void FixedSizeAllocatorManager::forget_allocator (FixedSizeAllocator* p_item)
{
    GCPP_LOCK lck (global_allocators_mutex);
    f_allocators.remove (p_item);
}
00034
00035 namespace
00036 {
00037
// A fixed-size-block arena holding 255 blocks.  Free blocks form an
// intrusive singly linked chain: the first byte of each free block stores
// the index of the next free block, and 255 is the end-of-chain marker
// (valid block indices are 0..254).
struct Bucket
{
    Byte f_available;   // number of free blocks remaining (0..255)
    Byte f_free;        // index of the first free block; 255 == chain end
    Byte* f_data;       // start of the payload area (follows this header)

    Bucket (size_t p_block_size, Byte* p_data);
    void* allocate (size_t p_block_size);             // pop a block; 0 when full
    void release (void* p_ptr, size_t p_block_size);  // push a block back
    bool validate (size_t p_block_size);              // free-chain sanity check
};
00049
00050 inline
00051 Bucket* BUCKET (void* p_ptr)
00052 {
00053 return static_cast<Bucket*> (p_ptr);
00054 }
00055
00056 inline
00057 Byte* BYTE (void* p_ptr)
00058 {
00059 return static_cast<Byte*> (p_ptr);
00060 }
00061
00062 Bucket::Bucket (size_t p_block_size, Byte* p_data)
00063 : f_available (255),
00064 f_free (0),
00065 f_data (p_data)
00066 {
00067 Byte blk (0);
00068
00069 for (Byte* ptr (f_data); blk < 255; ptr += p_block_size)
00070 *ptr = ++blk;
00071
00072 assert (validate (p_block_size));
00073 }
00074
00075 void* Bucket::allocate (size_t p_block_size)
00076 {
00077 assert (validate (p_block_size));
00078
00079 if (f_available == 0)
00080 return 0;
00081
00082 Byte* ret (f_data + f_free * p_block_size);
00083 f_free = *ret;
00084 --f_available;
00085
00086 assert (f_free != 255 or f_available == 0);
00087 assert (validate (p_block_size));
00088
00089 return ret;
00090 }
00091
00092 void Bucket::release (void* p_ptr, size_t p_block_size)
00093 {
00094 assert (p_ptr >= f_data);
00095 assert (p_ptr < f_data + 255 * p_block_size);
00096 assert (validate (p_block_size));
00097
00098 Byte* rel (BYTE (p_ptr));
00099
00100 *rel = f_free;
00101 f_free = static_cast<Byte> ((rel - f_data) / p_block_size);
00102
00103 assert (f_free == (rel - f_data) / p_block_size);
00104 assert (f_free != 255);
00105
00106 ++f_available;
00107
00108 assert (validate (p_block_size));
00109 }
00110
00111 bool Bucket::validate (size_t p_block_size)
00112 {
00113 Byte chk (f_free);
00114 Byte cnt (f_available);
00115
00116 while (cnt--)
00117 {
00118 if (chk == 255)
00119 return false;
00120
00121 chk = *(f_data + chk * p_block_size);
00122 }
00123
00124 return true;
00125 }
00126
00127 bool sort_buckets (void* p_bucket_1, void* p_bucket_2)
00128 {
00129 return BUCKET (p_bucket_1)->f_data < BUCKET (p_bucket_2)->f_data;
00130 }
00131
00132 bool bounding_bucket (void* p_bucket, void* p_ptr, size_t p_block_size)
00133 {
00134 return BYTE (p_ptr) > BUCKET (p_bucket)->f_data + (p_block_size << 8);
00135 }
00136
00137 bool containing_bucket (void* p_bucket, void* p_ptr, size_t p_block_size)
00138 {
00139 return BYTE (p_ptr) >= BUCKET (p_bucket)->f_data and BYTE (p_ptr) < BUCKET (p_bucket)->f_data + (p_block_size << 8);
00140 }
00141
00142 bool lesser_bucket (void* p_bucket, void* p_ptr, size_t p_block_size)
00143 {
00144 return BUCKET (p_bucket)->f_data + (p_block_size << 8) < BYTE (p_ptr);
00145 }
00146
// Debug helper: assert that the bucket's free chain is consistent.
// (No-op in NDEBUG builds.)
void check_bucket (void* p_bucket, size_t p_block_size)
{
    assert (BUCKET (p_bucket)->validate (p_block_size));
}
00151
// Construct the p_item-th Bucket inside the slab at p_base via placement
// new (declared in <new>).  Each slab slot is a Bucket header immediately
// followed by its payload of 256 * p_block_size bytes.
inline
Bucket* create_bucket (Byte* p_base, size_t p_item, size_t p_block_size)
{
    Byte* dat (p_base + p_item * (sizeof (Bucket) + (p_block_size << 8)));
    return new (dat) Bucket (p_block_size, dat + sizeof (Bucket));
}
00158
00159 inline
00160 Byte* allocate_bucket_space (size_t p_block_size, size_t p_count)
00161 {
00162 return OS_malloc (p_count * (sizeof (Bucket) + (p_block_size << 8)));
00163 }
00164
00165 }
00166
00167 FixedSizeAllocator::FixedSizeAllocator (size_t p_size, bool p_make_known)
00168 : f_block_size (p_size)
00169 {
00170 size_t bks (10000 / (p_size * p_size));
00171 bks = bks ? bks : 1;
00172
00173 f_buckets.resize (bks);
00174 Byte* dat (allocate_bucket_space (f_block_size, bks));
00175 f_kill_list.push_back (dat);
00176
00177 for (size_t idx (0); idx < f_buckets.size (); ++idx)
00178 {
00179 f_buckets[idx] = create_bucket (dat, idx, f_block_size);
00180 f_free_list.push_back (f_buckets[idx]);
00181 }
00182
00183 std::sort (f_buckets.begin (), f_buckets.end (), sort_buckets);
00184
00185 f_last_free = f_buckets[0];
00186 f_last_malloc = f_buckets[0];
00187
00188 if (p_make_known)
00189 FixedSizeAllocatorManager::register_allocator (this);
00190 else
00191 {
00192 next = 0;
00193 prior = 0;
00194 }
00195 }
00196
00197 FixedSizeAllocator::~FixedSizeAllocator ()
00198 {
00199 FixedSizeAllocatorManager::forget_allocator (this);
00200
00201 std::list<void*>::iterator itr (f_kill_list.begin ());
00202 std::list<void*>::iterator end (f_kill_list.end ());
00203
00204 for (; itr != end; ++itr)
00205 OS_free (static_cast<Byte*> (*itr));
00206 }
00207
00208 void* FixedSizeAllocator::find_allocation_bucket ()
00209 {
00210 while (not f_free_list.empty ())
00211 {
00212 void* ret (BUCKET (f_free_list.back ())->allocate (f_block_size));
00213
00214 if (ret)
00215 {
00216 f_last_malloc = f_free_list.back ();
00217 return ret;
00218 }
00219 else
00220 f_free_list.pop_back ();
00221 }
00222
00223 Byte* dat (allocate_bucket_space (f_block_size, 1));
00224 f_kill_list.push_back (dat);
00225 f_last_malloc = create_bucket (dat, 0, f_block_size);
00226 f_buckets.insert (std::lower_bound (f_buckets.begin (), f_buckets.end (), f_last_malloc, sort_buckets), f_last_malloc);
00227 f_free_list.push_back (f_last_malloc);
00228
00229 return BUCKET (f_last_malloc)->allocate (f_block_size);
00230 }
00231
00232 bool FixedSizeAllocator::valid_pointer (void* p_ptr)
00233 {
00234 return std::find (f_buckets.begin (), f_buckets.end (), p_ptr) != f_buckets.end ();
00235 }
00236
00237 void* FixedSizeAllocator::malloc ()
00238 {
00239 void* ret (BUCKET (f_last_free)->allocate (f_block_size));
00240
00241 if (not ret)
00242 {
00243 f_free_list.remove (f_last_free);
00244 ret = BUCKET (f_last_malloc)->allocate (f_block_size);
00245 }
00246
00247 if (not ret)
00248 {
00249 f_free_list.remove (f_last_malloc);
00250 ret = find_allocation_bucket ();
00251 }
00252
00253 return ret;
00254 }
00255
00256 void FixedSizeAllocator::free (void* p_ptr)
00257 {
00258 if (containing_bucket (f_last_malloc, p_ptr, f_block_size))
00259 f_last_free = f_last_malloc;
00260 else if (!containing_bucket (f_last_free, p_ptr, f_block_size))
00261 f_last_free = *std::lower_bound (f_buckets.begin (), f_buckets.end (), p_ptr, boost::bind (bounding_bucket, _1, _2, f_block_size));
00262
00263 assert (valid_pointer (f_last_free));
00264 assert (containing_bucket (f_last_free, p_ptr, f_block_size));
00265
00266 bool add (BUCKET (f_last_free)->f_available == 0);
00267 BUCKET (f_last_free)->release (p_ptr, f_block_size);
00268
00269 if (add)
00270 f_free_list.push_back (f_last_free);
00271 }
00272
00273 void FixedSizeAllocator::compact ()
00274 {
00275 size_t bks (10000 / (f_block_size * f_block_size));
00276
00277 list<void*>::iterator itr (f_kill_list.begin ());
00278 list<void*>::iterator end (f_kill_list.end ());
00279
00280 for (++itr; itr != end; )
00281 {
00282 size_t idx (0);
00283
00284 for (; idx < bks; ++idx)
00285 {
00286 if (BUCKET (static_cast<Byte*> (*itr) + idx * ((f_block_size << 8) + sizeof (Bucket)))->f_available < 255)
00287 break;
00288 }
00289
00290 if (idx == bks)
00291 {
00292 OS_free (static_cast<Byte*> (*itr));
00293 itr = f_kill_list.erase (itr);
00294 }
00295 else
00296 ++itr;
00297 }
00298 }
00299
00300 bool FixedSizeAllocator::contains (void* p_block)
00301 {
00302 vector<void*>::iterator itr (lower_bound (f_buckets.begin (), f_buckets.end (), p_block, boost::bind (lesser_bucket, _1, _2, f_block_size)));
00303
00304 if (itr != f_buckets.end () and containing_bucket (*itr, p_block, f_block_size))
00305 return true;
00306
00307 return false;
00308 }
00309
00310 void* FixedSizeAllocator::internal_malloc ()
00311 {
00312 void* ret (BUCKET (f_last_free)->allocate (f_block_size));
00313
00314 if (not ret)
00315 {
00316 f_free_list.remove (f_last_free);
00317 ret = BUCKET (f_last_malloc)->allocate (f_block_size);
00318 }
00319
00320 if (not ret)
00321 {
00322 f_free_list.remove (f_last_malloc);
00323
00324 while (not f_free_list.empty ())
00325 {
00326 ret = BUCKET (f_free_list.back ())->allocate (f_block_size);
00327
00328 if (ret)
00329 {
00330 f_last_malloc = f_free_list.back ();
00331 return ret;
00332 }
00333 else
00334 f_free_list.pop_back ();
00335 }
00336 }
00337
00338 return ret;
00339 }
00340
00341 void* FixedSizeAllocator::internal_add_block ()
00342 {
00343 size_t bks (10000 / (f_block_size * f_block_size));
00344 bks = bks ? bks : 1;
00345 Byte* dat (allocate_bucket_space (f_block_size, bks));
00346 f_kill_list.push_back (dat);
00347
00348 for (size_t idx (0); idx < bks; ++idx)
00349 {
00350 f_last_malloc = create_bucket (dat, idx, f_block_size);
00351 f_buckets.insert (std::lower_bound (f_buckets.begin (), f_buckets.end (), f_last_malloc, sort_buckets), f_last_malloc);
00352 f_free_list.push_back (f_last_malloc);
00353 }
00354
00355 void* ret (BUCKET (f_last_malloc)->allocate (f_block_size));
00356 return ret;
00357 }