Stan  2.5.0
probability, sampling & optimization
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
stack_alloc.hpp
Go to the documentation of this file.
1 #ifndef STAN__MEMORY__STACK_ALLOC_HPP
2 #define STAN__MEMORY__STACK_ALLOC_HPP
3 
#include <cstddef>
#include <cstdlib>
#include <new>        // std::bad_alloc, thrown below but previously only transitively included
#include <sstream>
#include <stdexcept>
#if defined(_MSC_VER)
  #include <msinttypes.h> // Microsoft Visual Studio lacks compliant stdint.h
#else
  #include <stdint.h> // FIXME: replace with cstddef?
#endif
#include <vector>
#include <stan/meta/likely.hpp>
15 
16 namespace stan {
17 
18  namespace memory {
19 
31  template <typename T>
32  bool is_aligned(T* ptr, unsigned int bytes_aligned) {
33  return (reinterpret_cast<uintptr_t>(ptr) % bytes_aligned) == 0U;
34  }
35 
36 
37  namespace {
38  const size_t DEFAULT_INITIAL_NBYTES = 1 << 16; // 64KB
39 
40 
41  // FIXME: enforce alignment
42  // big fun to inline, but only called twice
43  inline char* eight_byte_aligned_malloc(size_t size) {
44  char* ptr = static_cast<char*>(malloc(size));
45  if (!ptr) return ptr; // malloc failed to alloc
46  if (!is_aligned(ptr,8U)) {
47  std::stringstream s;
48  s << "invalid alignment to 8 bytes, ptr="
49  << reinterpret_cast<uintptr_t>(ptr)
50  << std::endl;
51  throw std::runtime_error(s.str());
52  }
53  return ptr;
54  }
55  }
56 
76  class stack_alloc {
77  private:
78  std::vector<char*> blocks_; // storage for blocks, may be bigger than cur_block_
79  std::vector<size_t> sizes_; // could store initial & shift for others
80  size_t cur_block_; // index into blocks_ for next alloc
81  char* cur_block_end_; // ptr to cur_block_ptr_ + sizes_[cur_block_]
82  char* next_loc_; // ptr to next available spot in cur
83  // block
84  // next three for keeping track of nested allocations on top of stack:
85  std::vector<size_t> nested_cur_blocks_;
86  std::vector<char*> nested_next_locs_;
87  std::vector<char*> nested_cur_block_ends_;
88 
89 
98  char* move_to_next_block(size_t len) {
99  char* result;
100  ++cur_block_;
101  // Find the next block (if any) containing at least len bytes.
102  while ((cur_block_ < blocks_.size()) && (sizes_[cur_block_] < len))
103  ++cur_block_;
104  // Allocate a new block if necessary.
105  if (unlikely(cur_block_ >= blocks_.size())) {
106  // New block should be max(2*size of last block, len) bytes.
107  size_t newsize = sizes_.back() * 2;
108  if (newsize < len)
109  newsize = len;
110  blocks_.push_back(eight_byte_aligned_malloc(newsize));
111  if (!blocks_.back())
112  throw std::bad_alloc();
113  sizes_.push_back(newsize);
114  }
115  result = blocks_[cur_block_];
116  // Get the object's state back in order.
117  next_loc_ = result + len;
118  cur_block_end_ = result + sizes_[cur_block_];
119  return result;
120  }
121 
122  public:
123 
133  stack_alloc(size_t initial_nbytes = DEFAULT_INITIAL_NBYTES) :
134  blocks_(1, eight_byte_aligned_malloc(initial_nbytes)),
135  sizes_(1,initial_nbytes),
136  cur_block_(0),
137  cur_block_end_(blocks_[0] + initial_nbytes),
138  next_loc_(blocks_[0]) {
139  if (!blocks_[0])
140  throw std::bad_alloc(); // no msg allowed in bad_alloc ctor
141  }
142 
150  // free ALL blocks
151  for (size_t i = 0; i < blocks_.size(); ++i)
152  if (blocks_[i])
153  free(blocks_[i]);
154  }
155 
168  inline void* alloc(size_t len) {
169  // Typically, just return and increment the next location.
170  char* result = next_loc_;
171  next_loc_ += len;
172  // Occasionally, we have to switch blocks.
173  if (unlikely(next_loc_ >= cur_block_end_))
174  result = move_to_next_block(len);
175  return (void*)result;
176  }
177 
184  inline void recover_all() {
185  cur_block_ = 0;
186  next_loc_ = blocks_[0];
187  cur_block_end_ = next_loc_ + sizes_[0];
188  }
189 
194  inline void start_nested() {
195  nested_cur_blocks_.push_back(cur_block_);
196  nested_next_locs_.push_back(next_loc_);
197  nested_cur_block_ends_.push_back(cur_block_end_);
198  }
199 
203  inline void recover_nested() {
204  if (unlikely(nested_cur_blocks_.empty()))
205  recover_all();
206 
207  cur_block_ = nested_cur_blocks_.back();
208  nested_cur_blocks_.pop_back();
209 
210  next_loc_ = nested_next_locs_.back();
211  nested_next_locs_.pop_back();
212 
213  cur_block_end_ = nested_cur_block_ends_.back();
214  nested_cur_block_ends_.pop_back();
215  }
216 
222  inline void free_all() {
223  // frees all BUT the first (index 0) block
224  for (size_t i = 1; i < blocks_.size(); ++i)
225  if (blocks_[i])
226  free(blocks_[i]);
227  sizes_.resize(1);
228  blocks_.resize(1);
229  recover_all();
230  }
231 
242  size_t bytes_allocated() {
243  size_t sum = 0;
244  for (size_t i = 0; i <= cur_block_; ++i) {
245  sum += sizes_[i];
246  }
247  return sum;
248  }
249 
250  };
251 
252  }
253 }
254 #endif
size_t bytes_allocated()
Return number of bytes allocated to this instance by the heap.
void free_all()
Free all memory used by the stack allocator other than the initial block allocation back to the system.
An instance of this class provides a memory pool through which blocks of raw memory may be allocated and then collectively freed.
Definition: stack_alloc.hpp:76
fvar< T > sum(const Eigen::Matrix< fvar< T >, R, C > &m)
Definition: sum.hpp:14
#define unlikely(x)
Definition: likely.hpp:9
~stack_alloc()
Destroy this memory allocator.
void * alloc(size_t len)
Return a newly allocated block of memory of the appropriate size managed by the stack allocator.
int size(const std::vector< T > &x)
Definition: size.hpp:11
bool is_aligned(T *ptr, unsigned int bytes_aligned)
Return true if the specified pointer is aligned on the number of bytes.
Definition: stack_alloc.hpp:32
void start_nested()
Store current positions before doing nested operation so can recover back to start.
void recover_nested()
recover memory back to the last start_nested call.
stack_alloc(size_t initial_nbytes=DEFAULT_INITIAL_NBYTES)
Construct a resizable stack allocator initially holding the specified number of bytes.
void recover_all()
Recover all the memory used by the stack allocator.

     [ Stan Home Page ] © 2011–2014, Stan Development Team.