// Software License for MTL
//
// Copyright (c) 2007 The Trustees of Indiana University.
//               2008 Dresden University of Technology and the Trustees of Indiana University.
//               2010 SimuNova UG (haftungsbeschränkt), www.simunova.com.
// All rights reserved.
// Authors: Peter Gottschling and Andrew Lumsdaine
//
// This file is part of the Matrix Template Library
//
// See also license.mtl.txt in the distribution.

#ifndef MTL_CONTIGUOUS_MEMORY_BLOCK_INCLUDE
#define MTL_CONTIGUOUS_MEMORY_BLOCK_INCLUDE

#include <cassert>
#include <algorithm>
#include <boost/static_assert.hpp>
#include <boost/numeric/mtl/mtl_fwd.hpp>
#include <boost/numeric/mtl/utility/tag.hpp>
#include <boost/numeric/mtl/utility/exception.hpp>
#include <boost/numeric/mtl/matrix/dimension.hpp>
#include <boost/numeric/mtl/detail/index.hpp>
#include <boost/numeric/mtl/operation/clone.hpp>



namespace mtl { namespace detail {
using std::size_t;

// Macro MTL_ENABLE_ALIGNMENT is by default not set

// Minimal size of memory allocation using alignment
#ifndef MTL_ALIGNMENT_LIMIT
#  define MTL_ALIGNMENT_LIMIT 1024
#endif

// Alignment in memory
#ifndef MTL_ALIGNMENT
#  define MTL_ALIGNMENT 128
#endif
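
// Illustrative configuration sketch (not part of the library): both macros can be
// overridden before this header is included, e.g.
//
//   #define MTL_ENABLE_ALIGNMENT       // turn aligned allocation on (off by default)
//   #define MTL_ALIGNMENT_LIMIT 512    // pad/align allocations of 512 bytes or more ...
//   #define MTL_ALIGNMENT       64     // ... to a 64-byte boundary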


// Size helper for static size
template <unsigned Size>
struct size_helper
{
    typedef size_helper self;

    size_helper() {}
    explicit size_helper(std::size_t size)
    {
	set_size(size);
    }

# ifndef MTL_IGNORE_STATIC_SIZE_VIOLATION
    void set_size(std::size_t MTL_DEBUG_ARG(size))
    {	MTL_DEBUG_THROW_IF(Size != size, change_static_size()); }
# else
    void set_size(std::size_t) {}
# endif

    std::size_t used_memory() const { return Size;  }
    friend void swap(self&, self&) {}
};

// Manage size only if template parameter is 0, i.e. dynamic size
template <>
struct size_helper<0>
{
    typedef size_helper self;

    size_helper(std::size_t size= 0) : my_used_memory(size) {}

    void set_size(std::size_t size)
    {
	my_used_memory= size;
    }

    std::size_t used_memory() const
    {
	return my_used_memory;
    }

    friend void swap(self& x, self& y)
    {
	std::swap(x.my_used_memory, y.my_used_memory);
    }

  protected:
    std::size_t                               my_used_memory;
};
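
// Usage sketch (illustrative only): the static helper merely checks the size, the
// dynamic one (Size == 0) stores it.
//
//   size_helper<5> s;              // compile-time size
//   s.set_size(5);                 // ok; any other value raises change_static_size in debug mode
//   assert(s.used_memory() == 5);
//
//   size_helper<0> d(10);          // run-time size
//   d.set_size(20);                // simply stored
//   assert(d.used_memory() == 20);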


// Encapsulate behavior of alignment

# ifdef MTL_ENABLE_ALIGNMENT

    template <typename Value>
    struct alignment_helper
    {
	typedef alignment_helper self;
	typedef Value            value_type;

	alignment_helper() : malloc_address(0) {}

	Value* alligned_alloc(std::size_t size)
	{
	    if (size == 0)
		return 0;

	    bool        align= size * sizeof(value_type) >= MTL_ALIGNMENT_LIMIT;
	    std::size_t bytes= size * sizeof(value_type);

	    if (align)
		bytes+= MTL_ALIGNMENT - 1;

	    char* p= malloc_address= new char[bytes];
	    if (align)
		while ((long int)(p) % MTL_ALIGNMENT) p++;
		// p+= MTL_ALIGNMENT - (long int)(p) % MTL_ALIGNMENT;

	    return reinterpret_cast<value_type*>(p);
	}

	void aligned_delete(bool is_own, Value*& data)
	{
	    if (is_own && malloc_address) delete[] malloc_address;
	    data= 0;
	}

	friend void swap(self& x, self& y)
	{
	    using std::swap;
	    swap(x.malloc_address, y.malloc_address);
	}

      private:
	char*                                     malloc_address;
    };

# else

    template <typename Value>
    struct alignment_helper
    {
	typedef alignment_helper self;

	Value* alligned_alloc(std::size_t size)	{  return size > 0 ? new Value[size] : (Value*)(0); }

	void aligned_delete(bool is_own, Value*& data)
	{
	    if (is_own && data != 0) // std::cout << "Delete " << data << '\n',
		delete[] data, data= 0;
	}

	friend void swap(self&, self&) {}
    };

# endif
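
// Usage sketch (illustrative only): allocation and deallocation go through the helper,
// so the aligned and the plain variant are handled uniformly.
//
//   alignment_helper<double> a;
//   double* p= a.alligned_alloc(100); // new double[100], padded/aligned if enabled and large enough
//   // ... use p[0] .. p[99] ...
//   a.aligned_delete(true, p);        // frees only if we own the memory; sets p to 0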


template <typename Value, bool OnStack, unsigned Size>
struct memory_crtp
//    : public contiguous_memory_block<Value, OnStack, Size>
{
    typedef contiguous_memory_block<Value, OnStack, Size> base;

    static bool const                         on_stack= OnStack;

    typedef Value                             value_type;
    typedef value_type*                       pointer_type;
    typedef const value_type*                 const_pointer_type;

    // offset of key (pointer) w.r.t. data
    // values must be stored consecutively
    size_t offset(const Value* p) const
    {
      return p - static_cast<const base&>(*this).data;
    }

    // returns pointer to data
    pointer_type elements()
    {
      return static_cast<base&>(*this).data;
    }

    // returns const pointer to data
    const_pointer_type elements() const
    {
      return static_cast<const base&>(*this).data;
    }

    // returns n-th value in consecutive memory
    // (whatever this means in the corr. matrix format)
    value_type& value_n(size_t offset)
    {
      return static_cast<base&>(*this).data[offset];
    }

    // returns n-th value in consecutive memory
    // (whatever this means in the corr. matrix format)
    const value_type& value_n(size_t offset) const
    {
      return static_cast<const base&>(*this).data[offset];
    }
};
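
// Usage sketch (illustrative only): the CRTP base provides uniform access to the block
// defined below.
//
//   contiguous_memory_block<double, false, 0> b(10); // heap block with 10 values
//   double* p= b.elements();                         // pointer to the first value
//   b.value_n(3)= 1.0;                               // same element as p[3]
//   assert(b.offset(p + 3) == 3);                    // offset of a pointer w.r.t. data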

// OnStack == false -> data on heap
template <typename Value, bool OnStack, unsigned Size>
struct contiguous_memory_block
    : public size_helper<Size>,
      public alignment_helper<Value>,
      public memory_crtp<Value, OnStack, Size>
{
    typedef Value                             value_type;
    typedef contiguous_memory_block           self;
    typedef size_helper<Size>                 size_base;
    typedef alignment_helper<Value>           alignment_base;
    typedef memory_crtp<Value, OnStack, Size> crtp_base;

    /// Category of memory, determines behaviour
    enum c_t {own,         //< My own memory: allocate and free it
	      external,    //< Memory, complete memory block of other item, only reference
	      view         //< View of other's memory (e.g. sub-matrix), different construction than external
    };
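
    // Illustrative examples (not part of the interface) of how each category arises:
    //
    //   contiguous_memory_block<double, false, 0> a(10);            // own:      allocates and frees itself
    //   double buf[10];
    //   contiguous_memory_block<double, false, 0> e(buf, 10);       // external: references buf, never frees it
    //   contiguous_memory_block<double, false, 0> v(buf, 10, true); // view:     like external, but copied shallowly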

  private:

    void alloc(std::size_t size)
    {
	category= own;
	this->set_size(size);
	data= this->alligned_alloc(this->used_memory());
    }

    void delete_it()
    {
	this->aligned_delete(category == own, data);
    }

    template <typename Other>
    void copy_construction(const Other& other)
    {
	using std::copy;
	category= own;
	// std::cout << "Copied in copy constructor.\n";
	alloc(other.used_memory());
	// std::cout << "My address: " << data << ", other address: " << other.data << '\n';
	copy(other.data, other.data + other.used_memory(), data);
    }

    void move_construction(self& other)
    {
	// std::cout << "Data moved in constructor.\n";
	category= own; data= 0;
	swap(*this, other);
    }

    // Copy the arguments of a view (shallowly) and leave original as it is
    void copy_view(const self& other)
    {
	// std::cout << "View copied (shallowly).\n";
	assert(other.category == view);
	category= view;
	this->set_size(other.used_memory());
	data= other.data;
    }

    template <typename Other>
    void copy_assignment(const Other& other)
    {
	// std::cout << "Copied in assignment.\n";
	if (this->used_memory() == 0)
	    alloc(other.used_memory());
	MTL_DEBUG_THROW_IF(this->used_memory() != other.used_memory(), incompatible_size());
	std::copy(other.data, other.data + other.used_memory(), data);
    }

  public:
    contiguous_memory_block() : category(own), data(0) {}

    explicit contiguous_memory_block(Value *data, std::size_t size, bool is_view= false)
	: size_base(size), category(is_view ? view : external), data(data)
    {}

    explicit contiguous_memory_block(std::size_t size) : category(own)
    {
	// std::cout << "Constructor with size.\n";
	alloc(size);
	// std::cout << "New block at " << data << '\n';
    }

    // Default copy constructor
    contiguous_memory_block(const self& other) : size_base(other)
    {
	// std::cout << "Copy constructor (same type).\n";
	if (other.category == view)
	    copy_view(other);
	else
	    copy_construction(other);
    }

    // Force copy construction
    contiguous_memory_block(const self& other, clone_ctor)
    {
	// std::cout << "(Forced) Copy constructor (same type).\n";
	copy_construction(other);
    }

    // Other types must be copied always
    template<typename Value2, bool OnStack2, unsigned Size2>
    explicit contiguous_memory_block(const contiguous_memory_block<Value2, OnStack2, Size2>& other)
    {
	// std::cout << "Copy constructor (different type).\n";
	copy_construction(other);
    }

#ifdef MTL_WITH_MOVE
    self& operator=(self&& other)
    {
	move_assignment(other);
	return *this;
    }

    self& operator=(const self& other)
    {
	copy_assignment(other);
	return *this;
    }
#elif defined(MTL_MEMORY_BLOCK_MOVE_EMULATION)
    // Operator takes parameter by value and consumes it
    self& operator=(self other)
    {
	move_assignment(other);
	return *this;
    }
#else
    self& operator=(self other)
    {
	copy_assignment(other);
	return *this;
    }
#endif

    // Same behavior as consuming assignment, to be used by derived classes
protected:
    void move_assignment(self& other)
    {
	// std::cout << "Consuming assignment operator (if same type).\n";
	if (category == own && other.category == own)
	    swap(*this, other);
	else
	    copy_assignment(other);
    }

public:
    template<typename Value2, bool OnStack2, unsigned Size2>
    self& operator=(const contiguous_memory_block<Value2, OnStack2, Size2>& other)
    {
	// std::cout << "Assignment from different array type -> Copy.\n";
	copy_assignment(other);
	return *this;
    }


    void set_view() { category= view; }

    void realloc(std::size_t size)
    {
	if (Size == 0) {

	    // If we already have memory of the right size we can keep it
	    if (size == this->used_memory())
		return;
	    MTL_DEBUG_THROW_IF(category != own,
			       logic_error("Can't change the size of collections with external memory"));
	    delete_it();
	    alloc(size);
	} else {
	    MTL_DEBUG_THROW_IF(size != Size, logic_error("Can't change static size"));
	}
    }
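
    // Behavior sketch of realloc (illustrative only):
    //
    //   contiguous_memory_block<double, false, 0> b(10);
    //   b.realloc(10); // same size: memory is kept
    //   b.realloc(20); // own memory: old block freed, new one allocated (contents not preserved)
    //   // external/view memory or a static Size != 0 raises logic_error in debug mode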

    ~contiguous_memory_block()
    {
	//std::cout << "Delete block with address " << data << '\n';
	delete_it();
    }

    friend void swap(self& x, self& y)
    {
	using std::swap;
	swap(x.category, y.category);
	std::swap(x.data, y.data);
	swap(static_cast<size_base&>(x), static_cast<size_base&>(y));
	swap(static_cast<alignment_base&>(x), static_cast<alignment_base&>(y));
    }

protected:
    enum c_t                                  category;
public:
    Value                                     *data;
};

// OnStack == true
template <typename Value, unsigned Size>
struct contiguous_memory_block<Value, true, Size>
    : public alignment_helper<Value>,
      public memory_crtp<Value, true, Size>
{
    typedef Value                             value_type;
    typedef contiguous_memory_block           self;
    //static bool const                         on_stack= true;

    Value    data[Size];

# ifdef NDEBUG
    contiguous_memory_block() {} // default constructor in release mode
    explicit contiguous_memory_block(std::size_t) {}
# else
    explicit contiguous_memory_block(std::size_t size= Size)
    {
	MTL_DEBUG_THROW_IF(Size != size, incompatible_size());
    }
# endif

    // Move-semantics ignored for arrays on stack
    contiguous_memory_block(const self& other)
    {
	// std::cout << "Copied in copy constructor (same type).\n";
	std::copy(other.data, other.data+Size, data);
    }


    template<typename Value2, bool OnStack2, unsigned Size2>
    explicit contiguous_memory_block(const contiguous_memory_block<Value2, OnStack2, Size2>& other)
    {
	// std::cout << "Copied in copy constructor (different type).\n";
	MTL_DEBUG_THROW_IF(Size != other.used_memory(), incompatible_size());
	std::copy(other.data, other.data + other.used_memory(), data);
    }

    self& operator=(const self& other)
    {
	// std::cout << "Assignment from same type.\n";
	std::copy(other.data, other.data+Size, data);
	return *this;
    }

    // For consistency with non-static blocks, to be used by derived classes
protected:
    void move_assignment(self& other)
    {
	std::copy(other.data, other.data+Size, data);
    }

public:
    template<typename Value2, bool OnStack2, unsigned Size2>
    self& operator=(const contiguous_memory_block<Value2, OnStack2, Size2>& other)
    {
	// std::cout << "Assignment from different type.\n";
	MTL_DEBUG_THROW_IF(Size != other.used_memory(), incompatible_size());
	std::copy(other.data, other.data + other.used_memory(), data);
	return *this;
    }


    void realloc(std::size_t MTL_DEBUG_ARG(s))
    {
	// Arrays on stack cannot be reallocated but if the size isn't changed we are fine
	assert(s == Size);
    }

    std::size_t used_memory() const
    {
	return Size;
    }

  protected:
    enum c_t {own};
    static const c_t category= own;
};


}} // namespace mtl::detail

namespace mtl {
    template <typename Value, bool OnStack, unsigned Size>
    struct is_clonable< detail::contiguous_memory_block<Value, OnStack, Size> > : boost::mpl::bool_<!OnStack> {};
}
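
// Usage sketch (illustrative only): generic code can test is_clonable to decide whether a
// deep copy can be forced; for heap-based blocks the clone_ctor tag (from clone.hpp) does that.
//
//   double buf[10];
//   mtl::detail::contiguous_memory_block<double, false, 0> v(buf, 10, true);   // view
//   mtl::detail::contiguous_memory_block<double, false, 0> c(v, clone_ctor()); // forced deep copy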

#endif // MTL_CONTIGUOUS_MEMORY_BLOCK_INCLUDE