/third_party/skia/src/gpu/
  GrMemoryPool.cpp
     62: static_assert(alignof(Header) <= kAlignment);  [in allocate()]
     65: SkBlockAllocator::ByteRange alloc = fAllocator.allocate<kAlignment, sizeof(Header)>(size);  [in allocate()]
    111: SkBlockAllocator::Block* block = fAllocator.owningBlock<kAlignment>(header, header->fStart);  [in release()]

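The static_assert at line 62 documents why a single alignment constant suffices for both the pool's bookkeeping Header and the user allocation: any kAlignment-aligned address is then valid for the Header as well. A minimal standalone sketch of that invariant (the Header layout below is a placeholder, not GrMemoryPool's actual header):

    #include <cstddef>
    #include <cstdint>
    #include <new>

    constexpr std::size_t kAlignment = alignof(std::max_align_t);

    struct Header { std::uint32_t fStart; std::uint32_t fEnd; };   // placeholder layout
    static_assert(alignof(Header) <= kAlignment,
                  "a kAlignment-aligned address is also valid for a Header");

    int main() {
        // Any kAlignment-aligned offset handed out by the block allocator can
        // therefore hold a Header without further adjustment.
        alignas(kAlignment) static char storage[2 * kAlignment];
        Header* header = new (storage) Header{0, 0};
        (void)header;
    }
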
  GrMemoryPool.h
     31: inline static constexpr size_t kAlignment = 8;  [member in GrMemoryPool]
     34: inline static constexpr size_t kAlignment = alignof(std::max_align_t);

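GrMemoryPool.h defines kAlignment as either 8 or alignof(std::max_align_t), depending on configuration, and allocation sizes are rounded up to it. A minimal sketch of that rounding for a power-of-two alignment (the helper below is illustrative, in the spirit of SkAlignTo, not Skia's implementation):

    #include <cassert>
    #include <cstddef>

    // Round a size up to a power-of-two alignment, the operation the pool
    // applies with kAlignment; both candidate values (8 and
    // alignof(std::max_align_t)) are powers of two, as the standard requires
    // of any alignment value.
    constexpr std::size_t AlignUp(std::size_t size, std::size_t alignment) {
        return (size + alignment - 1) & ~(alignment - 1);
    }

    int main() {
        assert(AlignUp(13, 8) == 16);
        assert(AlignUp(16, 8) == 16);
        assert(AlignUp(1, alignof(std::max_align_t)) == alignof(std::max_align_t));
    }
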
/third_party/skia/third_party/externals/abseil-cpp/absl/random/internal/
  pool_urbg.cc
    191: constexpr size_t kAlignment =  [in PoolAlignedAlloc(), local]
    198:     new char[sizeof(RandenPoolEntry) + kAlignment]);  [in PoolAlignedAlloc()]
    199: auto y = x % kAlignment;  [in PoolAlignedAlloc()]
    200: void* aligned = reinterpret_cast<void*>(y == 0 ? x : (x + kAlignment - y));  [in PoolAlignedAlloc()]

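PoolAlignedAlloc() uses the classic over-allocate-and-round-up pattern: reserve kAlignment extra bytes from operator new[], then bump the raw pointer to the next multiple of kAlignment with a modulo. A standalone sketch, with illustrative sizes (the real kAlignment and entry size are platform-dependent):

    #include <cstddef>
    #include <cstdint>

    void* AlignedFromOversized(void* raw, std::size_t alignment) {
        auto x = reinterpret_cast<std::uintptr_t>(raw);
        auto y = x % alignment;                    // bytes past the previous aligned address
        return reinterpret_cast<void*>(y == 0 ? x : x + (alignment - y));
    }

    int main() {
        constexpr std::size_t kAlignment = 64;     // illustrative value only
        constexpr std::size_t kPayload   = 256;    // stand-in for sizeof(RandenPoolEntry)
        char* raw = new char[kPayload + kAlignment];   // slack so rounding up stays inside the block
        void* aligned = AlignedFromOversized(raw, kAlignment);
        bool ok = reinterpret_cast<std::uintptr_t>(aligned) % kAlignment == 0;
        delete[] raw;                              // free the original pointer, never the adjusted one
        return ok ? 0 : 1;
    }
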
/third_party/skia/bench/
  GrMemoryPoolBench.cpp
     17: // sizeof is a multiple of GrMemoryPool::kAlignment for 4, 8, or 16 byte alignment
     18: struct alignas(GrMemoryPool::kAlignment) Aligned {
     22: static_assert(sizeof(Aligned) % GrMemoryPool::kAlignment == 0);
     24: // sizeof is not a multiple of GrMemoryPool::kAlignment (will not be a multiple of max_align_t
     30: static_assert(sizeof(Unaligned) % GrMemoryPool::kAlignment != 0);
     33: static_assert(SkAlignTo(sizeof(Unaligned), GrMemoryPool::kAlignment) == sizeof(Aligned));

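The benchmark relies on two facts: sizeof of an alignas(kAlignment) type is always a multiple of kAlignment, while an ordinary struct's size need not be, and rounding the unaligned size up reproduces the padded size. A self-contained sketch of that relationship (AlignTo and the struct bodies below are stand-ins, not Skia's):

    #include <cstddef>

    constexpr std::size_t kAlignment = alignof(std::max_align_t);

    constexpr std::size_t AlignTo(std::size_t n, std::size_t a) {
        return (n + a - 1) & ~(a - 1);   // round n up to a multiple of the power-of-two a
    }

    // Forcing the stricter alignment pads the struct so sizeof is a multiple of it.
    struct alignas(kAlignment) Aligned { char data[2 * kAlignment]; };
    // A plain char array has alignment 1, so this size stays one byte past a multiple.
    struct Unaligned { char data[kAlignment + 1]; };

    static_assert(sizeof(Aligned) % kAlignment == 0, "sizeof is a multiple of alignof");
    static_assert(sizeof(Unaligned) % kAlignment != 0, "one byte past a multiple");
    static_assert(AlignTo(sizeof(Unaligned), kAlignment) == sizeof(Aligned),
                  "rounding the unaligned size up reproduces the padded size");

    int main() {}
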
/third_party/node/deps/v8/src/heap/cppgc/
  object-allocator.h
    152: constexpr size_t kAlignment = 2 * kAllocationGranularity;  [in AllocateObjectOnSpace(), local]
    153: constexpr size_t kAlignmentMask = kAlignment - 1;  [in AllocateObjectOnSpace()]
    154: constexpr size_t kPaddingSize = kAlignment - sizeof(HeapObjectHeader);  [in AllocateObjectOnSpace()]

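AllocateObjectOnSpace() derives a mask and a padding size from a doubled allocation granularity so that object payloads can land on double-word boundaries after a HeapObjectHeader. A sketch of the kind of arithmetic these constants support (the granularity and header size below are assumed values, not cppgc's):

    #include <cstddef>
    #include <cstdint>

    constexpr std::size_t kAllocationGranularity = 8;   // assumed granule size
    constexpr std::size_t kHeaderSize            = 8;   // stand-in for sizeof(HeapObjectHeader)

    constexpr std::size_t kAlignment     = 2 * kAllocationGranularity;
    constexpr std::size_t kAlignmentMask = kAlignment - 1;           // valid: kAlignment is a power of two
    constexpr std::size_t kPaddingSize   = kAlignment - kHeaderSize;

    // If a header were placed at `addr`, how many filler bytes keep the payload
    // (addr + header) on a kAlignment boundary?
    constexpr std::size_t FillerBytes(std::uintptr_t addr) {
        return ((addr + kHeaderSize) & kAlignmentMask) != 0 ? kPaddingSize : 0;
    }

    static_assert(FillerBytes(0) == kPaddingSize, "payload at 8 is only single-granule aligned");
    static_assert(FillerBytes(8) == 0,            "payload already lands on a 16-byte boundary");

    int main() {}
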
/third_party/skia/third_party/externals/abseil-cpp/absl/synchronization/internal/
  create_thread_identity.cc
    111: // PerThreadSynch::kAlignment. This space is never released (it is  [in NewThreadIdentity()]
    114:     sizeof(*identity) + base_internal::PerThreadSynch::kAlignment - 1);  [in NewThreadIdentity()]
    118:     base_internal::PerThreadSynch::kAlignment));  [in NewThreadIdentity()]

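NewThreadIdentity() allocates kAlignment - 1 extra bytes and rounds the result up so the identity object meets PerThreadSynch::kAlignment; per the comment at line 111, that space is never released. A sketch of the round-up step using a bit mask, which works because kAlignment is a power of two (RoundUp and the constants below are illustrative, not absl's):

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    constexpr std::uintptr_t RoundUp(std::uintptr_t p, std::uintptr_t alignment) {
        return (p + alignment - 1) & ~(alignment - 1);   // power-of-two alignment assumed
    }

    int main() {
        constexpr std::size_t kAlignment  = 64;    // illustrative power of two, not absl's value
        constexpr std::size_t kObjectSize = 128;   // stand-in for sizeof(ThreadIdentity)
        void* raw = std::malloc(kObjectSize + kAlignment - 1);   // slack for the round-up
        auto aligned = RoundUp(reinterpret_cast<std::uintptr_t>(raw), kAlignment);
        bool ok = aligned % kAlignment == 0 &&
                  aligned >= reinterpret_cast<std::uintptr_t>(raw);
        std::free(raw);                            // the sketch frees; the real code keeps it forever
        return ok ? 0 : 1;
    }
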
/third_party/skia/third_party/externals/dawn/src/dawn_native/vulkan/
  BufferVk.cpp
    142: constexpr size_t kAlignment = 4u;  [in Initialize(), local]
    164: if (size > std::numeric_limits<uint64_t>::max() - kAlignment) {  [in Initialize()]
    168: mAllocatedSize = Align(size, kAlignment);  [in Initialize()]

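Initialize() guards the rounding step against integer overflow: a size close to the top of the 64-bit range cannot be aligned up without wrapping, so it is rejected first. A sketch of that checked align-up (CheckedAlignUp here is illustrative, not Dawn's Align helper):

    #include <cstdint>
    #include <limits>
    #include <optional>

    constexpr std::uint64_t kAlignment = 4u;   // same constant the Vulkan buffer path uses

    std::optional<std::uint64_t> CheckedAlignUp(std::uint64_t size) {
        if (size > std::numeric_limits<std::uint64_t>::max() - kAlignment) {
            return std::nullopt;               // rounding up would leave the 64-bit range
        }
        return (size + kAlignment - 1) & ~(kAlignment - 1);
    }

    int main() {
        auto ok  = CheckedAlignUp(10);         // 10 rounds up to 12
        auto bad = CheckedAlignUp(std::numeric_limits<std::uint64_t>::max() - 1);
        return (ok && *ok == 12 && !bad) ? 0 : 1;
    }
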
/third_party/gn/src/gn/
  immutable_vector.h
    216: static constexpr size_t kAlignment = alignof(T) > alignof(Header)  [member in ImmutableVector]
    220: using Allocator = AlignedAlloc<kAlignment>;

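ImmutableVector stores a Header and the element array in one allocation, so the block's alignment must be the stricter of alignof(T) and alignof(Header). A sketch of that choice (Header and the offset computation below are placeholders, not gn's):

    #include <cstddef>

    struct Header { std::size_t size; };       // placeholder for the real header

    template <typename T>
    struct BlockLayout {
        // The block must satisfy whichever of the two types is more demanding.
        static constexpr std::size_t kAlignment =
            alignof(T) > alignof(Header) ? alignof(T) : alignof(Header);
        // First element starts after the header, rounded up to alignof(T).
        static constexpr std::size_t kElementsOffset =
            (sizeof(Header) + alignof(T) - 1) & ~(alignof(T) - 1);
    };

    struct alignas(32) Wide { float v[8]; };

    static_assert(BlockLayout<Wide>::kAlignment == 32,              "element alignment dominates");
    static_assert(BlockLayout<char>::kAlignment == alignof(Header), "header alignment dominates");
    static_assert(BlockLayout<Wide>::kElementsOffset % alignof(Wide) == 0, "elements start aligned");

    int main() {}
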
/third_party/skia/third_party/externals/abseil-cpp/absl/base/internal/
  thread_identity_test.cc
     58:     PerThreadSynch::kAlignment);  [in TestThreadIdentityCurrent()]

  thread_identity.h
     56: static constexpr int kAlignment = 1 << kLowZeroBits;  [member]
    136: // alignment of PerThreadSynch::kAlignment.
    140: // PerThreadSynch::kAlignment aligned. We provide this alignment on

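PerThreadSynch requires 1 << kLowZeroBits alignment so that every pointer to it has that many guaranteed-zero low bits, which the synchronization code can reuse for flag and state values packed into the same word. A sketch of that low-bit tagging (the constants and Node type are illustrative, not absl's):

    #include <cstdint>

    constexpr int kLowZeroBits = 3;                                   // illustrative, not absl's value
    constexpr std::uintptr_t kAlignment = std::uintptr_t{1} << kLowZeroBits;
    constexpr std::uintptr_t kLowMask   = kAlignment - 1;

    struct alignas(kAlignment) Node { int payload; };

    std::uintptr_t Pack(Node* node, std::uintptr_t flags) {
        // The alignment guarantees the low kLowZeroBits bits of `node` are zero,
        // so the flags (which must fit in those bits) can share the word.
        return reinterpret_cast<std::uintptr_t>(node) | (flags & kLowMask);
    }

    Node* Unpack(std::uintptr_t word) {
        return reinterpret_cast<Node*>(word & ~kLowMask);
    }

    int main() {
        Node n{42};
        std::uintptr_t word = Pack(&n, 0b101);
        return (Unpack(word) == &n && (word & kLowMask) == 0b101) ? 0 : 1;
    }
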
/third_party/node/deps/v8/src/heap/
  basic-memory-chunk.h
    127: static const intptr_t kAlignment =  [member in v8::internal::BasicMemoryChunk]
    130: static const intptr_t kAlignmentMask = kAlignment - 1;

  memory-allocator.cc
    305: // +----------------------------+<- base aligned at MemoryChunk::kAlignment  [in ComputeChunkSize()]
    325: // +----------------------------+<- base aligned at MemoryChunk::kAlignment  [in ComputeChunkSize()]
    350:     MemoryChunk::kAlignment);  [in AllocateUninitializedChunk()]
    358: AllocateAlignedMemory(chunk_size, area_size, MemoryChunk::kAlignment,  [in AllocateUninitializedChunk()]

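Chunks are carved out at bases aligned to MemoryChunk::kAlignment; one payoff of that invariant is that a chunk's base can be recovered from any interior address by masking with kAlignmentMask. A sketch (the chunk alignment below is an assumed value, not V8's):

    #include <cstdint>

    constexpr std::uintptr_t kAlignment     = std::uintptr_t{1} << 18;   // assumed 256 KiB chunk alignment
    constexpr std::uintptr_t kAlignmentMask = kAlignment - 1;

    // Because every chunk base is a multiple of kAlignment, clearing the low
    // bits of any interior address yields the base of its chunk.
    constexpr std::uintptr_t ChunkBaseFor(std::uintptr_t addr) {
        return addr & ~kAlignmentMask;
    }

    static_assert(ChunkBaseFor(5 * kAlignment + 1234) == 5 * kAlignment,
                  "an interior address maps back to its chunk base");

    int main() {}
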
/third_party/skia/third_party/externals/abseil-cpp/absl/synchronization/
  mutex.cc
    672: static_assert(PerThreadSynch::kAlignment > kMuLow,
    673:     "PerThreadSynch::kAlignment must be greater than kMuLow");
   2435: static_assert(PerThreadSynch::kAlignment > kCvLow,
   2436:     "PerThreadSynch::kAlignment must be greater than kCvLow");

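The two static_asserts check that the flag bits kept below kMuLow and kCvLow in the mutex and condition-variable words fit strictly under PerThreadSynch::kAlignment, so the packed waiter pointers and the flag bits never overlap. A minimal sketch of the same guard with illustrative values (not absl's):

    #include <cstdint>

    constexpr std::uintptr_t kAlignment = 1 << 3;   // stand-in for PerThreadSynch::kAlignment
    constexpr std::uintptr_t kMuLow     = 0x7;      // stand-in for the mutex word's low flag mask

    // If the alignment did not exceed the flag mask, setting a flag bit could
    // clobber the packed waiter-list pointer stored in the same word.
    static_assert(kAlignment > kMuLow,
                  "flag bits must stay strictly below the pointer alignment");

    int main() {}
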