/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/private/GrTypesPriv.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/GrStagingBufferManager.h"
#include "src/gpu/mtl/GrMtlBuffer.h"
#include "src/gpu/mtl/GrMtlCommandBuffer.h"
#include "src/gpu/mtl/GrMtlGpu.h"

#if !__has_feature(objc_arc)
#error This file must be compiled with Arc. Use -fobjc-arc flag
#endif

// In debug builds VALIDATE() runs this->validate() to check internal
// invariants; in release builds it compiles to nothing.
#ifdef SK_DEBUG
#define VALIDATE() this->validate()
#else
#define VALIDATE() do {} while(false)
#endif

GR_NORETAIN_BEGIN

#ifdef SK_ENABLE_MTL_DEBUG_INFO
// Debug labels applied to each MTLBuffer, indexed by (int)GrGpuBufferType.
// The order here must match the GrGpuBufferType enum declaration — the
// constructor indexes this array directly with the intended type.
NSString* kBufferTypeNames[kGrGpuBufferTypeCount] = {
    @"Vertex",
    @"Index",
    @"Indirect",
    @"Xfer CPU to GPU",
    @"Xfer GPU to CPU",
    @"Uniform",
};
#endif

// Factory: creates a GrMtlBuffer and, if initial data is supplied, uploads it.
// Returns nullptr if the initial upload fails; otherwise the new buffer.
sk_sp<GrMtlBuffer> GrMtlBuffer::Make(GrMtlGpu* gpu, size_t size, GrGpuBufferType intendedType,
                                     GrAccessPattern accessPattern, const void* data) {
    sk_sp<GrMtlBuffer> buffer(new GrMtlBuffer(gpu, size, intendedType, accessPattern));
    if (data && !buffer->onUpdateData(data, size)) {
        return nullptr;
    }
    return buffer;
}

// Constructor: allocates the underlying MTLBuffer and registers with the cache.
// Any access pattern other than kStatic is treated as dynamic (CPU-mappable);
// static buffers get private (GPU-only) storage and are updated via blits.
GrMtlBuffer::GrMtlBuffer(GrMtlGpu* gpu, size_t size, GrGpuBufferType intendedType,
                         GrAccessPattern accessPattern)
        : INHERITED(gpu, size, intendedType, accessPattern)
        , fIsDynamic(accessPattern != kStatic_GrAccessPattern) {
    // Explicit storage modes require macOS 10.11 / iOS 9.0; on older OS
    // versions options stays 0 (the system default resource options).
    NSUInteger options = 0;
    if (@available(macOS 10.11, iOS 9.0, *)) {
        if (fIsDynamic) {
#ifdef SK_BUILD_FOR_MAC
            // On Mac hardware, dynamic buffers use Managed storage (CPU copy
            // synced to GPU via didModifyRange in internalUnmap). On Apple
            // Silicon / iOS-family devices, Shared storage is used instead.
            if (gpu->mtlCaps().isMac()) {
                options |= MTLResourceStorageModeManaged;
            } else {
                options |= MTLResourceStorageModeShared;
            }
#else
            options |= MTLResourceStorageModeShared;
#endif
        } else {
            // Static buffers live in GPU-only memory; CPU updates go through
            // a staging buffer + blit (see onUpdateData).
            options |= MTLResourceStorageModePrivate;
        }
    }

    // Round the allocation up to the device's minimum buffer alignment.
    // A zero-size request yields a nil MTLBuffer rather than a Metal call.
    size = SkAlignTo(size, gpu->mtlCaps().getMinBufferAlignment());
    fMtlBuffer = size == 0 ? nil :
            [gpu->device() newBufferWithLength: size
                                       options: options];
#ifdef SK_ENABLE_MTL_DEBUG_INFO
    // Messaging a nil fMtlBuffer here is a harmless no-op.
    fMtlBuffer.label = kBufferTypeNames[(int)intendedType];
#endif
    this->registerWithCache(SkBudgeted::kYes);
    VALIDATE();
}

GrMtlBuffer::~GrMtlBuffer() {
    // onRelease/onAbandon must have already dropped the Metal buffer and map.
    SkASSERT(!fMtlBuffer);
    SkASSERT(!fMapPtr);
}

// Uploads sizeInBytes from src into this buffer.
// Dynamic buffers: map, memcpy, unmap (which flushes on managed storage).
// Static (private-storage) buffers: copy into a staging-buffer slice and
// record a blit from the staging buffer into fMtlBuffer.
// Returns false if the buffer was destroyed, the source is too large, or a
// staging slice / blit encoder could not be obtained.
bool GrMtlBuffer::onUpdateData(const void* src, size_t sizeInBytes) {
    if (this->wasDestroyed()) {
        return false;
    }

    if (sizeInBytes > this->size()) {
        return false;
    }

    if (fIsDynamic) {
        this->internalMap(sizeInBytes);
        if (!fMapPtr) {
            return false;
        }
        memcpy(fMapPtr, src, sizeInBytes);
        this->internalUnmap(sizeInBytes);
    } else {
        // copy data to gpu buffer
        GrStagingBufferManager::Slice slice;
        slice = this->mtlGpu()->stagingBufferManager()->allocateStagingBufferSlice(
                sizeInBytes, this->mtlGpu()->mtlCaps().getMinBufferAlignment());
        if (!slice.fBuffer) {
            return false;
        }
        memcpy(slice.fOffsetMapPtr, src, sizeInBytes);

        GrMtlCommandBuffer* cmdBuffer = this->mtlGpu()->commandBuffer();
        id<MTLBlitCommandEncoder> GR_NORETAIN blitCmdEncoder = cmdBuffer->getBlitCommandEncoder();
        if (!blitCmdEncoder) {
            return false;
        }
        GrMtlBuffer* mtlBuffer = static_cast<GrMtlBuffer*>(slice.fBuffer);
        id<MTLBuffer> transferBuffer = mtlBuffer->mtlBuffer();
        [blitCmdEncoder copyFromBuffer: transferBuffer
                          sourceOffset: slice.fOffset
                              toBuffer: fMtlBuffer
                     destinationOffset: 0
                                  size: sizeInBytes];
    }

    return true;
}

// Convenience accessor; only valid while the resource is alive.
inline GrMtlGpu* GrMtlBuffer::mtlGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrMtlGpu*>(this->getGpu());
}

// Abandon: drop our references without GPU-side cleanup (context lost).
// ARC releases the MTLBuffer when fMtlBuffer is nil'ed.
void GrMtlBuffer::onAbandon() {
    fMtlBuffer = nil;
    fMapPtr = nullptr;
    VALIDATE();
    INHERITED::onAbandon();
}

// Release: normal teardown path; drops the MTLBuffer and any map pointer.
void GrMtlBuffer::onRelease() {
    if (!this->wasDestroyed()) {
        VALIDATE();
        fMtlBuffer = nil;
        fMapPtr = nullptr;
        VALIDATE();
    }
    INHERITED::onRelease();
}

// Exposes the buffer's CPU-visible contents pointer. Only dynamic buffers
// (Shared/Managed storage) are mappable; for static buffers this is a no-op
// and fMapPtr stays null — callers check fMapPtr after mapping.
void GrMtlBuffer::internalMap(size_t sizeInBytes) {
    if (fIsDynamic) {
        VALIDATE();
        SkASSERT(sizeInBytes <= this->size());
        SkASSERT(!this->isMapped());
        fMapPtr = static_cast<char*>(fMtlBuffer.contents);
        VALIDATE();
    }
}

// Ends a CPU map. On Mac managed storage, didModifyRange tells Metal which
// byte range the CPU wrote so it can be synchronized to the GPU copy.
// NOTE(review): the flushed range always starts at offset 0, which matches
// internalMap handing out the buffer's base contents pointer.
void GrMtlBuffer::internalUnmap(size_t sizeInBytes) {
    SkASSERT(fMtlBuffer);
    if (fIsDynamic) {
        VALIDATE();
        SkASSERT(sizeInBytes <= this->size());
        SkASSERT(this->isMapped());
#ifdef SK_BUILD_FOR_MAC
        if (this->mtlGpu()->mtlCaps().isMac()) {
            [fMtlBuffer didModifyRange: NSMakeRange(0, sizeInBytes)];
        }
#endif
        fMapPtr = nullptr;
    }
}

// GrGpuBuffer interface: map/unmap the entire buffer.
void GrMtlBuffer::onMap() {
    if (!this->wasDestroyed()) {
        this->internalMap(this->size());
    }
}

void GrMtlBuffer::onUnmap() {
    if (!this->wasDestroyed()) {
        this->internalUnmap(this->size());
    }
}

#ifdef SK_DEBUG
// Invariants: if a Metal buffer exists its intended type is one of the known
// GrGpuBufferType values, and a non-null map pointer implies a live buffer.
void GrMtlBuffer::validate() const {
    SkASSERT(fMtlBuffer == nil ||
             this->intendedType() == GrGpuBufferType::kVertex ||
             this->intendedType() == GrGpuBufferType::kIndex ||
             this->intendedType() == GrGpuBufferType::kXferCpuToGpu ||
             this->intendedType() == GrGpuBufferType::kXferGpuToCpu ||
             this->intendedType() == GrGpuBufferType::kDrawIndirect ||
             this->intendedType() == GrGpuBufferType::kUniform);
    SkASSERT((fMapPtr && fMtlBuffer) || !fMapPtr);
}
#endif

GR_NORETAIN_END