1 /*
2 * Copyright 2017 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8 #include "src/gpu/mock/GrMockGpu.h"
9
10 #include "src/gpu/GrThreadSafePipelineBuilder.h"
11 #include "src/gpu/mock/GrMockAttachment.h"
12 #include "src/gpu/mock/GrMockBuffer.h"
13 #include "src/gpu/mock/GrMockCaps.h"
14 #include "src/gpu/mock/GrMockOpsRenderPass.h"
15 #include "src/gpu/mock/GrMockTexture.h"
16
17 #include <atomic>
18
NextInternalTextureID()19 int GrMockGpu::NextInternalTextureID() {
20 static std::atomic<int> nextID{1};
21 int id;
22 do {
23 id = nextID.fetch_add(1, std::memory_order_relaxed);
24 } while (0 == id); // Reserve 0 for an invalid ID.
25 return id;
26 }
27
NextExternalTextureID()28 int GrMockGpu::NextExternalTextureID() {
29 // We use negative ints for the "testing only external textures" so they can easily be
30 // identified when debugging.
31 static std::atomic<int> nextID{-1};
32 return nextID.fetch_add(-1, std::memory_order_relaxed);
33 }
34
NextInternalRenderTargetID()35 int GrMockGpu::NextInternalRenderTargetID() {
36 // We start off with large numbers to differentiate from texture IDs, even though they're
37 // technically in a different space.
38 static std::atomic<int> nextID{SK_MaxS32};
39 return nextID.fetch_add(-1, std::memory_order_relaxed);
40 }
41
NextExternalRenderTargetID()42 int GrMockGpu::NextExternalRenderTargetID() {
43 // We use large negative ints for the "testing only external render targets" so they can easily
44 // be identified when debugging.
45 static std::atomic<int> nextID{SK_MinS32};
46 return nextID.fetch_add(1, std::memory_order_relaxed);
47 }
48
Make(const GrMockOptions* mockOptions, const GrContextOptions& contextOptions, GrDirectContext* direct)49 sk_sp<GrGpu> GrMockGpu::Make(const GrMockOptions* mockOptions,
50 const GrContextOptions& contextOptions, GrDirectContext* direct) {
51 static const GrMockOptions kDefaultOptions = GrMockOptions();
52 if (!mockOptions) {
53 mockOptions = &kDefaultOptions;
54 }
55 return sk_sp<GrGpu>(new GrMockGpu(direct, *mockOptions, contextOptions));
56 }
57
onGetOpsRenderPass(GrRenderTarget* rt, bool , GrAttachment*, GrSurfaceOrigin origin, const SkIRect& bounds, const GrOpsRenderPass::LoadAndStoreInfo& colorInfo, const GrOpsRenderPass::StencilLoadAndStoreInfo&, const SkTArray<GrSurfaceProxy*,true>& sampledProxies, GrXferBarrierFlags renderPassXferBarriers)58 GrOpsRenderPass* GrMockGpu::onGetOpsRenderPass(GrRenderTarget* rt,
59 bool /*useMSAASurface*/,
60 GrAttachment*,
61 GrSurfaceOrigin origin,
62 const SkIRect& bounds,
63 const GrOpsRenderPass::LoadAndStoreInfo& colorInfo,
64 const GrOpsRenderPass::StencilLoadAndStoreInfo&,
65 const SkTArray<GrSurfaceProxy*,true>& sampledProxies,
66 GrXferBarrierFlags renderPassXferBarriers) {
67 return new GrMockOpsRenderPass(this, rt, origin, colorInfo);
68 }
69
submit(GrOpsRenderPass* renderPass)70 void GrMockGpu::submit(GrOpsRenderPass* renderPass) {
71 for (int i = 0; i < static_cast<GrMockOpsRenderPass*>(renderPass)->numDraws(); ++i) {
72 fStats.incNumDraws();
73 }
74 delete renderPass;
75 }
76
// Constructs the mock GPU, storing the mock options and building mock caps
// from both option sets. initCapsAndCompiler() (base class) wires up the caps
// and the shader compiler.
GrMockGpu::GrMockGpu(GrDirectContext* direct, const GrMockOptions& options,
                     const GrContextOptions& contextOptions)
        : INHERITED(direct)
        , fMockOptions(options) {
    this->initCapsAndCompiler(sk_make_sp<GrMockCaps>(contextOptions, options));
}
83
~GrMockGpu()84 GrMockGpu::~GrMockGpu() {}
85
// The mock backend compiles no pipelines, so there is no builder to expose.
GrThreadSafePipelineBuilder* GrMockGpu::pipelineBuilder() {
    return nullptr;
}
89
// Ref-counted variant of pipelineBuilder(); likewise always null for the mock
// backend.
sk_sp<GrThreadSafePipelineBuilder> GrMockGpu::refPipelineBuilder() {
    return nullptr;
}
93
onCreateTexture(SkISize dimensions, const GrBackendFormat& format, GrRenderable renderable, int renderTargetSampleCnt, SkBudgeted budgeted, GrProtected isProtected, int mipLevelCount, uint32_t levelClearMask)94 sk_sp<GrTexture> GrMockGpu::onCreateTexture(SkISize dimensions,
95 const GrBackendFormat& format,
96 GrRenderable renderable,
97 int renderTargetSampleCnt,
98 SkBudgeted budgeted,
99 GrProtected isProtected,
100 int mipLevelCount,
101 uint32_t levelClearMask) {
102 if (fMockOptions.fFailTextureAllocations) {
103 return nullptr;
104 }
105
106 // Compressed formats should go through onCreateCompressedTexture
107 SkASSERT(format.asMockCompressionType() == SkImage::CompressionType::kNone);
108
109 GrColorType ct = format.asMockColorType();
110 SkASSERT(ct != GrColorType::kUnknown);
111
112 GrMipmapStatus mipmapStatus =
113 mipLevelCount > 1 ? GrMipmapStatus::kDirty : GrMipmapStatus::kNotAllocated;
114 GrMockTextureInfo texInfo(ct, SkImage::CompressionType::kNone, NextInternalTextureID());
115 if (renderable == GrRenderable::kYes) {
116 GrMockRenderTargetInfo rtInfo(ct, NextInternalRenderTargetID());
117 return sk_sp<GrTexture>(new GrMockTextureRenderTarget(this, budgeted, dimensions,
118 renderTargetSampleCnt, isProtected,
119 mipmapStatus, texInfo, rtInfo));
120 }
121 return sk_sp<GrTexture>(
122 new GrMockTexture(this, budgeted, dimensions, isProtected, mipmapStatus, texInfo));
123 }
124
125 // TODO: why no 'isProtected' ?!
onCreateCompressedTexture(SkISize dimensions, const GrBackendFormat& format, SkBudgeted budgeted, GrMipmapped mipMapped, GrProtected isProtected, const void* data, size_t dataSize)126 sk_sp<GrTexture> GrMockGpu::onCreateCompressedTexture(SkISize dimensions,
127 const GrBackendFormat& format,
128 SkBudgeted budgeted,
129 GrMipmapped mipMapped,
130 GrProtected isProtected,
131 const void* data, size_t dataSize) {
132 if (fMockOptions.fFailTextureAllocations) {
133 return nullptr;
134 }
135
136 #ifdef SK_DEBUG
137 // Uncompressed formats should go through onCreateTexture
138 SkImage::CompressionType compression = format.asMockCompressionType();
139 SkASSERT(compression != SkImage::CompressionType::kNone);
140 #endif
141
142 GrMipmapStatus mipmapStatus = (mipMapped == GrMipmapped::kYes)
143 ? GrMipmapStatus::kValid
144 : GrMipmapStatus::kNotAllocated;
145 GrMockTextureInfo texInfo(GrColorType::kUnknown,
146 format.asMockCompressionType(),
147 NextInternalTextureID());
148
149 return sk_sp<GrTexture>(
150 new GrMockTexture(this, budgeted, dimensions, isProtected, mipmapStatus, texInfo));
151 }
152
// Creating a compressed texture from an OH_NativeBuffer is not supported by
// the mock backend: asserts in debug builds and returns null otherwise.
sk_sp<GrTexture> GrMockGpu::onCreateCompressedTexture(SkISize dimensions,
                                                      const GrBackendFormat& format,
                                                      SkBudgeted budgeted,
                                                      GrMipmapped mipMapped,
                                                      GrProtected isProtected,
                                                      OH_NativeBuffer* nativeBuffer,
                                                      size_t bufferSize) {
    SkASSERT(!"unimplemented");
    return nullptr;
}
163
onWrapBackendTexture(const GrBackendTexture& tex, GrWrapOwnership ownership, GrWrapCacheable wrapType, GrIOType ioType)164 sk_sp<GrTexture> GrMockGpu::onWrapBackendTexture(const GrBackendTexture& tex,
165 GrWrapOwnership ownership,
166 GrWrapCacheable wrapType,
167 GrIOType ioType) {
168 GrMockTextureInfo texInfo;
169 SkAssertResult(tex.getMockTextureInfo(&texInfo));
170
171 SkImage::CompressionType compression = texInfo.compressionType();
172 if (compression != SkImage::CompressionType::kNone) {
173 return nullptr;
174 }
175
176 GrMipmapStatus mipmapStatus = tex.hasMipmaps() ? GrMipmapStatus::kValid
177 : GrMipmapStatus::kNotAllocated;
178 auto isProtected = GrProtected(tex.isProtected());
179 return sk_sp<GrTexture>(new GrMockTexture(this, tex.dimensions(), isProtected, mipmapStatus,
180 texInfo, wrapType, ioType));
181 }
182
onWrapCompressedBackendTexture(const GrBackendTexture& tex, GrWrapOwnership ownership, GrWrapCacheable wrapType)183 sk_sp<GrTexture> GrMockGpu::onWrapCompressedBackendTexture(const GrBackendTexture& tex,
184 GrWrapOwnership ownership,
185 GrWrapCacheable wrapType) {
186 return nullptr;
187 }
188
onWrapRenderableBackendTexture(const GrBackendTexture& tex, int sampleCnt, GrWrapOwnership ownership, GrWrapCacheable cacheable)189 sk_sp<GrTexture> GrMockGpu::onWrapRenderableBackendTexture(const GrBackendTexture& tex,
190 int sampleCnt,
191 GrWrapOwnership ownership,
192 GrWrapCacheable cacheable) {
193 GrMockTextureInfo texInfo;
194 SkAssertResult(tex.getMockTextureInfo(&texInfo));
195 SkASSERT(texInfo.compressionType() == SkImage::CompressionType::kNone);
196
197 GrMipmapStatus mipmapStatus =
198 tex.hasMipmaps() ? GrMipmapStatus::kValid : GrMipmapStatus::kNotAllocated;
199
200 // The client gave us the texture ID but we supply the render target ID.
201 GrMockRenderTargetInfo rtInfo(texInfo.colorType(), NextInternalRenderTargetID());
202
203 auto isProtected = GrProtected(tex.isProtected());
204 return sk_sp<GrTexture>(new GrMockTextureRenderTarget(this, tex.dimensions(), sampleCnt,
205 isProtected, mipmapStatus, texInfo,
206 rtInfo, cacheable));
207 }
208
onWrapBackendRenderTarget(const GrBackendRenderTarget& rt)209 sk_sp<GrRenderTarget> GrMockGpu::onWrapBackendRenderTarget(const GrBackendRenderTarget& rt) {
210 GrMockRenderTargetInfo info;
211 SkAssertResult(rt.getMockRenderTargetInfo(&info));
212
213 auto isProtected = GrProtected(rt.isProtected());
214 return sk_sp<GrRenderTarget>(new GrMockRenderTarget(this, GrMockRenderTarget::kWrapped,
215 rt.dimensions(), rt.sampleCnt(),
216 isProtected, info));
217 }
218
onCreateBuffer(size_t sizeInBytes, GrGpuBufferType type, GrAccessPattern accessPattern, const void*)219 sk_sp<GrGpuBuffer> GrMockGpu::onCreateBuffer(size_t sizeInBytes, GrGpuBufferType type,
220 GrAccessPattern accessPattern, const void*) {
221 return sk_sp<GrGpuBuffer>(new GrMockBuffer(this, sizeInBytes, type, accessPattern));
222 }
223
makeStencilAttachment(const GrBackendFormat& , SkISize dimensions, int numStencilSamples)224 sk_sp<GrAttachment> GrMockGpu::makeStencilAttachment(const GrBackendFormat& /*colorFormat*/,
225 SkISize dimensions, int numStencilSamples) {
226 fStats.incStencilAttachmentCreates();
227 return sk_sp<GrAttachment>(
228 new GrMockAttachment(this, dimensions, GrAttachment::UsageFlags::kStencilAttachment,
229 numStencilSamples));
230 }
231
onCreateBackendTexture(SkISize dimensions, const GrBackendFormat& format, GrRenderable, GrMipmapped mipMapped, GrProtected)232 GrBackendTexture GrMockGpu::onCreateBackendTexture(SkISize dimensions,
233 const GrBackendFormat& format,
234 GrRenderable,
235 GrMipmapped mipMapped,
236 GrProtected) {
237 SkImage::CompressionType compression = format.asMockCompressionType();
238 if (compression != SkImage::CompressionType::kNone) {
239 return {}; // should go through onCreateCompressedBackendTexture
240 }
241
242 auto colorType = format.asMockColorType();
243 if (!this->caps()->isFormatTexturable(format, GrTextureType::k2D)) {
244 return GrBackendTexture(); // invalid
245 }
246
247 GrMockTextureInfo info(colorType, SkImage::CompressionType::kNone, NextExternalTextureID());
248
249 fOutstandingTestingOnlyTextureIDs.add(info.id());
250 return GrBackendTexture(dimensions.width(), dimensions.height(), mipMapped, info);
251 }
252
onCreateCompressedBackendTexture( SkISize dimensions, const GrBackendFormat& format, GrMipmapped mipMapped, GrProtected)253 GrBackendTexture GrMockGpu::onCreateCompressedBackendTexture(
254 SkISize dimensions, const GrBackendFormat& format, GrMipmapped mipMapped,
255 GrProtected) {
256 SkImage::CompressionType compression = format.asMockCompressionType();
257 if (compression == SkImage::CompressionType::kNone) {
258 return {}; // should go through onCreateBackendTexture
259 }
260
261 if (!this->caps()->isFormatTexturable(format, GrTextureType::k2D)) {
262 return {};
263 }
264
265 GrMockTextureInfo info(GrColorType::kUnknown, compression, NextExternalTextureID());
266
267 fOutstandingTestingOnlyTextureIDs.add(info.id());
268 return GrBackendTexture(dimensions.width(), dimensions.height(), mipMapped, info);
269 }
270
deleteBackendTexture(const GrBackendTexture& tex)271 void GrMockGpu::deleteBackendTexture(const GrBackendTexture& tex) {
272 SkASSERT(GrBackendApi::kMock == tex.backend());
273
274 GrMockTextureInfo info;
275 if (tex.getMockTextureInfo(&info)) {
276 fOutstandingTestingOnlyTextureIDs.remove(info.id());
277 }
278 }
279
280 #if GR_TEST_UTILS
isTestingOnlyBackendTexture(const GrBackendTexture& tex) const281 bool GrMockGpu::isTestingOnlyBackendTexture(const GrBackendTexture& tex) const {
282 SkASSERT(GrBackendApi::kMock == tex.backend());
283
284 GrMockTextureInfo info;
285 if (!tex.getMockTextureInfo(&info)) {
286 return false;
287 }
288
289 return fOutstandingTestingOnlyTextureIDs.contains(info.id());
290 }
291
createTestingOnlyBackendRenderTarget(SkISize dimensions, GrColorType colorType, int sampleCnt, GrProtected)292 GrBackendRenderTarget GrMockGpu::createTestingOnlyBackendRenderTarget(SkISize dimensions,
293 GrColorType colorType,
294 int sampleCnt,
295 GrProtected) {
296 GrMockRenderTargetInfo info(colorType, NextExternalRenderTargetID());
297 static constexpr int kStencilBits = 8;
298 return GrBackendRenderTarget(dimensions.width(), dimensions.height(), sampleCnt, kStencilBits,
299 info);
300 }
301
deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget&)302 void GrMockGpu::deleteTestingOnlyBackendRenderTarget(const GrBackendRenderTarget&) {}
303 #endif
304