Lines Matching refs:proxy in GrResourceAllocator.cpp

46 void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end,
52 if (proxy->canSkipResourceAllocator()) {
56 // If a proxy is read only it must refer to a texture with specific content that cannot be
57 // recycled. We don't need to assign a texture to it and no other proxy can be instantiated
59 if (proxy->readOnly()) {
61 if (proxy->isLazy() && !proxy->priv().doLazyInstantiation(resourceProvider)) {
64 // Since we aren't going to add an interval we won't revisit this proxy in assign(). So
65 // it must already be instantiated or it must be a lazy proxy that we instantiated above.
66 SkASSERT(proxy->isInstantiated());
70 uint32_t proxyID = proxy->uniqueID().asUInt();
76 // This interval is for the initial upload to a deferred proxy. Due to the vagaries
94 Interval* newIntvl = fInternalAllocator.make<Interval>(proxy, start, end);
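The matches at 46-94 above are the body of addInterval(): a proxy that can skip the allocator or is read-only never gets an interval (a read-only lazy proxy is instantiated on the spot instead), and every other proxy is recorded as a [start, end] use interval keyed by its proxy ID. A minimal sketch of that bookkeeping, using hypothetical stand-in types rather than Skia's GrSurfaceProxy, and omitting the read-only lazy-instantiation branch at 61-66, might look like this:

```cpp
#include <algorithm>
#include <cstdint>
#include <unordered_map>

struct FakeProxy {
    uint32_t id;        // ~ proxy->uniqueID().asUInt()
    bool canSkip;       // ~ proxy->canSkipResourceAllocator()
    bool readOnly;      // ~ proxy->readOnly()
};

struct UseInterval {
    FakeProxy* proxy;
    unsigned start;
    unsigned end;
};

class SketchAllocator {
public:
    void addInterval(FakeProxy* p, unsigned start, unsigned end) {
        if (p->canSkip || p->readOnly) {
            // No interval: the proxy must already have (or lazily create)
            // its own backing surface, so assign() never needs to see it.
            return;
        }
        auto it = fIntervals.find(p->id);
        if (it != fIntervals.end()) {
            // Already tracked: just widen the existing interval.
            it->second.start = std::min(it->second.start, start);
            it->second.end   = std::max(it->second.end, end);
        } else {
            fIntervals.insert({p->id, UseInterval{p, start, end}});
        }
    }

private:
    std::unordered_map<uint32_t, UseInterval> fIntervals;  // keyed by proxy ID
};
```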
104 static bool can_proxy_use_scratch(const GrCaps& caps, GrSurfaceProxy* proxy) {
105 return caps.reuseScratchTextures() || proxy->asRenderTargetProxy();
108 static bool user_cache_proxy(GrSurfaceProxy* proxy) {
109 GrTextureProxy* texProxy = proxy->asTextureProxy();
137 GrSurfaceProxy* proxy,
139 if (!can_proxy_use_scratch(caps, proxy)) {
149 // If all the refs on the proxy are known to the resource allocator then no one
151 return !proxy->refCntGreaterThan(knownUseCount);
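Lines 104-151 decide whether a register's backing surface may be handed to another proxy once its interval ends: scratch reuse must be possible at all (the caps allow it, or the proxy is a render target), and every outstanding ref on the proxy must be one the allocator already accounts for. A rough sketch of that check, with hypothetical types:

```cpp
struct FakeCaps {
    bool reuseScratchTextures;   // ~ caps.reuseScratchTextures()
};

struct RefCountedProxy {
    bool isRenderTarget;         // ~ proxy->asRenderTargetProxy() != nullptr
    int  refCnt;                 // total refs currently held on the proxy
};

static bool can_use_scratch(const FakeCaps& caps, const RefCountedProxy& p) {
    // Render targets can always be recycled; plain textures only if the
    // backend caps say scratch textures are reusable.
    return caps.reuseScratchTextures || p.isRenderTarget;
}

static bool is_recyclable(const FakeCaps& caps, const RefCountedProxy& p, int knownUseCount) {
    if (!can_use_scratch(caps, p)) {
        return false;
    }
    // If all refs are known to the allocator, no one outside it can observe
    // the backing surface after the interval ends, so it can be reused
    // (~ !proxy->refCntGreaterThan(knownUseCount)).
    return p.refCnt <= knownUseCount;
}
```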
154 bool GrResourceAllocator::Register::instantiateSurface(GrSurfaceProxy* proxy,
156 SkASSERT(!proxy->peekSurface());
160 if (proxy == fOriginatingProxy) {
161 newSurface = proxy->priv().createSurface(resourceProvider);
171 // Make surface budgeted if this proxy is budgeted.
172 if (SkBudgeted::kYes == proxy->isBudgeted() &&
179 // Propagate the proxy unique key to the surface if we have one.
180 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
186 proxy->priv().assign(fExistingSurface ? fExistingSurface : std::move(newSurface));
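Lines 154-186 are Register::instantiateSurface(): reuse a surface the register already holds if there is one, otherwise create a new one, then carry the proxy's budgeted state and unique key over before assigning it to the proxy. A simplified sketch with hypothetical types rather than GrSurface/GrResourceProvider (the originating-proxy distinction at 160-161 is collapsed here):

```cpp
#include <memory>
#include <string>
#include <utility>

struct FakeSurface {
    bool        budgeted = false;
    std::string uniqueKey;                    // empty means "no key"
};

struct InstantiableProxy {
    bool        budgeted = false;
    std::string uniqueKey;
    std::shared_ptr<FakeSurface> backing;     // ~ peekSurface() / priv().assign()
};

struct SketchRegister {
    std::shared_ptr<FakeSurface> existingSurface;   // from the free pool, may be null

    bool instantiateSurface(InstantiableProxy* proxy) {
        std::shared_ptr<FakeSurface> surface = existingSurface;
        if (!surface) {
            surface = std::make_shared<FakeSurface>();   // ~ createSurface()
        }
        if (!surface) {
            return false;                                // allocation failed
        }
        // Make the surface budgeted if this proxy is budgeted.
        if (proxy->budgeted) {
            surface->budgeted = true;
        }
        // Propagate the proxy's unique key to the surface, if it has one.
        if (!proxy->uniqueKey.empty()) {
            surface->uniqueKey = proxy->uniqueKey;
        }
        proxy->backing = std::move(surface);
        return true;
    }
};
```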
281 GrResourceAllocator::Register* GrResourceAllocator::findOrCreateRegisterFor(GrSurfaceProxy* proxy) {
284 if (const auto& uniqueKey = proxy->getUniqueKey(); uniqueKey.isValid()) {
289 Register* r = fInternalAllocator.make<Register>(proxy, GrScratchKey(), resourceProvider);
296 proxy->priv().computeScratchKey(*fDContext->priv().caps(), &scratchKey);
305 return fInternalAllocator.make<Register>(proxy, std::move(scratchKey), resourceProvider);
316 if (r && r->isRecyclable(*fDContext->priv().caps(), intvl->proxy(), intvl->uses())) {
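Lines 281-316 show the two register lookup paths: a proxy with a valid unique key maps to one dedicated register, while everything else gets a scratch key so that a recyclable register from an expired interval can be reused by the next compatible proxy. The idea, sketched with hypothetical types:

```cpp
#include <map>
#include <memory>
#include <string>
#include <vector>

struct PooledRegister {
    std::string scratchKey;
};

class RegisterTable {
public:
    // ~ findOrCreateRegisterFor(): a unique key takes precedence over a scratch key.
    PooledRegister* findOrCreate(const std::string& uniqueKey, const std::string& scratchKey) {
        if (!uniqueKey.empty()) {
            // One register per unique key; reuse it if we've seen it before.
            auto it = fUniqueRegisters.try_emplace(uniqueKey, PooledRegister{}).first;
            return &it->second;
        }
        // Scratch path: recycle a free register with a matching key if one exists.
        auto poolIt = fFreePool.find(scratchKey);
        if (poolIt != fFreePool.end() && !poolIt->second.empty()) {
            PooledRegister* r = poolIt->second.back();
            poolIt->second.pop_back();
            return r;
        }
        fOwned.push_back(std::make_unique<PooledRegister>(PooledRegister{scratchKey}));
        return fOwned.back().get();
    }

    // ~ the isRecyclable() path above: when an interval expires and its register
    // may be reused, return it to the free pool under its scratch key.
    void returnToFreePool(PooledRegister* r) {
        fFreePool[r->scratchKey].push_back(r);
    }

private:
    std::map<std::string, PooledRegister>                fUniqueRegisters;
    std::vector<std::unique_ptr<PooledRegister>>         fOwned;
    std::map<std::string, std::vector<PooledRegister*>>  fFreePool;
};
```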
344 if (cur->proxy()->isInstantiated()) {
349 if (cur->proxy()->isLazy()) {
350 if (cur->proxy()->isFullyLazy()) {
351 fFailedInstantiation = !cur->proxy()->priv().doLazyInstantiation(resourceProvider);
359 Register* r = this->findOrCreateRegisterFor(cur->proxy());
363 cur->proxy()->uniqueID().asUInt());
365 SkASSERT(!cur->proxy()->peekSurface());
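Lines 344-365 are the planning pass over the intervals sorted by start time: already-instantiated proxies are skipped, lazy proxies are deferred (fully-lazy ones are instantiated on the spot, since only they know their own dimensions), and every other proxy is paired with a register. Sketched roughly, with stand-in types:

```cpp
#include <functional>
#include <vector>

struct PlanProxy {
    bool instantiated = false;
    bool lazy = false;
    bool fullyLazy = false;
    bool doLazyInstantiation() { instantiated = true; return true; }  // stand-in callback
};

struct PlanRegister {};

struct PlanInterval {
    PlanProxy*    proxy;
    PlanRegister* reg = nullptr;
};

bool plan_assignment(std::vector<PlanInterval>& intervals,
                     const std::function<PlanRegister*(PlanProxy*)>& findOrCreateRegisterFor) {
    bool failedInstantiation = false;
    for (PlanInterval& intvl : intervals) {
        if (intvl.proxy->instantiated) {
            continue;                                   // surface already exists
        }
        if (intvl.proxy->lazy) {
            if (intvl.proxy->fullyLazy) {
                // Fully-lazy proxies are instantiated now; other lazy proxies
                // wait for the final pass and never get a register.
                failedInstantiation |= !intvl.proxy->doLazyInstantiation();
            }
            continue;
        }
        intvl.reg = findOrCreateRegisterFor(intvl.proxy);
    }
    return !failedInstantiation;
}
```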
379 GrSurfaceProxy* proxy = cur->proxy();
380 if (SkBudgeted::kNo == proxy->isBudgeted() || proxy->isInstantiated()) {
385 if (proxy->isLazy()) {
386 additionalBytesNeeded += proxy->gpuMemorySize();
391 additionalBytesNeeded += proxy->gpuMemorySize();
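Lines 379-391 estimate how much new budgeted GPU memory the plan would require: only budgeted, not-yet-instantiated proxies count, lazy proxies are counted conservatively since their eventual surface isn't known, and a non-lazy proxy whose register already holds a surface costs nothing new. A sketch of that accumulation, with hypothetical types (deduplication of proxies that share a register is omitted):

```cpp
#include <cstddef>
#include <vector>

struct BudgetedProxy {
    bool   budgeted = true;            // ~ SkBudgeted::kYes == proxy->isBudgeted()
    bool   instantiated = false;
    bool   lazy = false;
    bool   registerHasSurface = false; // does its register already own a surface?
    size_t gpuMemorySize = 0;          // ~ proxy->gpuMemorySize()
};

size_t additional_bytes_needed(const std::vector<BudgetedProxy>& proxies) {
    size_t additionalBytesNeeded = 0;
    for (const BudgetedProxy& p : proxies) {
        if (!p.budgeted || p.instantiated) {
            continue;                  // adds nothing to the budgeted cache
        }
        if (p.lazy || !p.registerHasSurface) {
            additionalBytesNeeded += p.gpuMemorySize;
        }
    }
    return additionalBytesNeeded;
}
```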
425 if (cur->proxy()->isInstantiated()) {
428 if (cur->proxy()->isLazy()) {
429 fFailedInstantiation = !cur->proxy()->priv().doLazyInstantiation(resourceProvider);
434 fFailedInstantiation = !r->instantiateSurface(cur->proxy(), resourceProvider);
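Lines 425-434 are the final instantiation pass: each remaining interval either runs its lazy instantiation callback or is backed by its register's surface, and the first failure sets a flag (~ fFailedInstantiation) that stops further allocation. A rough stand-in version:

```cpp
#include <vector>

struct FinalProxy {
    bool instantiated = false;
    bool lazy = false;
    bool doLazyInstantiation() { instantiated = true; return true; }   // stand-in callback
};

struct FinalRegister {
    bool instantiateSurface(FinalProxy* p) { p->instantiated = true; return true; }
};

struct FinalInterval {
    FinalProxy*    proxy;
    FinalRegister* reg;
};

bool assign_all(std::vector<FinalInterval>& intervals) {
    bool failedInstantiation = false;
    for (FinalInterval& intvl : intervals) {
        if (failedInstantiation) {
            break;                     // stop allocating after the first failure
        }
        if (intvl.proxy->instantiated) {
            continue;
        }
        if (intvl.proxy->lazy) {
            failedInstantiation = !intvl.proxy->doLazyInstantiation();
        } else {
            failedInstantiation = !intvl.reg->instantiateSurface(intvl.proxy);
        }
    }
    return !failedInstantiation;
}
```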
447 cur->proxy()->uniqueID().asUInt(),
448 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1,
451 cur->proxy()->priv().getProxyRefCnt(),
452 cur->proxy()->testingOnly_getBackingRefCnt());
460 cur->proxy()->uniqueID().asUInt(),
461 cur->proxy()->isInstantiated() ? cur->proxy()->underlyingUniqueID().asUInt() : -1);