/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrManagedResource_DEFINED
#define GrManagedResource_DEFINED

#include "include/private/GrTypesPriv.h"
#include "include/private/SkMutex.h"
#include "include/private/SkTHash.h"
#include "include/utils/SkRandom.h"
#include <atomic>

class GrTexture;

// Defined in debug builds to enable tracing of resource refs.
#ifdef SK_DEBUG
#define SK_TRACE_MANAGED_RESOURCES
#endif

/** \class GrManagedResource

  GrManagedResource is the base class for GPU resources that may be shared by
  multiple objects, in particular objects that are tracked by a command buffer.
  When an existing owner wants to share a reference, it calls ref().
  When an owner wants to release its reference, it calls unref(). When the
  shared object's reference count goes to zero as the result of an unref()
  call, its (virtual) destructor is called. It is an error for the
  destructor to be called explicitly (or via the object going out of scope on
  the stack or calling delete) if getRefCnt() > 1.

  This is nearly identical to SkRefCntBase. The exception is that derived
  classes must implement freeGPUData(), which is called to release any GPU
  data just before the object is deleted.
*/
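
/*
  A minimal usage sketch of the ownership contract. MyBufferResource and the
  command-buffer hand-off below are hypothetical, shown only for illustration:

      class MyBufferResource : public GrManagedResource {
      private:
          void freeGPUData() const override {
              // release the backend buffer object here
          }
      #ifdef SK_TRACE_MANAGED_RESOURCES
          void dumpInfo() const override {
              SkDebugf("MyBufferResource: refCnt = %d\n", this->getRefCnt());
          }
      #endif
      };

      MyBufferResource* buffer = new MyBufferResource();  // refCnt == 1, caller owns it
      buffer->ref();      // refCnt == 2, e.g. a tracking command buffer takes its own ref
      buffer->unref();    // refCnt == 1, caller drops its ref; the command buffer now owns it
      buffer->unref();    // refCnt == 0, freeGPUData() runs and the object is deleted
*/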

class GrManagedResource : SkNoncopyable {
public:
    // Simple refCount tracing, to ensure that everything ref'ed is unref'ed.
#ifdef SK_TRACE_MANAGED_RESOURCES
    struct Hash {
        uint32_t operator()(const GrManagedResource* const& r) const {
            SkASSERT(r);
            return r->fKey;
        }
    };

    class Trace {
    public:
        ~Trace() {
            fHashSet.foreach([](const GrManagedResource* r) {
                r->dumpInfo();
            });
            SkASSERT(0 == fHashSet.count());
        }

        void add(const GrManagedResource* r) {
            SkAutoMutexExclusive locked(fLock);
            fHashSet.add(r);
        }

        void remove(const GrManagedResource* r) {
            SkAutoMutexExclusive locked(fLock);
            fHashSet.remove(r);
        }

    private:
        SkMutex fLock;
        SkTHashSet<const GrManagedResource*, GrManagedResource::Hash> fHashSet SK_GUARDED_BY(fLock);
    };

    static std::atomic<uint32_t> fKeyCounter;
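    // Note: as a non-inline static data member, fKeyCounter also needs an out-of-line
    // definition in a .cpp file, e.g.:
    //     std::atomic<uint32_t> GrManagedResource::fKeyCounter{0};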
#endif

    /** Default construct, initializing the reference count to 1.
     */
    GrManagedResource() : fRefCnt(1) {
#ifdef SK_TRACE_MANAGED_RESOURCES
        fKey = fKeyCounter.fetch_add(+1, std::memory_order_relaxed);
        GetTrace()->add(this);
#endif
    }

    /** Destruct, asserting that the reference count is 1.
     */
    virtual ~GrManagedResource() {
#ifdef SK_DEBUG
        auto count = this->getRefCnt();
        SkASSERTF(count == 1, "fRefCnt was %d", count);
        fRefCnt.store(0);    // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt.load(); }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        // The acquire barrier is only really needed if we return true.  It
        // prevents code conditioned on the result of unique() from running
        // until previous owners are all totally done calling unref().
        return 1 == fRefCnt.load(std::memory_order_acquire);
    }

    /** Increment the reference count.
        Must be balanced by a call to unref().
     */
    void ref() const {
        // No barrier required. fetch_add returns the count held before the increment.
        SkDEBUGCODE(int prevRefCount = )fRefCnt.fetch_add(+1, std::memory_order_relaxed);
        SkASSERT(prevRefCount >= 1);
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
        Any GPU data associated with this resource will be freed before it's deleted.
     */
    void unref() const {
        // A release here acts in place of all releases we "should" have been doing in ref().
        int prevRefCount = fRefCnt.fetch_add(-1, std::memory_order_acq_rel);
        SkASSERT(prevRefCount >= 1);
        if (prevRefCount == 1) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    // This is used for validation in the Vulkan backend when a main command buffer and a temp
    // command buffer are in use at the same time. We need to validate that no images in the temp
    // command buffer have been used in the main command buffer.
    virtual const GrManagedResource* asVkImageResource() const { return nullptr; }
#endif

#ifdef SK_DEBUG
    void validate() const {
        SkASSERT(this->getRefCnt() > 0);
    }
#endif

#ifdef SK_TRACE_MANAGED_RESOURCES
    /** Output a human-readable dump of this resource's information
     */
    virtual void dumpInfo() const = 0;
#endif

private:
#ifdef SK_TRACE_MANAGED_RESOURCES
    static Trace* GetTrace() {
        static Trace kTrace;
        return &kTrace;
    }
#endif

    /** Must be implemented by any subclass.
     *  Deletes any GPU data associated with this resource.
     */
    virtual void freeGPUData() const = 0;

    /**
     *  Called when the ref count goes to 0. Will free GPU resources.
     */
    void internal_dispose() const {
        this->freeGPUData();
#ifdef SK_TRACE_MANAGED_RESOURCES
        GetTrace()->remove(this);
#endif

#ifdef SK_DEBUG
        SkASSERT(0 == this->getRefCnt());
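        // Restore the count to 1 so the destructor's SkASSERTF(count == 1) check passes.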
        fRefCnt.store(1);
#endif
        delete this;
    }

    mutable std::atomic<int32_t> fRefCnt;
#ifdef SK_TRACE_MANAGED_RESOURCES
    uint32_t fKey;
#endif

    using INHERITED = SkNoncopyable;
};

// This subclass allows for recycling.
class GrRecycledResource : public GrManagedResource {
public:
    // When recycle is called and there is only one ref left on the resource, we will signal that
    // the resource can be recycled for reuse. If the subclass (or whoever is managing this
    // resource) decides not to recycle the object, it is their responsibility to call unref on
    // the object. See the usage sketch below the class.
    void recycle() const {
        if (this->unique()) {
            this->onRecycle();
        } else {
            this->unref();
        }
    }

private:
    virtual void onRecycle() const = 0;
};
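
/*
  A sketch of the recycling contract. MyRecycledImage, fPool and returnToPool() are
  hypothetical names used only for illustration:

      class MyRecycledImage : public GrRecycledResource {
      private:
          void onRecycle() const override {
              // recycle() found us unique, so we still hold the last ref. Hand the object
              // to a free pool for reuse; if the pool later decides not to reuse it, the
              // pool must call unref() to destroy it.
              fPool->returnToPool(this);
          }
          void freeGPUData() const override { ... }   // plus dumpInfo() in traced builds
      };

      // Owners release their reference with recycle() instead of unref():
      image->recycle();   // recycled via onRecycle() if unique, otherwise just unref'ed
*/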

/** \class GrTextureResource

  GrTextureResource is the base class for managed texture resources and implements the
  basic releaseProc functionality for them.
*/
class GrTextureResource : public GrManagedResource {
public:
    GrTextureResource() {}

    ~GrTextureResource() override {
        SkASSERT(!fReleaseHelper);
    }

    void setRelease(sk_sp<GrRefCntedCallback> releaseHelper) {
        fReleaseHelper = std::move(releaseHelper);
    }

protected:
    mutable sk_sp<GrRefCntedCallback> fReleaseHelper;

    void invokeReleaseProc() const {
        if (fReleaseHelper) {
            // Depending on the ref count of fReleaseHelper, this may or may not actually cause
            // the ReleaseProc to be called.
            fReleaseHelper.reset();
        }
    }

private:
    using INHERITED = GrManagedResource;
};
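
/*
  A sketch of the release-proc flow. It assumes the (callback, context) form of
  GrRefCntedCallback from GrTypesPriv.h; the texture subclass, NotifyClient and
  clientCtx are hypothetical:

      static void NotifyClient(void* ctx) { ... }

      void* clientCtx = ...;  // whatever NotifyClient needs
      sk_sp<GrRefCntedCallback> helper(new GrRefCntedCallback(NotifyClient, clientCtx));
      textureResource->setRelease(std::move(helper));

      // Later, when the owning subclass releases the backend texture, it calls
      // invokeReleaseProc(). NotifyClient runs once the last reference to the
      // helper goes away.
*/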

#endif