// /third_party/skia/src/gpu/GrRenderTask.h (revision cb93a386)
/*
 * Copyright 2019 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrRenderTask_DEFINED
#define GrRenderTask_DEFINED

#include "include/core/SkRefCnt.h"
#include "include/private/SkTArray.h"
#include "src/core/SkTInternalLList.h"
#include "src/gpu/GrSurfaceProxyView.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/GrTextureResolveManager.h"

class GrMockRenderTask;
class GrOpFlushState;
class GrResourceAllocator;
class GrTextureResolveRenderTask;
namespace skgpu { namespace v1 { class OpsTask; }}

// This class abstracts a task that targets a single GrSurfaceProxy, participates in the
// GrDrawingManager's DAG, and implements the onExecute method to modify its target proxy's
// contents. (e.g., an opsTask that executes a command buffer, a task to regenerate mipmaps, etc.)
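//
// A rough sketch of the lifecycle (descriptive only; GrDrawingManager owns the exact ordering):
// a task is recorded while open, sealed via makeClosed() (which may also wire in an implicit
// texture-resolve dependency), and at flush time the tasks are topologically sorted, prepared
// (prepare()), and executed (execute()).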
class GrRenderTask : public SkRefCnt {
public:
    GrRenderTask();
    SkDEBUGCODE(~GrRenderTask() override);

    void makeClosed(GrRecordingContext*);

    void prePrepare(GrRecordingContext* context) { this->onPrePrepare(context); }

    // These two methods are only invoked at flush time
    void prepare(GrOpFlushState* flushState);
    bool execute(GrOpFlushState* flushState) { return this->onExecute(flushState); }

    virtual bool requiresExplicitCleanup() const { return false; }

    // Called when this task will survive a flush and needs to truncate its ops and start over.
    // TODO: ultimately it should be invalid for an op list to survive a flush.
    // https://bugs.chromium.org/p/skia/issues/detail?id=7111
    virtual void endFlush(GrDrawingManager*) {}

    // This method "disowns" all the GrSurfaceProxies this RenderTask modifies. In
    // practice this just means telling the drawingManager to forget the relevant
    // mappings from surface proxy to last modifying rendertask.
    virtual void disown(GrDrawingManager*);

    bool isClosed() const { return this->isSetFlag(kClosed_Flag); }

    /**
     * Make this task skippable. For now this must be used purely as an optimization, since not
     * all tasks will actually skip their work. It would be better if we could detect skippable
     * tasks automatically. We'd need to support minimal flushes (i.e., only flush what is
     * required for SkSurfaces/SkImages) and the ability to detect "orphaned tasks" and clean
     * them out of the DAG so they don't accumulate indefinitely. Finally, we'd probably have to
     * track whether a proxy's backing store was imported from, or ever exported to, the client,
     * in case the client is doing direct reads outside of Skia and thus may require tasks
     * targeting the proxy to execute even if our DAG contains no reads.
     */
    void makeSkippable();

    bool isSkippable() const { return this->isSetFlag(kSkippable_Flag); }

    /*
     * Notify this GrRenderTask that it relies on the contents of 'dependedOn'
     */
    void addDependency(GrDrawingManager*, GrSurfaceProxy* dependedOn, GrMipmapped,
                       GrTextureResolveManager, const GrCaps& caps);
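    // (Note: the GrMipmapped, GrTextureResolveManager, and GrCaps parameters let this call decide
    // whether 'dependedOn' first needs an MSAA resolve or mipmap regeneration and, if so, route
    // that work through the shared texture-resolve task (see fTextureResolveTask below). This is
    // a descriptive summary of the current behavior, not part of the contract.)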

    /*
     * Notify this GrRenderTask that it relies on the contents of all GrRenderTasks which otherTask
     * depends on.
     */
    void addDependenciesFromOtherTask(GrRenderTask* otherTask);

    SkSpan<GrRenderTask*> dependencies() { return SkMakeSpan(fDependencies); }
    SkSpan<GrRenderTask*> dependents() { return SkMakeSpan(fDependents); }

    void replaceDependency(const GrRenderTask* toReplace, GrRenderTask* replaceWith);
    void replaceDependent(const GrRenderTask* toReplace, GrRenderTask* replaceWith);

    /*
     * Does this renderTask depend on 'dependedOn'?
     */
    bool dependsOn(const GrRenderTask* dependedOn) const;

    virtual void gatherIDs(SkSTArray<8, uint32_t, true>* idArray) const {
        idArray->push_back(fUniqueID);
    }
    uint32_t uniqueID() const { return fUniqueID; }
    int numTargets() const { return fTargets.count(); }
    GrSurfaceProxy* target(int i) const { return fTargets[i].get(); }

    /*
     * Safely cast this GrRenderTask to an OpsTask (if possible).
     */
    virtual skgpu::v1::OpsTask* asOpsTask() { return nullptr; }

#if GR_TEST_UTILS
    /*
     * Dump out the GrRenderTask dependency DAG
     */
    virtual void dump(const SkString& label,
                      SkString indent,
                      bool printDependencies,
                      bool close) const;
    virtual const char* name() const = 0;
#endif

#ifdef SK_DEBUG
    virtual int numClips() const { return 0; }

    virtual void visitProxies_debugOnly(const GrVisitProxyFunc&) const = 0;

    void visitTargetAndSrcProxies_debugOnly(const GrVisitProxyFunc& func) const {
        this->visitProxies_debugOnly(func);
        for (const sk_sp<GrSurfaceProxy>& target : fTargets) {
            func(target.get(), GrMipmapped::kNo);
        }
    }
#endif

    bool isUsed(GrSurfaceProxy* proxy) const {
        for (const sk_sp<GrSurfaceProxy>& target : fTargets) {
            if (target.get() == proxy) {
                return true;
            }
        }

        return this->onIsUsed(proxy);
    }

    // Feed proxy usage intervals to the GrResourceAllocator class
    virtual void gatherProxyIntervals(GrResourceAllocator*) const = 0;

    // In addition to just the GrSurface being allocated, has the stencil buffer been allocated (if
    // it is required)?
    bool isInstantiated() const;

    // Used by GrRenderTaskCluster.
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(GrRenderTask);

protected:
    SkDEBUGCODE(bool deferredProxiesAreInstantiated() const;)

    // Add a target surface proxy to the list of targets for this task.
    // This also informs the drawing manager to update the lastRenderTask association.
    void addTarget(GrDrawingManager*, sk_sp<GrSurfaceProxy>);

    // Helper that adds the proxy owned by a view.
    void addTarget(GrDrawingManager* dm, const GrSurfaceProxyView& view) {
        this->addTarget(dm, view.refProxy());
    }

    enum class ExpectedOutcome : bool {
        kTargetUnchanged,
        kTargetDirty,
    };

    // Performs any work needed to finalize this renderTask prior to execution. If returning
    // ExpectedOutcome::kTargetDirty, the task is also responsible for filling out the area it
    // will modify in targetUpdateBounds.
    //
    // targetUpdateBounds must not extend beyond the proxy bounds.
    virtual ExpectedOutcome onMakeClosed(GrRecordingContext*, SkIRect* targetUpdateBounds) = 0;
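    // (For example, hypothetically, an ops task that recorded draws covering some device-space
    // rect would return kTargetDirty and write that rect into *targetUpdateBounds, while a task
    // that ended up recording no work could return kTargetUnchanged.)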

    SkSTArray<1, sk_sp<GrSurfaceProxy>> fTargets;

    // List of texture proxies whose contents are being prepared on a worker thread
    // TODO: this list exists so we can fire off the proper upload when a renderTask begins
    // executing. Can this be replaced?
    SkTArray<GrTextureProxy*, true> fDeferredProxies;

    enum Flags {
        kClosed_Flag    = 0x01,   //!< This task can't accept any more dependencies.
        kDisowned_Flag  = 0x02,   //!< This task is disowned by its creating GrDrawingManager.
        kSkippable_Flag = 0x04,   //!< This task is skippable.
        kAtlas_Flag     = 0x08,   //!< This task is an atlas task.

        kWasOutput_Flag = 0x10,   //!< Flag for topological sorting
        kTempMark_Flag  = 0x20,   //!< Flag for topological sorting
    };

    void setFlag(uint32_t flag) {
        fFlags |= flag;
    }

    void resetFlag(uint32_t flag) {
        fFlags &= ~flag;
    }

    bool isSetFlag(uint32_t flag) const {
        return SkToBool(fFlags & flag);
    }

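    // The flag bits above occupy the low 6 bits of fFlags; setIndex() packs the topological-sort
    // index into the remaining upper 26 bits (hence the assert that index < (1 << 26) and the
    // shift by 6 in getIndex()).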
    void setIndex(uint32_t index) {
        SkASSERT(!this->isSetFlag(kWasOutput_Flag));
        SkASSERT(index < (1 << 26));
        fFlags |= index << 6;
    }

    uint32_t getIndex() const {
        SkASSERT(this->isSetFlag(kWasOutput_Flag));
        return fFlags >> 6;
    }

private:
    // for TopoSortTraits, fTextureResolveTask, addDependency
    friend class GrDrawingManager;
    friend class GrMockRenderTask;

    // Derived classes can override to indicate usage of proxies _other than target proxies_.
    // GrRenderTask itself will handle checking the target proxies.
    virtual bool onIsUsed(GrSurfaceProxy*) const = 0;

    void addDependency(GrRenderTask* dependedOn);
    void addDependent(GrRenderTask* dependent);
    SkDEBUGCODE(bool isDependent(const GrRenderTask* dependent) const;)
    SkDEBUGCODE(void validate() const;)

    static uint32_t CreateUniqueID();

    struct TopoSortTraits {
        static uint32_t GetIndex(GrRenderTask* renderTask) {
            return renderTask->getIndex();
        }
        static void Output(GrRenderTask* renderTask, uint32_t index) {
            renderTask->setIndex(index);
            renderTask->setFlag(kWasOutput_Flag);
        }
        static bool WasOutput(const GrRenderTask* renderTask) {
            return renderTask->isSetFlag(kWasOutput_Flag);
        }
        static void SetTempMark(GrRenderTask* renderTask) {
            renderTask->setFlag(kTempMark_Flag);
        }
        static void ResetTempMark(GrRenderTask* renderTask) {
            renderTask->resetFlag(kTempMark_Flag);
        }
        static bool IsTempMarked(const GrRenderTask* renderTask) {
            return renderTask->isSetFlag(kTempMark_Flag);
        }
        static int NumDependencies(const GrRenderTask* renderTask) {
            return renderTask->fDependencies.count();
        }
        static GrRenderTask* Dependency(GrRenderTask* renderTask, int index) {
            return renderTask->fDependencies[index];
        }
    };
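    // (These are the hooks that GrDrawingManager's topological sort uses to order render tasks
    // before flush; hence the friend declaration above.)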

    virtual void onMakeSkippable() {}
    virtual void onPrePrepare(GrRecordingContext*) {} // Only OpsTask currently overrides this
    virtual void onPrepare(GrOpFlushState*) {} // OpsTask and GrDDLTask override this
    virtual bool onExecute(GrOpFlushState* flushState) = 0;

    const uint32_t         fUniqueID;
    uint32_t               fFlags;

    // 'this' GrRenderTask relies on the output of the GrRenderTasks in 'fDependencies'
    SkSTArray<1, GrRenderTask*, true> fDependencies;
    // 'this' GrRenderTask's output is relied on by the GrRenderTasks in 'fDependents'
    SkSTArray<1, GrRenderTask*, true> fDependents;

    // For performance reasons, we should perform texture resolves back-to-back as much as
    // possible (http://skbug.com/9406). To accomplish this, we make and reuse a single resolve
    // task for each render task, then add it as a dependency during makeClosed().
    GrTextureResolveRenderTask* fTextureResolveTask = nullptr;

    SkDEBUGCODE(GrDrawingManager *fDrawingMgr = nullptr;)
};

#endif