/*
 * Copyright 2019 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrRenderTask_DEFINED
#define GrRenderTask_DEFINED

#include "include/core/SkRefCnt.h"
#include "include/private/SkColorData.h"
#include "include/private/SkTDArray.h"
#include "src/gpu/GrTextureProxy.h"
#include "src/gpu/GrTextureResolveManager.h"

class GrOpFlushState;
class GrOpsTask;
class GrResourceAllocator;
class GrTextureResolveRenderTask;

// This class abstracts a task that targets a single GrSurfaceProxy, participates in the
// GrDrawingManager's DAG, and implements the onExecute method to modify its target proxy's
// contents. (e.g., an opsTask that executes a command buffer, a task to regenerate mipmaps, etc.)
class GrRenderTask : public SkRefCnt {
public:
    GrRenderTask(sk_sp<GrSurfaceProxy> target);
    ~GrRenderTask() override;

    // Seals this task so it can accept no more dependencies (see kClosed_Flag / isClosed()).
    // NOTE(review): implementation not visible here — presumably also invokes onMakeClosed();
    // confirm against the .cpp.
    void makeClosed(const GrCaps&);

    // Forwards to onPrePrepare(); currently only GrOpsTask overrides that hook (see below).
    void prePrepare(GrRecordingContext* context) { this->onPrePrepare(context); }

    // These two methods are only invoked at flush time
    void prepare(GrOpFlushState* flushState);
    // Runs the task's work via the onExecute() hook; returns its success/failure result.
    bool execute(GrOpFlushState* flushState) { return this->onExecute(flushState); }

    // Called when this class will survive a flush and needs to truncate its ops and start over.
    // TODO: ultimately it should be invalid for an op list to survive a flush.
    // https://bugs.chromium.org/p/skia/issues/detail?id=7111
    virtual void endFlush() {}

    // True once makeClosed() has sealed this task against further dependencies.
    bool isClosed() const { return this->isSetFlag(kClosed_Flag); }

    /*
     * Notify this GrRenderTask that it relies on the contents of 'dependedOn'
     */
    void addDependency(GrSurfaceProxy* dependedOn, GrMipMapped, GrTextureResolveManager,
                       const GrCaps& caps);

    /*
     * Notify this GrRenderTask that it relies on the contents of all GrRenderTasks which otherTask
     * depends on.
     */
    void addDependenciesFromOtherTask(GrRenderTask* otherTask);

    /*
     * Does this renderTask depend on 'dependedOn'?
     */
    bool dependsOn(const GrRenderTask* dependedOn) const;

    // Stable per-task ID, assigned at construction (see CreateUniqueID()).
    uint32_t uniqueID() const { return fUniqueID; }

    /*
     * Safely cast this GrRenderTask to a GrOpsTask (if possible).
     */
    virtual GrOpsTask* asOpsTask() { return nullptr; }

#ifdef SK_DEBUG
    /*
     * Dump out the GrRenderTask dependency DAG
     */
    virtual void dump(bool printDependencies) const;

    // Number of clips this task carries; only meaningful for subclasses that override it.
    virtual int numClips() const { return 0; }

    using VisitSurfaceProxyFunc = std::function<void(GrSurfaceProxy*, GrMipMapped)>;

    // Invokes 'fn' on every proxy this task reads; each subclass enumerates its own sources.
    virtual void visitProxies_debugOnly(const VisitSurfaceProxyFunc&) const = 0;

    // Like visitProxies_debugOnly(), but additionally visits the render target itself.
    void visitTargetAndSrcProxies_debugOnly(const VisitSurfaceProxyFunc& fn) const {
        this->visitProxies_debugOnly(fn);
        if (fTarget) {
            fn(fTarget.get(), GrMipMapped::kNo);
        }
    }
#endif

protected:
    // In addition to just the GrSurface being allocated, has the stencil buffer been allocated (if
    // it is required)?
    bool isInstantiated() const;

    SkDEBUGCODE(bool deferredProxiesAreInstantiated() const;)

    // Result of onMakeClosed(): whether closing this task determined that it will actually
    // modify its target's contents.
    enum class ExpectedOutcome : bool {
        kTargetUnchanged,
        kTargetDirty,
    };

    // Performs any work to finalize this renderTask prior to execution. If returning
    // ExpectedOutcome::kTargetDirty, the caller is also responsible to fill out the area it will
    // modify in targetUpdateBounds.
    //
    // targetUpdateBounds must not extend beyond the proxy bounds.
    virtual ExpectedOutcome onMakeClosed(const GrCaps&, SkIRect* targetUpdateBounds) = 0;

    // The single surface this task writes to (owned ref).
    sk_sp<GrSurfaceProxy> fTarget;

    // List of texture proxies whose contents are being prepared on a worker thread
    // TODO: this list exists so we can fire off the proper upload when an renderTask begins
    // executing. Can this be replaced?
    SkTArray<GrTextureProxy*, true> fDeferredProxies;

private:
    // for resetFlag, TopoSortTraits, gatherProxyIntervals, handleInternalAllocationFailure
    friend class GrDrawingManager;

    // Drops any pending operations that reference proxies that are not instantiated.
    // NOTE: Derived classes don't need to check fTarget. That is handled when the drawingManager
    // calls isInstantiated.
    virtual void handleInternalAllocationFailure() = 0;

    // Does this task read (or otherwise use) the given proxy, beyond it being the target?
    virtual bool onIsUsed(GrSurfaceProxy*) const = 0;

    // True if 'proxy' is this task's target or is reported used by the subclass.
    bool isUsed(GrSurfaceProxy* proxy) const {
        if (proxy == fTarget.get()) {
            return true;
        }

        return this->onIsUsed(proxy);
    }

    // Record a direct DAG edge: this task depends on 'dependedOn' / is depended on by 'dependent'.
    void addDependency(GrRenderTask* dependedOn);
    void addDependent(GrRenderTask* dependent);
    SkDEBUGCODE(bool isDependedent(const GrRenderTask* dependent) const;)
    SkDEBUGCODE(void validate() const;)
    void closeThoseWhoDependOnMe(const GrCaps&);

    // Feed proxy usage intervals to the GrResourceAllocator class
    virtual void gatherProxyIntervals(GrResourceAllocator*) const = 0;

    static uint32_t CreateUniqueID();

    enum Flags {
        kClosed_Flag    = 0x01,   //!< This GrRenderTask can't accept any more dependencies.

        kWasOutput_Flag = 0x02,   //!< Flag for topological sorting
        kTempMark_Flag  = 0x04,   //!< Flag for topological sorting
    };

    void setFlag(uint32_t flag) {
        fFlags |= flag;
    }

    void resetFlag(uint32_t flag) {
        fFlags &= ~flag;
    }

    bool isSetFlag(uint32_t flag) const {
        return SkToBool(fFlags & flag);
    }

    // Adapter exposing this task's dependency edges and sort-marker flags to a generic
    // topological sort (used by GrDrawingManager, per the friend comment above).
    struct TopoSortTraits {
        static void Output(GrRenderTask* renderTask, int /* index */) {
            renderTask->setFlag(kWasOutput_Flag);
        }
        static bool WasOutput(const GrRenderTask* renderTask) {
            return renderTask->isSetFlag(kWasOutput_Flag);
        }
        static void SetTempMark(GrRenderTask* renderTask) {
            renderTask->setFlag(kTempMark_Flag);
        }
        static void ResetTempMark(GrRenderTask* renderTask) {
            renderTask->resetFlag(kTempMark_Flag);
        }
        static bool IsTempMarked(const GrRenderTask* renderTask) {
            return renderTask->isSetFlag(kTempMark_Flag);
        }
        static int NumDependencies(const GrRenderTask* renderTask) {
            return renderTask->fDependencies.count();
        }
        static GrRenderTask* Dependency(GrRenderTask* renderTask, int index) {
            return renderTask->fDependencies[index];
        }
    };

    // Only the GrOpsTask currently overrides this virtual
    virtual void onPrePrepare(GrRecordingContext*) {}
    virtual void onPrepare(GrOpFlushState*) {} // Only the GrOpsTask overrides this virtual
    virtual bool onExecute(GrOpFlushState* flushState) = 0;

    const uint32_t fUniqueID;
    uint32_t fFlags;

    // 'this' GrRenderTask relies on the output of the GrRenderTasks in 'fDependencies'
    SkSTArray<1, GrRenderTask*, true> fDependencies;
    // 'this' GrRenderTask's output is relied on by the GrRenderTasks in 'fDependents'
    SkSTArray<1, GrRenderTask*, true> fDependents;

    // For performance reasons, we should perform texture resolves back-to-back as much as possible.
    // (http://skbug.com/9406). To accomplish this, we make and reuse one single resolve task for
    // each render task, then add it as a dependency during makeClosed().
    GrTextureResolveRenderTask* fTextureResolveTask = nullptr;
};

#endif