/*
 * Copyright 2023 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/graphite/dawn/DawnErrorChecker.h"

#include "include/private/base/SkAssert.h"
#include "src/gpu/graphite/Log.h"
#include "src/gpu/graphite/dawn/DawnAsyncWait.h"
#include "src/gpu/graphite/dawn/DawnSharedContext.h"

namespace skgpu::graphite {
namespace {

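// Labels and bitmask values for the tracked error scopes, indexed to match the
// order the scopes are pushed in the DawnErrorChecker constructor. Scopes pop
// in reverse (LIFO) order, so callers walk these arrays from kScopeCount - 1
// down to 0.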
constexpr const char* kErrorScopeNames[] = {"validation", "out-of-memory", "internal"};
constexpr DawnErrorType kErrorScopeTypes[] = {
        DawnErrorType::kValidation, DawnErrorType::kOutOfMemory, DawnErrorType::kInternal};
static_assert(std::size(kErrorScopeNames) == std::size(kErrorScopeTypes));
constexpr int kScopeCount = std::size(kErrorScopeTypes);

}  // namespace

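// On construction, push one error scope per tracked error type. The checker is
// "armed" until popErrorScopes() pops and inspects all three scopes.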
DawnErrorChecker::DawnErrorChecker(const DawnSharedContext* sharedContext)
        : fArmed(true), fSharedContext(sharedContext) {
    fSharedContext->device().PushErrorScope(wgpu::ErrorFilter::Validation);
    fSharedContext->device().PushErrorScope(wgpu::ErrorFilter::OutOfMemory);
    fSharedContext->device().PushErrorScope(wgpu::ErrorFilter::Internal);
}

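// The destructor pops any error scopes that are still armed and asserts that
// none of them reported an error.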
DawnErrorChecker::~DawnErrorChecker() {
    [[maybe_unused]] auto err = this->popErrorScopes();
    SkASSERT(!fArmed);
    SkASSERT(err == DawnErrorType::kNoError);
}

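// Pops the three error scopes pushed by the constructor in reverse push order
// (internal, out-of-memory, validation), blocks until every pop has completed,
// and returns a bitmask of the error types that were reported.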
SkEnumBitMask<DawnErrorType> DawnErrorChecker::popErrorScopes() {
    if (!fArmed) {
        return DawnErrorType::kNoError;
    }

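    // Under Emscripten the legacy callback form of PopErrorScope is used and each
    // pop is driven to completion with a DawnAsyncWait busy-wait; otherwise the
    // future-based overload is used and the futures are waited on with
    // Instance::WaitAny below.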
#if defined(__EMSCRIPTEN__)
    struct ErrorState {
        SkEnumBitMask<DawnErrorType> fError;
        int fScopeIdx;
        DawnAsyncWait fWait;

        ErrorState(const DawnSharedContext* sharedContext)
                : fError(DawnErrorType::kNoError)
                , fScopeIdx(kScopeCount - 1)
                , fWait(sharedContext) {}
    } errorState(fSharedContext);

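    // The callback runs once per popped scope. fScopeIdx counts down from the
    // innermost scope (internal) to the outermost (validation); any non-NoError
    // status is logged and accumulated into fError. Each invocation signals fWait
    // so the caller can busy-wait for one pop before issuing the next.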
    wgpu::ErrorCallback errorCallback = [](WGPUErrorType status, const char* msg, void* userData) {
        ErrorState* errorState = static_cast<ErrorState*>(userData);
        if (status != WGPUErrorType_NoError) {
            SkASSERT(errorState->fScopeIdx >= 0);
            const char* errorScopeName = kErrorScopeNames[errorState->fScopeIdx];
            SKGPU_LOG_E("Failed in error scope (%s): %s", errorScopeName, msg);
            errorState->fError |= kErrorScopeTypes[errorState->fScopeIdx];
        }
        errorState->fScopeIdx--;
        errorState->fWait.signal();
    };

    // Pop all three error scopes:
    // Internal
    fSharedContext->device().PopErrorScope(errorCallback, &errorState);
    errorState.fWait.busyWait();
    errorState.fWait.reset();

    // OutOfMemory
    fSharedContext->device().PopErrorScope(errorCallback, &errorState);
    errorState.fWait.busyWait();
    errorState.fWait.reset();

    // Validation
    fSharedContext->device().PopErrorScope(errorCallback, &errorState);
    errorState.fWait.busyWait();
#else
    struct ErrorState {
        SkEnumBitMask<DawnErrorType> fError = DawnErrorType::kNoError;
        int fScopeIdx = kScopeCount - 1;
    } errorState = {};

    auto errorCallback = [](wgpu::PopErrorScopeStatus status,
                            wgpu::ErrorType type,
                            wgpu::StringView msg,
                            ErrorState* errorState) {
        SkASSERT(status == wgpu::PopErrorScopeStatus::Success);
        if (type != wgpu::ErrorType::NoError) {
            SkASSERT(errorState->fScopeIdx >= 0);
            const char* errorScopeName = kErrorScopeNames[errorState->fScopeIdx];
            SKGPU_LOG_E("Failed in error scope (%s): %.*s",
                        errorScopeName,
                        static_cast<int>(msg.length),
                        msg.data);
            errorState->fError |= kErrorScopeTypes[errorState->fScopeIdx];
        }
        errorState->fScopeIdx--;
    };

    // Pop all three error scopes:
    auto internalFuture = fSharedContext->device().PopErrorScope(
            wgpu::CallbackMode::WaitAnyOnly, errorCallback, &errorState);
    auto oomFuture = fSharedContext->device().PopErrorScope(
            wgpu::CallbackMode::WaitAnyOnly, errorCallback, &errorState);
    auto validationFuture = fSharedContext->device().PopErrorScope(
            wgpu::CallbackMode::WaitAnyOnly, errorCallback, &errorState);

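    // With wgpu::CallbackMode::WaitAnyOnly the callbacks only fire while their
    // future is being waited on, so wait on each future in turn (innermost scope
    // first) with an unlimited timeout.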
    wgpu::WaitStatus status = wgpu::WaitStatus::Success;
    wgpu::Instance instance = fSharedContext->device().GetAdapter().GetInstance();

    status = instance.WaitAny(internalFuture, /*timeout=*/std::numeric_limits<uint64_t>::max());
    if (status != wgpu::WaitStatus::Success) {
        SKGPU_LOG_E("Failed waiting for 'internal' error scope to pop.");
    }
    SkASSERT(status == wgpu::WaitStatus::Success);

    status = instance.WaitAny(oomFuture, /*timeout=*/std::numeric_limits<uint64_t>::max());
    if (status != wgpu::WaitStatus::Success) {
        SKGPU_LOG_E("Failed waiting for 'out-of-memory' error scope to pop.");
    }
    SkASSERT(status == wgpu::WaitStatus::Success);

    status = instance.WaitAny(validationFuture, /*timeout=*/std::numeric_limits<uint64_t>::max());
    if (status != wgpu::WaitStatus::Success) {
        SKGPU_LOG_E("Failed waiting for 'validation' error scope to pop.");
    }
    SkASSERT(status == wgpu::WaitStatus::Success);
#endif  // defined(__EMSCRIPTEN__)

    fArmed = false;
    return errorState.fError;
}

}  // namespace skgpu::graphite