/*
 * Copyright © Microsoft Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "d3d12_context.h"
#include "d3d12_screen.h"
#include "d3d12_video_proc.h"
#include "d3d12_residency.h"
#include "d3d12_util.h"
#include "d3d12_resource.h"
#include "d3d12_video_buffer.h"
#include "d3d12_format.h"

void
d3d12_video_processor_begin_frame(struct pipe_video_codec * codec,
                                  struct pipe_video_buffer *target,
                                  struct pipe_picture_desc *picture)
{
   struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
   debug_printf("[d3d12_video_processor] d3d12_video_processor_begin_frame - "
                "fenceValue: %" PRIu64 "\n",
                pD3D12Proc->m_fenceValue);

   ///
   /// Wait here to make sure the next in-flight resource set is free before reusing it
   ///
   uint64_t fenceValueToWaitOn = static_cast<uint64_t>(std::max(static_cast<int64_t>(0l), static_cast<int64_t>(pD3D12Proc->m_fenceValue) - static_cast<int64_t>(D3D12_VIDEO_PROC_ASYNC_DEPTH) ));

   debug_printf("[d3d12_video_processor] d3d12_video_processor_begin_frame Waiting for completion of in flight resource sets with previous work with fenceValue: %" PRIu64 "\n",
                fenceValueToWaitOn);

   ASSERTED bool wait_res = d3d12_video_processor_sync_completion(codec, fenceValueToWaitOn, OS_TIMEOUT_INFINITE);
   assert(wait_res);

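   // Reset the command list against the allocator slot for this frame in the
   // D3D12_VIDEO_PROC_ASYNC_DEPTH ring; the wait above guarantees the GPU is
   // done with any work previously recorded through that allocator.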
   HRESULT hr = pD3D12Proc->m_spCommandList->Reset(pD3D12Proc->m_spCommandAllocators[d3d12_video_processor_pool_current_index(pD3D12Proc)].Get());
   if (FAILED(hr)) {
      debug_printf(
         "[d3d12_video_processor] resetting ID3D12GraphicsCommandList failed with HR %x\n",
         hr);
      assert(false);
   }

   // Setup process frame arguments for output/target texture.
   struct d3d12_video_buffer *pOutputVideoBuffer = (struct d3d12_video_buffer *) target;

   ID3D12Resource *pDstD3D12Res = d3d12_resource_resource(pOutputVideoBuffer->texture);
   auto dstDesc = GetDesc(pDstD3D12Res);
   pD3D12Proc->m_OutputArguments = {
      //struct D3D12_VIDEO_PROCESS_OUTPUT_STREAM_ARGUMENTS args;
      {
         {
            {
               pDstD3D12Res, // ID3D12Resource *pTexture2D;
               0, // UINT Subresource;
            },
            {
               NULL, // ID3D12Resource *pTexture2D;
               0 // UINT Subresource;
            }
         },
         { 0, 0, (int) dstDesc.Width, (int) dstDesc.Height }
      },
      // struct d3d12_resource* buffer;
      pOutputVideoBuffer,
   };

   debug_printf("d3d12_video_processor_begin_frame: Beginning new scene with Output ID3D12Resource: %p (%d %d)\n", pDstD3D12Res, (int) dstDesc.Width, (int) dstDesc.Height);
}

int
d3d12_video_processor_end_frame(struct pipe_video_codec * codec,
                                struct pipe_video_buffer *target,
                                struct pipe_picture_desc *picture)
{
   struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
   debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - "
                "fenceValue: %" PRIu64 "\n",
                pD3D12Proc->m_fenceValue);

   if(pD3D12Proc->m_ProcessInputs.size() > pD3D12Proc->m_vpMaxInputStreams.MaxInputStreams) {
      debug_printf("[d3d12_video_processor] ERROR: Requested number of input surfaces (%" PRIu64 ") exceeds underlying D3D12 driver capabilities (%d)\n", (uint64_t) pD3D12Proc->m_ProcessInputs.size(), pD3D12Proc->m_vpMaxInputStreams.MaxInputStreams);
      assert(false);
   }

   auto curOutputDesc = GetOutputStreamDesc(pD3D12Proc->m_spVideoProcessor.Get());
   auto curOutputTexFmt = GetDesc(pD3D12Proc->m_OutputArguments.args.OutputStream[0].pTexture2D).Format;

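   // Check whether the cached ID3D12VideoProcessor was created with the same
   // stream count and formats as the inputs queued for this batch; if not, it
   // is re-created below before recording ProcessFrames1.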
   bool inputFmtsMatch = pD3D12Proc->m_inputStreamDescs.size() == pD3D12Proc->m_ProcessInputs.size();
   unsigned curInputIdx = 0;
   while( (curInputIdx < pD3D12Proc->m_inputStreamDescs.size()) && inputFmtsMatch)
   {
      inputFmtsMatch = inputFmtsMatch && (pD3D12Proc->m_inputStreamDescs[curInputIdx].Format == GetDesc(pD3D12Proc->m_ProcessInputs[curInputIdx].InputStream[0].pTexture2D).Format);
      curInputIdx++;
   }

   bool inputCountMatches = (pD3D12Proc->m_ProcessInputs.size() == pD3D12Proc->m_spVideoProcessor->GetNumInputStreamDescs());
   bool outputFmtMatches = (curOutputDesc.Format == curOutputTexFmt);
   bool needsVPRecreation = (
      !inputCountMatches // Requested batch has different number of Inputs to be blit'd
      || !outputFmtMatches // output texture format different than vid proc object expects
      || !inputFmtsMatch // inputs texture formats different than vid proc object expects
   );

   if(needsVPRecreation) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - Attempting to re-create ID3D12VideoProcessor "
                   "input count matches %d inputFmtsMatch: %d outputFmtsMatch %d \n", inputCountMatches, inputFmtsMatch, outputFmtMatches);

      DXGI_COLOR_SPACE_TYPE OutputColorSpace = d3d12_convert_from_legacy_color_space(
         !util_format_is_yuv(d3d12_get_pipe_format(curOutputTexFmt)),
         util_format_get_blocksize(d3d12_get_pipe_format(curOutputTexFmt)) * 8 /*bytes to bits conversion*/,
         /* StudioRGB= */ false,
         /* P709= */ true,
         /* StudioYUV= */ true);

      std::vector<DXGI_FORMAT> InputFormats;
      for(D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 curInput : pD3D12Proc->m_ProcessInputs)
      {
         InputFormats.push_back(GetDesc(curInput.InputStream[0].pTexture2D).Format);
      }
      DXGI_COLOR_SPACE_TYPE InputColorSpace = d3d12_convert_from_legacy_color_space(
         !util_format_is_yuv(d3d12_get_pipe_format(InputFormats[0])),
         util_format_get_blocksize(d3d12_get_pipe_format(InputFormats[0])) * 8 /*bytes to bits conversion*/,
         /* StudioRGB= */ false,
         /* P709= */ true,
         /* StudioYUV= */ true);

      // Release previous allocation
      pD3D12Proc->m_spVideoProcessor.Reset();
      if(!d3d12_video_processor_check_caps_and_create_processor(pD3D12Proc, InputFormats, InputColorSpace, curOutputTexFmt, OutputColorSpace))
      {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - Failure when "
                      " trying to re-create the ID3D12VideoProcessor for current batch streams configuration\n");
         assert(false);
      }
   }

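   // Transition the output to VIDEO_PROCESS_WRITE and every queued input to
   // VIDEO_PROCESS_READ; the same barriers are replayed in reverse after
   // ProcessFrames1 so the resources return to COMMON for other queues.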
   // Schedule barrier transitions
   std::vector<D3D12_RESOURCE_BARRIER> barrier_transitions;
   barrier_transitions.push_back(CD3DX12_RESOURCE_BARRIER::Transition(
      pD3D12Proc->m_OutputArguments.args.OutputStream[0].pTexture2D,
      D3D12_RESOURCE_STATE_COMMON,
      D3D12_RESOURCE_STATE_VIDEO_PROCESS_WRITE));

   for(D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 curInput : pD3D12Proc->m_ProcessInputs)
      barrier_transitions.push_back(CD3DX12_RESOURCE_BARRIER::Transition(
         curInput.InputStream[0].pTexture2D,
         D3D12_RESOURCE_STATE_COMMON,
         D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ));

   pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<uint32_t>(barrier_transitions.size()), barrier_transitions.data());

   // Schedule process operation

   pD3D12Proc->m_spCommandList->ProcessFrames1(pD3D12Proc->m_spVideoProcessor.Get(), &pD3D12Proc->m_OutputArguments.args, pD3D12Proc->m_ProcessInputs.size(), pD3D12Proc->m_ProcessInputs.data());

   // Schedule reverse (back to common) transitions before command list closes for current frame

   for (auto &BarrierDesc : barrier_transitions)
      std::swap(BarrierDesc.Transition.StateBefore, BarrierDesc.Transition.StateAfter);

   pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<uint32_t>(barrier_transitions.size()), barrier_transitions.data());

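   // Record the fence for this batch in the pending-fences ring so callers can
   // wait on it; the Signal on m_spFence itself is issued when the command
   // list is submitted in d3d12_video_processor_flush().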
   pD3D12Proc->m_PendingFences[d3d12_video_processor_pool_current_index(pD3D12Proc)].value = pD3D12Proc->m_fenceValue;
   pD3D12Proc->m_PendingFences[d3d12_video_processor_pool_current_index(pD3D12Proc)].cmdqueue_fence = pD3D12Proc->m_spFence.Get();
   *picture->fence = (pipe_fence_handle*) &pD3D12Proc->m_PendingFences[d3d12_video_processor_pool_current_index(pD3D12Proc)];
   return 0;
}

void
d3d12_video_processor_process_frame(struct pipe_video_codec *codec,
                                    struct pipe_video_buffer *input_texture,
                                    const struct pipe_vpp_desc *process_properties)
{
   struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;

   // begin_frame is only called once, so it cannot pick up an up-to-date
   // src_surface_fence for each queued input; capture it here on every call instead.
   pD3D12Proc->input_surface_fence = (struct d3d12_fence*) process_properties->src_surface_fence;

   // Get the underlying resources from the pipe_video_buffers
   struct d3d12_video_buffer *pInputVideoBuffer = (struct d3d12_video_buffer *) input_texture;

   ID3D12Resource *pSrcD3D12Res = d3d12_resource_resource(pInputVideoBuffer->texture);

   // y0 = top
   // x0 = left
   // x1 = right
   // y1 = bottom

   debug_printf("d3d12_video_processor_process_frame: Adding Input ID3D12Resource: %p to scene (Output target %p)\n", pSrcD3D12Res, pD3D12Proc->m_OutputArguments.args.OutputStream[0].pTexture2D);
   debug_printf("d3d12_video_processor_process_frame: Input box: top: %d left: %d right: %d bottom: %d\n", process_properties->src_region.y0, process_properties->src_region.x0, process_properties->src_region.x1, process_properties->src_region.y1);
   debug_printf("d3d12_video_processor_process_frame: Output box: top: %d left: %d right: %d bottom: %d\n", process_properties->dst_region.y0, process_properties->dst_region.x0, process_properties->dst_region.x1, process_properties->dst_region.y1);
   debug_printf("d3d12_video_processor_process_frame: Requested alpha blend mode %d global alpha: %f \n", process_properties->blend.mode, process_properties->blend.global_alpha);

   // Setup process frame arguments for current input texture.

   D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 InputArguments = {
      {
         { // D3D12_VIDEO_PROCESS_INPUT_STREAM InputStream[0];
            pSrcD3D12Res, // ID3D12Resource *pTexture2D;
            0, // UINT Subresource
            { //D3D12_VIDEO_PROCESS_REFERENCE_SET ReferenceSet;
               0, //UINT NumPastFrames;
               NULL, //ID3D12Resource **ppPastFrames;
               NULL, // UINT *pPastSubresources;
               0, //UINT NumFutureFrames;
               NULL, //ID3D12Resource **ppFutureFrames;
               NULL //UINT *pFutureSubresources;
            }
         },
         { // D3D12_VIDEO_PROCESS_INPUT_STREAM InputStream[1];
            NULL, //ID3D12Resource *pTexture2D;
            0, //UINT Subresource;
            { //D3D12_VIDEO_PROCESS_REFERENCE_SET ReferenceSet;
               0, //UINT NumPastFrames;
               NULL, //ID3D12Resource **ppPastFrames;
               NULL, // UINT *pPastSubresources;
               0, //UINT NumFutureFrames;
               NULL, //ID3D12Resource **ppFutureFrames;
               NULL //UINT *pFutureSubresources;
            }
         }
      },
      { // D3D12_VIDEO_PROCESS_TRANSFORM Transform;
         // y0 = top
         // x0 = left
         // x1 = right
         // y1 = bottom
         // typedef struct _RECT
         // {
         //    int left;
         //    int top;
         //    int right;
         //    int bottom;
         // } RECT;
         { process_properties->src_region.x0/*left*/, process_properties->src_region.y0/*top*/, process_properties->src_region.x1/*right*/, process_properties->src_region.y1/*bottom*/ }, // D3D12_RECT SourceRectangle;
         { process_properties->dst_region.x0/*left*/, process_properties->dst_region.y0/*top*/, process_properties->dst_region.x1/*right*/, process_properties->dst_region.y1/*bottom*/ }, // D3D12_RECT DestinationRectangle;
         pD3D12Proc->m_inputStreamDescs[0].EnableOrientation ? d3d12_video_processor_convert_pipe_rotation(process_properties->orientation) : D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT, // D3D12_VIDEO_PROCESS_ORIENTATION Orientation;
      },
      D3D12_VIDEO_PROCESS_INPUT_STREAM_FLAG_NONE,
      { // D3D12_VIDEO_PROCESS_INPUT_STREAM_RATE RateInfo;
         0,
         0,
      },
      // INT FilterLevels[32];
      {
         0, // Trailing zeroes on the rest
      },
      //D3D12_VIDEO_PROCESS_ALPHA_BLENDING;
      {
         (process_properties->blend.mode == PIPE_VIDEO_VPP_BLEND_MODE_GLOBAL_ALPHA),
         process_properties->blend.global_alpha
      },
      // D3D12_VIDEO_FIELD_TYPE FieldType
      D3D12_VIDEO_FIELD_TYPE_NONE,
   };

   debug_printf("ProcessFrame InArgs Orientation %d \n\tSrc top: %d left: %d right: %d bottom: %d\n\tDst top: %d left: %d right: %d bottom: %d\n", InputArguments.Transform.Orientation,
                InputArguments.Transform.SourceRectangle.top, InputArguments.Transform.SourceRectangle.left, InputArguments.Transform.SourceRectangle.right, InputArguments.Transform.SourceRectangle.bottom,
                InputArguments.Transform.DestinationRectangle.top, InputArguments.Transform.DestinationRectangle.left, InputArguments.Transform.DestinationRectangle.right, InputArguments.Transform.DestinationRectangle.bottom);

   pD3D12Proc->m_ProcessInputs.push_back(InputArguments);
   pD3D12Proc->m_InputBuffers.push_back(pInputVideoBuffer);

   ///
   /// Mark that there is GPU work pending; the actual submission and
   /// synchronization happen later in d3d12_video_processor_flush()
   ///
   pD3D12Proc->m_needsGPUFlush = true;
}

void
d3d12_video_processor_destroy(struct pipe_video_codec * codec)
{
   if (codec == nullptr) {
      return;
   }
   // Flush pending work before destroying.
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;

   uint64_t curBatchFence = pD3D12Proc->m_fenceValue;
   if (pD3D12Proc->m_needsGPUFlush)
   {
      d3d12_video_processor_flush(codec);
      d3d12_video_processor_sync_completion(codec, curBatchFence, OS_TIMEOUT_INFINITE);
   }

   // Call the destructor so the ComPtr members release their references
   delete pD3D12Proc;
}

void
d3d12_video_processor_flush(struct pipe_video_codec * codec)
{
   struct d3d12_video_processor * pD3D12Proc = (struct d3d12_video_processor *) codec;
   assert(pD3D12Proc);
   assert(pD3D12Proc->m_spD3D12VideoDevice);
   assert(pD3D12Proc->m_spCommandQueue);

   debug_printf("[d3d12_video_processor] d3d12_video_processor_flush started. Will flush video queue work and CPU wait on "
                "fenceValue: %" PRIu64 "\n",
                pD3D12Proc->m_fenceValue);

   if (!pD3D12Proc->m_needsGPUFlush) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_flush started. Nothing to flush, all up to date.\n");
   } else {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Promoting the output texture %p to d3d12_permanently_resident.\n",
                   pD3D12Proc->m_OutputArguments.buffer->texture);

      // Make the resources permanently resident for video use
      d3d12_promote_to_permanent_residency(pD3D12Proc->m_pD3D12Screen, pD3D12Proc->m_OutputArguments.buffer->texture);

      for(auto curInput : pD3D12Proc->m_InputBuffers)
      {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Promoting the input texture %p to d3d12_permanently_resident.\n",
                      curInput->texture);
         // Make the resources permanently resident for video use
         d3d12_promote_to_permanent_residency(pD3D12Proc->m_pD3D12Screen, curInput->texture);
      }

      HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
      if (hr != S_OK) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush"
                      " - D3D12Device was removed BEFORE commandlist "
                      "execution with HR %x.\n",
                      hr);
         goto flush_fail;
      }

      // Close and execute command list and wait for idle on CPU blocking
      // this method before resetting list and allocator for next submission.

      if (pD3D12Proc->m_transitionsBeforeCloseCmdList.size() > 0) {
         pD3D12Proc->m_spCommandList->ResourceBarrier(pD3D12Proc->m_transitionsBeforeCloseCmdList.size(),
                                                      pD3D12Proc->m_transitionsBeforeCloseCmdList.data());
         pD3D12Proc->m_transitionsBeforeCloseCmdList.clear();
      }

      hr = pD3D12Proc->m_spCommandList->Close();
      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Can't close command list with HR %x\n", hr);
         goto flush_fail;
      }

      // Flush any work batched in the d3d12_screen and Wait on the m_spCommandQueue
      struct pipe_fence_handle *completion_fence = NULL;
      pD3D12Proc->base.context->flush(pD3D12Proc->base.context, &completion_fence, PIPE_FLUSH_ASYNC | PIPE_FLUSH_HINT_FINISH);
      struct d3d12_fence *casted_completion_fence = d3d12_fence(completion_fence);
      pD3D12Proc->m_spCommandQueue->Wait(casted_completion_fence->cmdqueue_fence, casted_completion_fence->value);
      pD3D12Proc->m_pD3D12Screen->base.fence_reference(&pD3D12Proc->m_pD3D12Screen->base, &completion_fence, NULL);

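      // Also make the video process queue wait on the producer fence of the
      // input surfaces (if one was provided), so the inputs are not read
      // before the producing engine (e.g. a decoder) is done writing them.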
      struct d3d12_fence *input_surface_fence = pD3D12Proc->input_surface_fence;
      if (input_surface_fence)
         pD3D12Proc->m_spCommandQueue->Wait(input_surface_fence->cmdqueue_fence, input_surface_fence->value);

      ID3D12CommandList *ppCommandLists[1] = { pD3D12Proc->m_spCommandList.Get() };
      pD3D12Proc->m_spCommandQueue->ExecuteCommandLists(1, ppCommandLists);
      pD3D12Proc->m_spCommandQueue->Signal(pD3D12Proc->m_spFence.Get(), pD3D12Proc->m_fenceValue);

      // Validate device was not removed
      hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
      if (hr != S_OK) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush"
                      " - D3D12Device was removed AFTER commandlist "
                      "execution with HR %x, but wasn't before.\n",
                      hr);
         goto flush_fail;
      }

      debug_printf(
         "[d3d12_video_processor] d3d12_video_processor_flush - GPU signaled execution finalized for fenceValue: %" PRIu64 "\n",
         pD3D12Proc->m_fenceValue);

      pD3D12Proc->m_fenceValue++;
      pD3D12Proc->m_needsGPUFlush = false;
   }
   pD3D12Proc->m_ProcessInputs.clear();
   pD3D12Proc->m_InputBuffers.clear();
   // Free the fence after completion finished

   return;

flush_fail:
   debug_printf("[d3d12_video_processor] d3d12_video_processor_flush failed for fenceValue: %" PRIu64 "\n", pD3D12Proc->m_fenceValue);
   assert(false);
}

struct pipe_video_codec *
d3d12_video_processor_create(struct pipe_context *context, const struct pipe_video_codec *codec)
{
   ///
   /// Initialize d3d12_video_processor
   ///

   // Use operator new so the constructor runs; otherwise the in-class member initializers would be lost
   struct d3d12_video_processor *pD3D12Proc = new d3d12_video_processor;

   pD3D12Proc->m_PendingFences.resize(D3D12_VIDEO_PROC_ASYNC_DEPTH);
   pD3D12Proc->base = *codec;

   pD3D12Proc->base.context = context;
   pD3D12Proc->base.width = codec->width;
   pD3D12Proc->base.height = codec->height;
   pD3D12Proc->base.destroy = d3d12_video_processor_destroy;
   pD3D12Proc->base.begin_frame = d3d12_video_processor_begin_frame;
   pD3D12Proc->base.process_frame = d3d12_video_processor_process_frame;
   pD3D12Proc->base.end_frame = d3d12_video_processor_end_frame;
   pD3D12Proc->base.flush = d3d12_video_processor_flush;
   pD3D12Proc->base.get_processor_fence = d3d12_video_processor_get_processor_fence;

   ///

   ///
   /// Try initializing D3D12 Video device and check for device caps
   ///

   struct d3d12_context *pD3D12Ctx = (struct d3d12_context *) context;
   pD3D12Proc->m_pD3D12Context = pD3D12Ctx;
   pD3D12Proc->m_pD3D12Screen = d3d12_screen(pD3D12Ctx->base.screen);

   // Assume defaults for now, can re-create if necessary when d3d12_video_processor_end_frame kicks off the processing
   DXGI_COLOR_SPACE_TYPE InputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
   std::vector<DXGI_FORMAT> InputFormats = { DXGI_FORMAT_NV12 };
   DXGI_FORMAT OutputFormat = DXGI_FORMAT_NV12;
   DXGI_COLOR_SPACE_TYPE OutputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;

   ///
   /// Create processor objects
   ///
   if (FAILED(pD3D12Proc->m_pD3D12Screen->dev->QueryInterface(
          IID_PPV_ARGS(pD3D12Proc->m_spD3D12VideoDevice.GetAddressOf())))) {
      debug_printf("[d3d12_video_processor] d3d12_video_create_processor - D3D12 Device has no Video support\n");
      goto failed;
   }

   if (FAILED(pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_MAX_INPUT_STREAMS, &pD3D12Proc->m_vpMaxInputStreams, sizeof(pD3D12Proc->m_vpMaxInputStreams)))) {
      debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Failed to query D3D12_FEATURE_VIDEO_PROCESS_MAX_INPUT_STREAMS\n");
      goto failed;
   }

   if (!d3d12_video_processor_check_caps_and_create_processor(pD3D12Proc, InputFormats, InputColorSpace, OutputFormat, OutputColorSpace)) {
      debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Failure on "
                   "d3d12_video_processor_check_caps_and_create_processor\n");
      goto failed;
   }

   if (!d3d12_video_processor_create_command_objects(pD3D12Proc)) {
      debug_printf(
         "[d3d12_video_processor] d3d12_video_create_processor - Failure on d3d12_video_processor_create_command_objects\n");
      goto failed;
   }

   debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Created successfully!\n");

   return &pD3D12Proc->base;

failed:
   if (pD3D12Proc != nullptr) {
      d3d12_video_processor_destroy(&pD3D12Proc->base);
   }

   return nullptr;
}

bool
d3d12_video_processor_check_caps_and_create_processor(struct d3d12_video_processor *pD3D12Proc,
                                                      std::vector<DXGI_FORMAT> InputFormats,
                                                      DXGI_COLOR_SPACE_TYPE InputColorSpace,
                                                      DXGI_FORMAT OutputFormat,
                                                      DXGI_COLOR_SPACE_TYPE OutputColorSpace)
{
   HRESULT hr = S_OK;

   D3D12_VIDEO_FIELD_TYPE FieldType = D3D12_VIDEO_FIELD_TYPE_NONE;
   D3D12_VIDEO_FRAME_STEREO_FORMAT StereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;
   DXGI_RATIONAL FrameRate = { 30, 1 };
   DXGI_RATIONAL AspectRatio = { 1, 1 };

   struct ResolStruct {
      uint Width;
      uint Height;
   };

   ResolStruct resolutionsList[] = {
      { 8192, 8192 },   // 8k
      { 8192, 4320 },   // 8k - alternative
      { 7680, 4800 },   // 8k - alternative
      { 7680, 4320 },   // 8k - alternative
      { 4096, 2304 },   // 2160p (4K)
      { 4096, 2160 },   // 2160p (4K) - alternative
      { 2560, 1440 },   // 1440p
      { 1920, 1200 },   // 1200p
      { 1920, 1080 },   // 1080p
      { 1280, 720 },    // 720p
      { 800, 600 },
   };

   pD3D12Proc->m_SupportCaps =
   {
      0, // NodeIndex
      { resolutionsList[0].Width, resolutionsList[0].Height, { InputFormats[0], InputColorSpace } },
      FieldType,
      StereoFormat,
      FrameRate,
      { OutputFormat, OutputColorSpace },
      StereoFormat,
      FrameRate,
   };

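   // Probe the support caps from the largest resolution downwards and stop at
   // the first input sample size the driver reports as supported.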
   uint32_t idxResol = 0;
   bool bSupportsAny = false;
   while ((idxResol < ARRAY_SIZE(resolutionsList)) && !bSupportsAny) {
      pD3D12Proc->m_SupportCaps.InputSample.Width = resolutionsList[idxResol].Width;
      pD3D12Proc->m_SupportCaps.InputSample.Height = resolutionsList[idxResol].Height;
      if (SUCCEEDED(pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_SUPPORT, &pD3D12Proc->m_SupportCaps, sizeof(pD3D12Proc->m_SupportCaps)))) {
         bSupportsAny = ((pD3D12Proc->m_SupportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != 0);
      }
      idxResol++;
   }

   if ((pD3D12Proc->m_SupportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED)
   {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED not returned by driver. "
                   "failed with SupportFlags %x\n",
                   pD3D12Proc->m_SupportCaps.SupportFlags);
   }

   D3D12_VIDEO_PROCESS_FILTER_FLAGS enabledFilterFlags = D3D12_VIDEO_PROCESS_FILTER_FLAG_NONE;

   bool enableOrientation = (
      ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION) != 0)
      || ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP) != 0)
   );

   D3D12_VIDEO_PROCESS_INPUT_STREAM_DESC inputStreamDesc = {
      InputFormats[0],
      InputColorSpace,
      AspectRatio, // SourceAspectRatio;
      AspectRatio, // DestinationAspectRatio;
      FrameRate, // FrameRate
      pD3D12Proc->m_SupportCaps.ScaleSupport.OutputSizeRange, // SourceSizeRange
      pD3D12Proc->m_SupportCaps.ScaleSupport.OutputSizeRange, // DestinationSizeRange
      enableOrientation,
      enabledFilterFlags,
      StereoFormat,
      FieldType,
      D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_NONE,
      ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING) != 0)
         && ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL) != 0), // EnableAlphaBlending
      {}, // LumaKey
      0, // NumPastFrames
      0, // NumFutureFrames
      false // EnableAutoProcessing
   };

   D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC outputStreamDesc =
   {
      pD3D12Proc->m_SupportCaps.OutputFormat.Format,
      OutputColorSpace,
      D3D12_VIDEO_PROCESS_ALPHA_FILL_MODE_OPAQUE, // AlphaFillMode
      0u, // AlphaFillModeSourceStreamIndex
      {0, 0, 0, 0}, // BackgroundColor
      FrameRate, // FrameRate
      false // EnableStereo
   };

   // gets the required past/future frames for VP creation
   {
      D3D12_FEATURE_DATA_VIDEO_PROCESS_REFERENCE_INFO referenceInfo = {};
      referenceInfo.NodeIndex = 0;
      D3D12_VIDEO_PROCESS_FEATURE_FLAGS featureFlags = D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= outputStreamDesc.AlphaFillMode ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= inputStreamDesc.LumaKey.Enable ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_LUMA_KEY : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= (inputStreamDesc.StereoFormat != D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE || outputStreamDesc.EnableStereo) ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_STEREO : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= inputStreamDesc.EnableOrientation ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION | D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= inputStreamDesc.EnableAlphaBlending ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;

      referenceInfo.DeinterlaceMode = inputStreamDesc.DeinterlaceMode;
      referenceInfo.Filters = inputStreamDesc.FilterFlags;
      referenceInfo.FeatureSupport = featureFlags;
      referenceInfo.InputFrameRate = inputStreamDesc.FrameRate;
      referenceInfo.OutputFrameRate = outputStreamDesc.FrameRate;
      referenceInfo.EnableAutoProcessing = inputStreamDesc.EnableAutoProcessing;

      hr = pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_REFERENCE_INFO, &referenceInfo, sizeof(referenceInfo));
      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - CheckFeatureSupport "
                      "failed with HR %x\n",
                      hr);
         return false;
      }

      inputStreamDesc.NumPastFrames = referenceInfo.PastFrames;
      inputStreamDesc.NumFutureFrames = referenceInfo.FutureFrames;
   }

   pD3D12Proc->m_outputStreamDesc = outputStreamDesc;

   debug_printf("[d3d12_video_processor]\t Creating Video Processor\n");
   debug_printf("[d3d12_video_processor]\t NumInputs: %d\n", (int) InputFormats.size());

   pD3D12Proc->m_inputStreamDescs.clear();
   for (unsigned i = 0; i < InputFormats.size(); i++)
   {
      inputStreamDesc.Format = InputFormats[i];
      pD3D12Proc->m_inputStreamDescs.push_back(inputStreamDesc);
      debug_printf("[d3d12_video_processor]\t Input Stream #%d Format: %d\n", i, inputStreamDesc.Format);
   }
   debug_printf("[d3d12_video_processor]\t Output Stream Format: %d\n", pD3D12Proc->m_outputStreamDesc.Format);

   hr = pD3D12Proc->m_spD3D12VideoDevice->CreateVideoProcessor(pD3D12Proc->m_NodeMask,
                                                               &pD3D12Proc->m_outputStreamDesc,
                                                               pD3D12Proc->m_inputStreamDescs.size(),
                                                               pD3D12Proc->m_inputStreamDescs.data(),
                                                               IID_PPV_ARGS(pD3D12Proc->m_spVideoProcessor.GetAddressOf()));
   if (FAILED(hr)) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - CreateVideoProcessor "
                   "failed with HR %x\n",
                   hr);
      return false;
   }

   return true;
}

bool
d3d12_video_processor_create_command_objects(struct d3d12_video_processor *pD3D12Proc)
{
   assert(pD3D12Proc->m_spD3D12VideoDevice);

   D3D12_COMMAND_QUEUE_DESC commandQueueDesc = { D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS };
   HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandQueue(
      &commandQueueDesc,
      IID_PPV_ARGS(pD3D12Proc->m_spCommandQueue.GetAddressOf()));

   if (FAILED(hr)) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateCommandQueue "
                   "failed with HR %x\n",
                   hr);
      return false;
   }

   hr = pD3D12Proc->m_pD3D12Screen->dev->CreateFence(0,
                                                     D3D12_FENCE_FLAG_SHARED,
                                                     IID_PPV_ARGS(&pD3D12Proc->m_spFence));

   if (FAILED(hr)) {
      debug_printf(
         "[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateFence failed with HR %x\n",
         hr);
      return false;
   }

   pD3D12Proc->m_spCommandAllocators.resize(D3D12_VIDEO_PROC_ASYNC_DEPTH);
   for (uint32_t i = 0; i < pD3D12Proc->m_spCommandAllocators.size() ; i++) {
      hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandAllocator(
         D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
         IID_PPV_ARGS(pD3D12Proc->m_spCommandAllocators[i].GetAddressOf()));

      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to "
                      "CreateCommandAllocator failed with HR %x\n",
                      hr);
         return false;
      }
   }

   ComPtr<ID3D12Device4> spD3D12Device4;
   if (FAILED(pD3D12Proc->m_pD3D12Screen->dev->QueryInterface(
          IID_PPV_ARGS(spD3D12Device4.GetAddressOf())))) {
      debug_printf(
         "[d3d12_video_processor] d3d12_video_processor_create_processor - D3D12 Device has no ID3D12Device4 support\n");
      return false;
   }

   hr = spD3D12Device4->CreateCommandList1(0,
                                           D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
                                           D3D12_COMMAND_LIST_FLAG_NONE,
                                           IID_PPV_ARGS(pD3D12Proc->m_spCommandList.GetAddressOf()));

   if (FAILED(hr)) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateCommandList "
                   "failed with HR %x\n",
                   hr);
      return false;
   }

   return true;
}

D3D12_VIDEO_PROCESS_ORIENTATION
d3d12_video_processor_convert_pipe_rotation(enum pipe_video_vpp_orientation orientation_flags)
{
   D3D12_VIDEO_PROCESS_ORIENTATION result = D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT;

   if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_90)
   {
      result = (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90_FLIP_HORIZONTAL : D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: %s\n", (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90_FLIP_HORIZONTAL" : "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_180)
   {
      result = D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_180;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_180\n");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_270)
   {
      result = (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270_FLIP_HORIZONTAL : D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: %s\n", (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270_FLIP_HORIZONTAL" : "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL)
   {
      result = D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_HORIZONTAL;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_HORIZONTAL\n");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_FLIP_VERTICAL)
   {
      result = D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_VERTICAL;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_VERTICAL\n");
   }

   return result;
}

uint64_t
d3d12_video_processor_pool_current_index(struct d3d12_video_processor *pD3D12Proc)
{
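   // The command allocator and pending-fence pools are rings of
   // D3D12_VIDEO_PROC_ASYNC_DEPTH entries, indexed by the monotonically
   // increasing fence value of the current batch.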
   return pD3D12Proc->m_fenceValue % D3D12_VIDEO_PROC_ASYNC_DEPTH;
}


bool
d3d12_video_processor_ensure_fence_finished(struct pipe_video_codec *codec,
                                            uint64_t fenceValueToWaitOn,
                                            uint64_t timeout_ns)
{
   bool wait_result = true;
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   HRESULT hr = S_OK;
   uint64_t completedValue = pD3D12Proc->m_spFence->GetCompletedValue();

   debug_printf(
      "[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - Waiting for fence (with timeout_ns %" PRIu64
      ") to finish with "
      "fenceValue: %" PRIu64 " - Current Fence Completed Value %" PRIu64 "\n",
      timeout_ns,
      fenceValueToWaitOn,
      completedValue);

   if (completedValue < fenceValueToWaitOn) {

      HANDLE event = {};
      int event_fd = 0;
      event = d3d12_fence_create_event(&event_fd);

      hr = pD3D12Proc->m_spFence->SetEventOnCompletion(fenceValueToWaitOn, event);
      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - SetEventOnCompletion for "
                      "fenceValue %" PRIu64 " failed with HR %x\n",
                      fenceValueToWaitOn,
                      hr);
         goto ensure_fence_finished_fail;
      }

      wait_result = d3d12_fence_wait_event(event, event_fd, timeout_ns);
      d3d12_fence_close_event(event, event_fd);

      debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - Waiting on fence to be done with "
                   "fenceValue: %" PRIu64 " - current CompletedValue: %" PRIu64 "\n",
                   fenceValueToWaitOn,
                   completedValue);
   } else {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - Fence already done with "
                   "fenceValue: %" PRIu64 " - current CompletedValue: %" PRIu64 "\n",
                   fenceValueToWaitOn,
                   completedValue);
   }
   return wait_result;

ensure_fence_finished_fail:
   debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished failed for fenceValue: %" PRIu64 "\n",
                fenceValueToWaitOn);
   assert(false);
   return false;
}

bool
d3d12_video_processor_sync_completion(struct pipe_video_codec *codec, uint64_t fenceValueToWaitOn, uint64_t timeout_ns)
{
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   assert(pD3D12Proc);
   assert(pD3D12Proc->m_spD3D12VideoDevice);
   assert(pD3D12Proc->m_spCommandQueue);
   HRESULT hr = S_OK;

   ASSERTED bool wait_result = d3d12_video_processor_ensure_fence_finished(codec, fenceValueToWaitOn, timeout_ns);
   assert(wait_result);

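   // The GPU has passed fenceValueToWaitOn, so it is now safe to reset the
   // command allocator belonging to that slot of the ring for reuse.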
   hr =
      pD3D12Proc->m_spCommandAllocators[fenceValueToWaitOn % D3D12_VIDEO_PROC_ASYNC_DEPTH]->Reset();
   if (FAILED(hr)) {
      debug_printf("m_spCommandAllocator->Reset() failed with %x.\n", hr);
      goto sync_with_token_fail;
   }

   // Validate device was not removed
   hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
   if (hr != S_OK) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_sync_completion"
                   " - D3D12Device was removed AFTER d3d12_video_processor_ensure_fence_finished "
                   "execution with HR %x, but wasn't before.\n",
                   hr);
      goto sync_with_token_fail;
   }

   debug_printf(
      "[d3d12_video_processor] d3d12_video_processor_sync_completion - GPU execution finalized for fenceValue: %" PRIu64
      "\n",
      fenceValueToWaitOn);

   return wait_result;

sync_with_token_fail:
   debug_printf("[d3d12_video_processor] d3d12_video_processor_sync_completion failed for fenceValue: %" PRIu64 "\n",
                fenceValueToWaitOn);
   assert(false);
   return false;
}

int d3d12_video_processor_get_processor_fence(struct pipe_video_codec *codec,
                                              struct pipe_fence_handle *fence,
                                              uint64_t timeout)
{
   struct d3d12_fence *fenceValueToWaitOn = (struct d3d12_fence *) fence;
   assert(fenceValueToWaitOn);

   ASSERTED bool wait_res = d3d12_video_processor_sync_completion(codec, fenceValueToWaitOn->value, timeout);

   // Return semantics based on p_video_codec interface
   // ret == 0 -> work in progress
   // ret != 0 -> work completed
   return wait_res ? 1 : 0;
}