1 /*
2 * Copyright 2011 Joakim Sindholt <[email protected]>
3 * Copyright 2015 Patrick Rudolph <[email protected]>
4 * SPDX-License-Identifier: MIT
5 */
6
7 #ifndef _NINE_BUFFER9_H_
8 #define _NINE_BUFFER9_H_
9
10 #include "device9.h"
11 #include "nine_buffer_upload.h"
12 #include "nine_state.h"
13 #include "resource9.h"
14 #include "pipe/p_context.h"
15 #include "pipe/p_defines.h"
16 #include "pipe/p_state.h"
17 #include "util/list.h"
18 #include "util/box.h"
19 #include "util/u_upload_mgr.h"
20
21 struct pipe_screen;
22 struct pipe_context;
23 struct pipe_transfer;
24
/* Bookkeeping for one outstanding Lock() mapping of a buffer. */
struct NineTransfer {
    struct pipe_transfer *transfer;  /* active pipe transfer, if mapped through the driver */
    bool is_pipe_secondary;          /* mapped on the secondary pipe context rather than the main one */
    struct nine_subbuffer *buf; /* NULL unless subbuffer are used */
    bool should_destroy_buf; /* If the subbuffer should be destroyed */
};
31
/* D3D9 vertex/index buffer object, layered on a Gallium pipe resource.
 * Embeds NineResource9 as its base (COM-style inheritance by layout). */
struct NineBuffer9
{
    struct NineResource9 base;

    /* G3D */
    struct NineTransfer *maps;      /* array of outstanding Lock() mappings */
    int nlocks, nmaps, maxmaps;     /* lock count, used entries in 'maps', and its capacity */
    UINT size;                      /* buffer size in bytes, as requested at creation */

    int16_t bind_count; /* to Device9->state.stream */
    /* Whether only discard and nooverwrite were used so far
     * for this buffer. Allows some optimization. */
    bool discard_nooverwrite_only;
    bool need_sync_if_nooverwrite;
    struct nine_subbuffer *buf;     /* suballocation backing, when the buffer lives in a shared upload buffer */

    /* Specific to managed buffers */
    struct {
        void *data;                 /* CPU-side copy of the buffer contents */
        bool dirty;                 /* true when 'data' has changes not yet uploaded to the GPU */
        struct pipe_box dirty_box; /* region in the resource to update */
        struct pipe_box upload_pending_regions; /* region with uploads pending */
        struct list_head list; /* for update_buffers */
        struct list_head list2; /* for managed_buffers */
        unsigned pending_upload; /* for uploads */
        /* SYSTEMMEM DYNAMIC */
        bool can_unsynchronized; /* Whether the upload can use nooverwrite */
        struct pipe_box valid_region; /* Region in the GPU buffer with valid content */
        struct pipe_box required_valid_region; /* Region that needs to be valid right now. */
        struct pipe_box filled_region; /* Region in the GPU buffer filled since last discard */
        unsigned num_worker_thread_syncs;   /* NOTE(review): presumably counts csmt worker syncs — confirm against .c file */
        unsigned frame_count_last_discard;  /* NOTE(review): presumably the frame counter at the last DISCARD lock — confirm */
    } managed;
};
/* Downcast helper: reinterpret a generic object pointer as a NineBuffer9.
 * The caller guarantees that 'data' actually points to one. */
static inline struct NineBuffer9 *
NineBuffer9( void *data )
{
    struct NineBuffer9 *This = data;
    return This;
}
71
/* Construct a buffer of 'Size' bytes in pool 'Pool' with usage flags 'Usage'.
 * 'Type' distinguishes vertex from index buffers. Returns D3D_OK on success. */
HRESULT
NineBuffer9_ctor( struct NineBuffer9 *This,
                  struct NineUnknownParams *pParams,
                  D3DRESOURCETYPE Type,
                  DWORD Usage,
                  UINT Size,
                  D3DPOOL Pool );

/* Destructor: releases the buffer's resources. */
void
NineBuffer9_dtor( struct NineBuffer9 *This );

/* Return the underlying pipe resource; '*offset' receives the byte offset
 * of this buffer within the resource (non-zero when suballocated). */
struct pipe_resource *
NineBuffer9_GetResource( struct NineBuffer9 *This, unsigned *offset );

/* D3D9 Lock(): map 'SizeToLock' bytes starting at 'OffsetToLock' and return
 * the CPU pointer in '*ppbData'. 'Flags' takes D3DLOCK_* bits. */
HRESULT NINE_WINAPI
NineBuffer9_Lock( struct NineBuffer9 *This,
                        UINT OffsetToLock,
                        UINT SizeToLock,
                        void **ppbData,
                        DWORD Flags );

/* D3D9 Unlock(): release the most recent Lock() mapping. */
HRESULT NINE_WINAPI
NineBuffer9_Unlock( struct NineBuffer9 *This );

/* Flush pending CPU-side changes of a managed buffer to the GPU resource. */
void
NineBuffer9_Upload( struct NineBuffer9 *This );
98
99 static void inline
NineBindBufferToDevice(struct NineDevice9 * device,struct NineBuffer9 ** slot,struct NineBuffer9 * buf)100 NineBindBufferToDevice( struct NineDevice9 *device,
101 struct NineBuffer9 **slot,
102 struct NineBuffer9 *buf )
103 {
104 struct NineBuffer9 *old = *slot;
105
106 if (buf) {
107 if ((buf->managed.dirty) && list_is_empty(&buf->managed.list))
108 list_add(&buf->managed.list, &device->update_buffers);
109 buf->bind_count++;
110 }
111 if (old) {
112 old->bind_count--;
113 if (!old->bind_count && old->managed.dirty)
114 list_delinit(&old->managed.list);
115 }
116
117 nine_bind(slot, buf);
118 }
119
/* Mark the whole managed buffer dirty so its contents are re-uploaded. */
void
NineBuffer9_SetDirty( struct NineBuffer9 *This );
122
/* Queue a dirty, currently-bound managed buffer on the device's
 * update_buffers list (only once: list_is_empty guards re-insertion) so it
 * is uploaded before the next draw. Wrapped in do { } while (0) so the
 * macro expands to a single statement and stays safe inside an unbraced
 * if/else — the original bare { } block broke such callers. */
#define BASEBUF_REGISTER_UPDATE(b) \
    do { \
        if ((b)->managed.dirty && (b)->bind_count && \
            list_is_empty(&(b)->managed.list)) \
            list_add(&(b)->managed.list, \
                     &(b)->base.base.device->update_buffers); \
    } while (0)
128
129 #endif /* _NINE_BUFFER9_H_ */
130