/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 */

#ifndef VN_CS_H
#define VN_CS_H

#include "vn_common.h"

#include "venus-protocol/vn_protocol_driver_info.h"

#define VN_CS_ENCODER_BUFFER_INITIALIZER(storage) \
   (struct vn_cs_encoder_buffer) \
   { \
      .base = storage, \
   }
/* note that .buffers points to an unnamed local variable (a compound literal) */
#define VN_CS_ENCODER_INITIALIZER_LOCAL(storage, size) \
   (struct vn_cs_encoder) \
   { \
      .storage_type = VN_CS_ENCODER_STORAGE_POINTER, \
      .buffers = &VN_CS_ENCODER_BUFFER_INITIALIZER(storage), \
      .buffer_count = 1, .buffer_max = 1, .current_buffer_size = size, \
      .cur = storage, .end = (const void *)(storage) + (size), \
   }
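
/* Example (illustrative; the storage name and size below are made up):
 * encoding into a small caller-owned buffer.
 *
 *    char storage[64];
 *    struct vn_cs_encoder local_enc =
 *       VN_CS_ENCODER_INITIALIZER_LOCAL(storage, sizeof(storage));
 */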

#define VN_CS_ENCODER_INITIALIZER(buf, size) \
   (struct vn_cs_encoder) \
   { \
      .storage_type = VN_CS_ENCODER_STORAGE_POINTER, .buffers = (buf), \
      .buffer_count = 1, .buffer_max = 1, .current_buffer_size = size, \
      .cur = (buf)->base, .end = (buf)->base + (size), \
   }

#define VN_CS_DECODER_INITIALIZER(storage, size) \
   (struct vn_cs_decoder) \
   { \
      .cur = storage, .end = (const void *)(storage) + (size), \
   }

enum vn_cs_encoder_storage_type {
   /* a pointer to externally-managed storage */
   VN_CS_ENCODER_STORAGE_POINTER,
   /* an array of dynamically allocated shmems */
   VN_CS_ENCODER_STORAGE_SHMEM_ARRAY,
   /* same as above, but shmems are suballocated from a pool */
   VN_CS_ENCODER_STORAGE_SHMEM_POOL,
};
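
/* Example (illustrative; the minimum buffer size is arbitrary): the
 * storage type is picked at init time for growable encoders.
 *
 *    struct vn_cs_encoder enc;
 *    vn_cs_encoder_init(&enc, instance,
 *                       VN_CS_ENCODER_STORAGE_SHMEM_ARRAY, 4 * 1024);
 *    ...
 *    vn_cs_encoder_fini(&enc);
 */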

struct vn_cs_encoder_buffer {
   struct vn_renderer_shmem *shmem;
   size_t offset;
   void *base;
   size_t committed_size;
};

struct vn_cs_encoder {
   struct vn_instance *instance;
   enum vn_cs_encoder_storage_type storage_type;
   size_t min_buffer_size;

   bool fatal_error;

   struct vn_cs_encoder_buffer *buffers;
   uint32_t buffer_count;
   uint32_t buffer_max;
   size_t total_committed_size;

   /* the current buffer is buffers[buffer_count - 1].shmem */
   size_t current_buffer_size;

   /* cur is the write pointer. When cur passes end, the slow path is
    * triggered.
    */
   void *cur;
   const void *end;
};

struct vn_cs_decoder {
   const void *cur;
   const void *end;
};

struct vn_cs_renderer_protocol_info {
   simple_mtx_t mutex;
   bool init_once;
   uint32_t api_version;
   BITSET_DECLARE(extension_bitset, VN_INFO_EXTENSION_MAX_NUMBER + 1);
};

extern struct vn_cs_renderer_protocol_info _vn_cs_renderer_protocol_info;

static inline bool
vn_cs_renderer_protocol_has_api_version(uint32_t api_version)
{
   return _vn_cs_renderer_protocol_info.api_version >= api_version;
}

static inline bool
vn_cs_renderer_protocol_has_extension(uint32_t ext_number)
{
   return BITSET_TEST(_vn_cs_renderer_protocol_info.extension_bitset,
                      ext_number);
}
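
/* Example (illustrative; the version, extension number, and callee are
 * made up): gating a driver path on renderer-side protocol support.
 *
 *    if (vn_cs_renderer_protocol_has_api_version(VK_API_VERSION_1_2) &&
 *        vn_cs_renderer_protocol_has_extension(123))
 *       use_the_extension();
 */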

void
vn_cs_renderer_protocol_info_init(struct vn_instance *instance);

void
vn_cs_encoder_init(struct vn_cs_encoder *enc,
                   struct vn_instance *instance,
                   enum vn_cs_encoder_storage_type storage_type,
                   size_t min_size);

void
vn_cs_encoder_fini(struct vn_cs_encoder *enc);

void
vn_cs_encoder_reset(struct vn_cs_encoder *enc);

static inline void
vn_cs_encoder_set_fatal(const struct vn_cs_encoder *enc)
{
   /* This is fatal and should be treated as VK_ERROR_DEVICE_LOST or even
    * abort(). Note that vn_cs_encoder_reset does not clear this.
    */
   ((struct vn_cs_encoder *)enc)->fatal_error = true;
}

static inline bool
vn_cs_encoder_get_fatal(const struct vn_cs_encoder *enc)
{
   return enc->fatal_error;
}

static inline bool
vn_cs_encoder_is_empty(const struct vn_cs_encoder *enc)
{
   return !enc->buffer_count || enc->cur == enc->buffers[0].base;
}

static inline size_t
vn_cs_encoder_get_len(const struct vn_cs_encoder *enc)
{
   if (unlikely(!enc->buffer_count))
      return 0;

   size_t len = enc->total_committed_size;
   const struct vn_cs_encoder_buffer *cur_buf =
      &enc->buffers[enc->buffer_count - 1];
   if (!cur_buf->committed_size)
      len += enc->cur - cur_buf->base;
   return len;
}

bool
vn_cs_encoder_reserve_internal(struct vn_cs_encoder *enc, size_t size);

/**
 * Reserve space for commands.
 */
static inline bool
vn_cs_encoder_reserve(struct vn_cs_encoder *enc, size_t size)
{
   if (unlikely(size > enc->end - enc->cur)) {
      if (!vn_cs_encoder_reserve_internal(enc, size)) {
         vn_cs_encoder_set_fatal(enc);
         return false;
      }
      assert(size <= enc->end - enc->cur);
   }

   return true;
}

static inline void
vn_cs_encoder_write(struct vn_cs_encoder *enc,
                    size_t size,
                    const void *val,
                    size_t val_size)
{
   assert(val_size <= size);
   assert(size <= enc->end - enc->cur);

   /* we should not rely on the compiler to optimize away memcpy... */
   memcpy(enc->cur, val, val_size);
   enc->cur += size;
}
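
/* Example (illustrative; the command word is made up): the common pattern
 * is to reserve, write, and finally commit.
 *
 *    const uint32_t cmd = 0x12345678;
 *    if (vn_cs_encoder_reserve(&enc, sizeof(cmd)))
 *       vn_cs_encoder_write(&enc, sizeof(cmd), &cmd, sizeof(cmd));
 *    vn_cs_encoder_commit(&enc);
 */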

void
vn_cs_encoder_commit(struct vn_cs_encoder *enc);

bool
vn_cs_encoder_needs_roundtrip(struct vn_cs_encoder *enc);

static inline void
vn_cs_decoder_init(struct vn_cs_decoder *dec, const void *data, size_t size)
{
   *dec = VN_CS_DECODER_INITIALIZER(data, size);
}

static inline void
vn_cs_decoder_set_fatal(const struct vn_cs_decoder *dec)
{
   abort();
}

static inline bool
vn_cs_decoder_peek_internal(const struct vn_cs_decoder *dec,
                            size_t size,
                            void *val,
                            size_t val_size)
{
   assert(val_size <= size);

   if (unlikely(size > dec->end - dec->cur)) {
      vn_cs_decoder_set_fatal(dec);
      memset(val, 0, val_size);
      return false;
   }

   /* we should not rely on the compiler to optimize away memcpy... */
   memcpy(val, dec->cur, val_size);
   return true;
}

static inline void
vn_cs_decoder_read(struct vn_cs_decoder *dec,
                   size_t size,
                   void *val,
                   size_t val_size)
{
   if (vn_cs_decoder_peek_internal(dec, size, val, val_size))
      dec->cur += size;
}
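
/* Example (illustrative; assumes the reply buffer holds at least one
 * uint32_t): initialize a decoder over received data and read one value.
 *
 *    struct vn_cs_decoder dec;
 *    vn_cs_decoder_init(&dec, reply_data, reply_size);
 *
 *    uint32_t val;
 *    vn_cs_decoder_read(&dec, sizeof(val), &val, sizeof(val));
 */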

static inline void
vn_cs_decoder_peek(const struct vn_cs_decoder *dec,
                   size_t size,
                   void *val,
                   size_t val_size)
{
   vn_cs_decoder_peek_internal(dec, size, val, val_size);
}

static inline vn_object_id
vn_cs_handle_load_id(const void **handle, VkObjectType type)
{
   return *handle ? vn_object_get_id(*handle, type) : 0;
}

static inline void
vn_cs_handle_store_id(void **handle, vn_object_id id, VkObjectType type)
{
   vn_object_set_id(*handle, id, type);
}

#endif /* VN_CS_H */