File src/compute/cmd_unit.cpp changed (mode: 100644) (index 1141cac..476c617) |
... |
... |
// Backing storage for ModuleCompute (pimpl idiom): owns the device wrapper
// through which all compute-module resources are created and destroyed.
struct jen::ModuleCompute::Data {
    Device device;
};
28 |
28 |
|
|
29 |
|
void |
|
|
29 |
|
static void |
30 |
30 |
transitionLayout(Image *p, vkw::CmdBuffer *p_cmd, |
transitionLayout(Image *p, vkw::CmdBuffer *p_cmd, |
31 |
31 |
vkw::ImLayout layout, vkw::StageMaskChange stages) { |
vkw::ImLayout layout, vkw::StageMaskChange stages) { |
32 |
32 |
vkw::BarrierImMem barrier; { |
vkw::BarrierImMem barrier; { |
|
... |
... |
transitionLayout(Image *p, vkw::CmdBuffer *p_cmd, |
44 |
44 |
} |
} |
45 |
45 |
p_cmd->cmd_barriers(stages, {}, {}, barrier); |
p_cmd->cmd_barriers(stages, {}, {}, barrier); |
46 |
46 |
} |
} |
47 |
|
/// Validate a host-side transfer region [offset, offset + size) against a
/// device buffer part before touching its mapped memory.
/// Asserts (via jassert) that:
///   - the region lies entirely inside the part,
///   - the part's memory is host-mapped,
///   - the memory does not require an explicit flush (non-coherent
///     memory is not supported by this transfer path).
static void
check_transfer(const jen::DeviceBufferPart &part,
               vkw::DeviceSize offset, vkw::DeviceSize size) {
    jassert(offset + size <= part.size(), "region exceeds buffer");
    jassert(part.is_mapped(), "cannot access memory");
    jassert(not part.is_flush_needed(), "flush not supported");
}
53 |
|
void |
|
|
55 |
|
static void |
54 |
56 |
write_to_allocation(void *p_src, jen::DeviceBufferPart *p_dst, |
write_to_allocation(void *p_src, jen::DeviceBufferPart *p_dst, |
55 |
57 |
vkw::DeviceSize dst_offset, vkw::DeviceSize size) { |
vkw::DeviceSize dst_offset, vkw::DeviceSize size) { |
56 |
58 |
check_transfer(*p_dst, dst_offset, size); |
check_transfer(*p_dst, dst_offset, size); |
57 |
59 |
memcpy(p_dst->p_data() + dst_offset, p_src, size); |
memcpy(p_dst->p_data() + dst_offset, p_src, size); |
58 |
60 |
} |
} |
59 |
|
|
|
60 |
|
void |
|
|
61 |
|
static void |
61 |
62 |
read_from_allocation(jen::DeviceBufferPart *p_src, void *p_dst, |
read_from_allocation(jen::DeviceBufferPart *p_src, void *p_dst, |
62 |
63 |
vkw::DeviceSize src_offset, vkw::DeviceSize size) { |
vkw::DeviceSize src_offset, vkw::DeviceSize size) { |
63 |
64 |
check_transfer(*p_src, src_offset, size); |
check_transfer(*p_src, src_offset, size); |
64 |
65 |
memcpy(p_dst, p_src->p_data() + src_offset, size); |
memcpy(p_dst, p_src->p_data() + src_offset, size); |
65 |
66 |
} |
} |
66 |
67 |
|
|
67 |
|
|
|
68 |
68 |
struct jen::ComputeCmdUnit::Data { |
struct jen::ComputeCmdUnit::Data { |
69 |
69 |
[[nodiscard]] Result |
[[nodiscard]] Result |
70 |
70 |
init(Device*); |
init(Device*); |
|
... |
... |
struct jen::ComputeCmdUnit::Data { |
91 |
91 |
jl::array<bool, SyncCounts::FENCES> reset_fence; |
jl::array<bool, SyncCounts::FENCES> reset_fence; |
92 |
92 |
}; |
}; |
93 |
93 |
[[nodiscard]] Result ComputeCmdUnit::Data:: |
[[nodiscard]] Result ComputeCmdUnit::Data:: |
94 |
|
init(Device *p_dev) { |
|
95 |
|
this->p_dev = p_dev; |
|
|
94 |
|
init(Device *p_d) { |
|
95 |
|
p_dev = p_d; |
96 |
96 |
Result res; |
Result res; |
97 |
97 |
res = compute_cmds |
res = compute_cmds |
98 |
98 |
.init(*p_dev, p_dev->queue_indices.compute.family, |
.init(*p_dev, p_dev->queue_indices.compute.family, |
|
... |
... |
proceed_writes(BufferTransfers buffer_writes, |
215 |
215 |
for (auto &r : w.transfers) { |
for (auto &r : w.transfers) { |
216 |
216 |
auto ext = im.extent; |
auto ext = im.extent; |
217 |
217 |
uint64_t moffset = 0; |
uint64_t moffset = 0; |
218 |
|
for (uint32_t i = 1; i < r.mip_level; ++i) { |
|
|
218 |
|
for (uint32_t j = 1; j < r.mip_level; ++j) { |
219 |
219 |
moffset += ext.all_scale() * x_size; |
moffset += ext.all_scale() * x_size; |
220 |
220 |
ext /= 2; |
ext /= 2; |
221 |
221 |
ext.x = jl::max(ext.x, 1u); |
ext.x = jl::max(ext.x, 1u); |
|
... |
... |
proceed_staging_reads(BufferTransfers buffer_reads, |
324 |
324 |
for (auto &r : w.transfers) { |
for (auto &r : w.transfers) { |
325 |
325 |
auto ext = im.extent; |
auto ext = im.extent; |
326 |
326 |
uint64_t moffset = 0; |
uint64_t moffset = 0; |
327 |
|
for (uint32_t i = 1; i < r.mip_level; ++i) { |
|
|
327 |
|
for (uint32_t j = 1; j < r.mip_level; ++j) { |
328 |
328 |
moffset += ext.all_scale() * x_size; |
moffset += ext.all_scale() * x_size; |
329 |
329 |
ext /= 2; |
ext /= 2; |
330 |
330 |
ext.x = jl::max(ext.x, 1u); |
ext.x = jl::max(ext.x, 1u); |
|
... |
... |
proceed_staging_reads(BufferTransfers buffer_reads, |
376 |
376 |
} |
} |
377 |
377 |
return VK_SUCCESS; |
return VK_SUCCESS; |
378 |
378 |
} |
} |
379 |
|
[[nodiscard]] Result |
|
|
379 |
|
|
|
380 |
|
[[nodiscard]] static Result |
380 |
381 |
check_computeInfo(const Device &device, const ComputeInfo &info) { |
check_computeInfo(const Device &device, const ComputeInfo &info) { |
381 |
382 |
for (int i = 0; i < 3; ++i) |
for (int i = 0; i < 3; ++i) |
382 |
383 |
if (info.group_count[i] > |
if (info.group_count[i] > |
|
... |
... |
check_computeInfo(const Device &device, const ComputeInfo &info) { |
391 |
392 |
} |
} |
392 |
393 |
return VK_SUCCESS; |
return VK_SUCCESS; |
393 |
394 |
} |
} |
|
395 |
|
|
394 |
396 |
[[nodiscard]] Result ComputeCmdUnit:: |
[[nodiscard]] Result ComputeCmdUnit:: |
395 |
397 |
compute_status() { |
compute_status() { |
396 |
398 |
jen::Result res; |
jen::Result res; |
|
... |
... |
read_result(BufferTransfers buffer_reads, ImagesTransfers images_reads) { |
501 |
503 |
for (auto &r : read.transfers) { |
for (auto &r : read.transfers) { |
502 |
504 |
auto ext = im.extent; |
auto ext = im.extent; |
503 |
505 |
uint64_t moffset = 0; |
uint64_t moffset = 0; |
504 |
|
for (uint32_t i = 1; i < r.mip_level; ++i) { |
|
|
506 |
|
for (uint32_t j = 1; j < r.mip_level; ++j) { |
505 |
507 |
moffset += ext.all_scale() * x_size; |
moffset += ext.all_scale() * x_size; |
506 |
508 |
ext /= 2; |
ext /= 2; |
507 |
509 |
ext.x = jl::max(ext.x, 1u); |
ext.x = jl::max(ext.x, 1u); |
File src/compute/compute.cpp changed (mode: 100644) (index 6c9fb54..af5b92a) |
... |
... |
destroy_pipeline(Pipeline *p_pl) { |
88 |
88 |
p_pl->shader.destroy(d); |
p_pl->shader.destroy(d); |
89 |
89 |
} |
} |
90 |
90 |
|
|
91 |
|
[[nodiscard]] Result |
|
|
91 |
|
[[nodiscard]] static Result |
92 |
92 |
init(Device *p_d, BindingBuffer *p, const BindingCreateInfo &info) { |
init(Device *p_d, BindingBuffer *p, const BindingCreateInfo &info) { |
93 |
93 |
DevMemUsage mem_use; |
DevMemUsage mem_use; |
94 |
94 |
bool map = info.use & BindingUseFlag::TRANSFER_DST |
bool map = info.use & BindingUseFlag::TRANSFER_DST |
|
... |
... |
init(Device *p_d, BindingBuffer *p, const BindingCreateInfo &info) { |
126 |
126 |
} |
} |
127 |
127 |
return res; |
return res; |
128 |
128 |
} |
} |
129 |
|
/// Release the device allocations owned by a BindingBuffer.
/// The staging part exists only when the binding was created with staging
/// enabled (p->use_staging), so it is deallocated conditionally before the
/// main buffer part.
static void
destroy(Device *p_d, BindingBuffer *p) {
    if (p->use_staging)
        p_d->buffer_allocator.deallocate(p->staging);
    p_d->buffer_allocator.deallocate(p->part);
}
134 |
135 |
|
|
135 |
|
[[nodiscard]] Result |
|
|
136 |
|
[[nodiscard]] static Result |
136 |
137 |
init(Device *p_d, BindingBufferView *p, |
init(Device *p_d, BindingBufferView *p, |
137 |
138 |
const BindingCreateInfo &info, VkFormat format) { |
const BindingCreateInfo &info, VkFormat format) { |
138 |
139 |
Result res; |
Result res; |
|
... |
... |
init(Device *p_d, BindingBufferView *p, |
145 |
146 |
p_d->buffer_allocator.deallocate(p->part); |
p_d->buffer_allocator.deallocate(p->part); |
146 |
147 |
return res; |
return res; |
147 |
148 |
} |
} |
148 |
|
/// Tear down a BindingBufferView: destroy the Vulkan buffer-view object
/// first, then release the underlying buffer allocation it referenced.
static void
destroy(Device *p_d, BindingBufferView *p) {
    p->view.destroy(*p_d);
    p_d->buffer_allocator.deallocate(p->part);
}
|
... |
... |
init(Device *p_d, Image *p, const ImageCreateInfo &info) { |
258 |
260 |
p->layer_count = info.layer_count; |
p->layer_count = info.layer_count; |
259 |
261 |
return res; |
return res; |
260 |
262 |
} |
} |
261 |
|
/// Tear down an Image binding: release its staging allocation, then
/// destroy the Vulkan image object itself.
/// NOTE(review): staging is deallocated unconditionally here, whereas
/// BindingBuffer's destroy checks use_staging first — confirm that Image
/// always owns a staging part.
static void
destroy(Device *p_d, Image *p) {
    p_d->buffer_allocator.deallocate(p->staging);
    p->image.destroy(p_d);
}
|
... |
... |
create_images(ImageCreateInfos infos, Image *p_dst) { |
277 |
280 |
return VK_SUCCESS; |
return VK_SUCCESS; |
278 |
281 |
} |
} |
279 |
282 |
|
|
280 |
|
[[nodiscard]] Result |
|
|
283 |
|
[[nodiscard]] static Result |
281 |
284 |
init(Device *p_dev, BindingSet *p, vkw::DescrLayout setLayout, |
init(Device *p_dev, BindingSet *p, vkw::DescrLayout setLayout, |
282 |
285 |
const Bindings &bi) |
const Bindings &bi) |
283 |
286 |
{ |
{ |
|
... |
... |
init(Device *p_dev, BindingSet *p, vkw::DescrLayout setLayout, |
334 |
337 |
|
|
335 |
338 |
return res; |
return res; |
336 |
339 |
} |
} |
337 |
|
/// Tear down a BindingSet by destroying its descriptor pool; per Vulkan
/// semantics this implicitly frees the descriptor sets allocated from it.
static void
destroy(Device *p_dev, BindingSet *p) {
    p->pool.destroy(*p_dev);
}
340 |
344 |
|
|