#include "renderer.h"

#include "engine/array.h"
#include "engine/command_line_parser.h"
#include "engine/crc32.h"
#include "engine/debug.h"
#include "engine/engine.h"
#include "engine/log.h"
#include "engine/atomic.h"
#include "engine/job_system.h"
#include "engine/sync.h"
#include "engine/thread.h"
#include "engine/os.h"
#include "engine/profiler.h"
#include "engine/reflection.h"
#include "engine/resource_manager.h"
#include "engine/string.h"
#include "engine/universe.h"

#include "renderer/font.h"
#include "renderer/material.h"
#include "renderer/model.h"
#include "renderer/pipeline.h"
#include "renderer/particle_system.h"
#include "renderer/render_scene.h"
#include "renderer/shader.h"
#include "renderer/terrain.h"
#include "renderer/texture.h"


namespace Lumix
{


static const ComponentType MODEL_INSTANCE_TYPE = reflection::getComponentType("model_instance");

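// Compute shader source used by downscale(): each invocation averages a
// u_scale.x * u_scale.y block of u_src texels into one u_dst texel,
// dispatched in 16x16 workgroups.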
static const char* downscale_src = R"#(
	layout(local_size_x = 16, local_size_y = 16, local_size_z = 1) in;
	layout (rgba8, binding = 0) uniform readonly image2D u_src;
	layout (rgba8, binding = 1) uniform writeonly image2D u_dst;
	layout(std140, binding = 4) uniform Data {
		ivec2 u_scale;
	};
	void main() {
		vec4 accum = vec4(0);
		for (int j = 0; j < u_scale.y; ++j) {
			for (int i = 0; i < u_scale.x; ++i) {
				vec4 v = imageLoad(u_src, ivec2(gl_GlobalInvocationID.xy) * u_scale + ivec2(i, j));
				accum += v;
			}
		}
		accum *= 1.0 / (u_scale.x * u_scale.y);
		imageStore(u_dst, ivec2(gl_GlobalInvocationID.xy), accum);
	}
)#";

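// Per-frame transient GPU buffer. alloc() hands out slices using a lock-free
// atomic bump allocator; once the primary buffer is exhausted it falls back to
// a mutex-guarded overflow region backed by reserved virtual memory, and the
// buffer is regrown to the next power of two in renderDone().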
struct TransientBuffer {
	static constexpr u32 INIT_SIZE = 1024 * 1024;

	void init() {
		m_buffer = gpu::allocBufferHandle();
		m_offset = 0;
		gpu::createBuffer(m_buffer, gpu::BufferFlags::MAPPABLE, INIT_SIZE, nullptr);
		m_size = INIT_SIZE;
		m_ptr = (u8*)gpu::map(m_buffer, INIT_SIZE);
	}

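	// Thread-safe slice allocation: the fast path is a single atomicAdd into the
	// mapped buffer; the overflow path commits pages on demand and is uploaded
	// to its own GPU buffer in prepareToRender().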
	Renderer::TransientSlice alloc(u32 size) {
		Renderer::TransientSlice slice;
		size = (size + 15) & ~15;
		slice.offset = atomicAdd(&m_offset, size);
		slice.size = size;
		if (slice.offset + size <= m_size) {
			slice.buffer = m_buffer;
			slice.ptr = m_ptr + slice.offset;
			return slice;
		}

		MutexGuard lock(m_mutex);
		if (!m_overflow.buffer) {
			m_overflow.buffer = gpu::allocBufferHandle();
			m_overflow.data = (u8*)os::memReserve(512 * 1024 * 1024);
			m_overflow.size = 0;
			m_overflow.commit = 0;
		}
		slice.ptr = m_overflow.data + m_overflow.size;
		slice.offset = m_overflow.size;
		m_overflow.size += size;
		if (m_overflow.size > m_overflow.commit) {
			const u32 page_size = os::getMemPageSize();
			m_overflow.commit = (m_overflow.size + page_size - 1) & ~(page_size - 1);
			os::memCommit(m_overflow.data, m_overflow.commit);
		}
		slice.buffer = m_overflow.buffer;
		return slice;
	}

	void prepareToRender() {
		gpu::unmap(m_buffer);
		m_ptr = nullptr;

		if (m_overflow.buffer) {
			gpu::createBuffer(m_overflow.buffer, gpu::BufferFlags::NONE, nextPow2(m_overflow.size + m_size), nullptr);
			gpu::update(m_overflow.buffer, m_overflow.data, m_overflow.size);
			os::memRelease(m_overflow.data);
			m_overflow.data = nullptr;
			m_overflow.commit = 0;
		}
	}

	void renderDone() {
		if (m_overflow.buffer) {
			m_size = nextPow2(m_overflow.size + m_size);
			gpu::destroy(m_buffer);
			m_buffer = m_overflow.buffer;
			m_overflow.buffer = gpu::INVALID_BUFFER;
			m_overflow.size = 0;
		}

		ASSERT(!m_ptr);
		m_ptr = (u8*)gpu::map(m_buffer, m_size);
		m_offset = 0;
	}

	gpu::BufferHandle m_buffer = gpu::INVALID_BUFFER;
	i32 m_offset = 0;
	u32 m_size = 0;
	u8* m_ptr = nullptr;
	Mutex m_mutex;

	struct {
		gpu::BufferHandle buffer = gpu::INVALID_BUFFER;
		u8* data = nullptr;
		u32 size = 0;
		u32 commit = 0;
	} m_overflow;
};

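// Everything recorded for one frame: queued render jobs, shaders to compile,
// material constant updates and the transient buffer. The renderer keeps three
// of these (m_frames), which appears to let the CPU set up a new frame while
// the GPU is still rendering an older one.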
struct FrameData {
	FrameData(struct RendererImpl& renderer, IAllocator& allocator)
		: jobs(allocator)
		, renderer(renderer)
		, to_compile_shaders(allocator)
		, material_updates(allocator)
	{}

	struct ShaderToCompile {
		Shader* shader;
		gpu::VertexDecl decl;
		u32 defines;
		gpu::ProgramHandle program;
		Shader::Sources sources;
	};

	struct MaterialUpdates {
		u32 idx;
		MaterialConsts value;
	};

	TransientBuffer transient_buffer;
	u32 gpu_frame = 0xffFFffFF;

	Array<MaterialUpdates> material_updates;
	Array<Renderer::RenderJob*> jobs;
	Mutex shader_mutex;
	Array<ShaderToCompile> to_compile_shaders;
	RendererImpl& renderer;
	jobs::SignalHandle can_setup = jobs::INVALID_HANDLE;
	jobs::SignalHandle setup_done = jobs::INVALID_HANDLE;
};

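// Thin ResourceManager that constructs render resources (Model, Shader,
// Texture, ...) with a reference to the renderer.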
template <typename T>
struct RenderResourceManager : ResourceManager
{
	RenderResourceManager(Renderer& renderer, IAllocator& allocator)
		: ResourceManager(allocator)
		, m_renderer(renderer)
	{}

	Resource* createResource(const Path& path) override
	{
		return LUMIX_NEW(m_allocator, T)(path, *this, m_renderer, m_allocator);
	}

	void destroyResource(Resource& resource) override
	{
		LUMIX_DELETE(m_allocator, &resource);
	}

	Renderer& m_renderer;
};

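// Issues GPU timestamp queries and converts their results onto the CPU clock
// (see toCPUTimestamp), so GPU blocks show up in the engine profiler alongside
// CPU blocks.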
struct GPUProfiler
{
	struct Query
	{
		StaticString<32> name;
		gpu::QueryHandle handle;
		u64 result;
		i64 profiler_link;
		bool is_end;
		bool is_frame;
	};

	GPUProfiler(IAllocator& allocator)
		: m_queries(allocator)
		, m_pool(allocator)
		, m_gpu_to_cpu_offset(0)
	{
	}

	~GPUProfiler()
	{
		ASSERT(m_pool.empty());
		ASSERT(m_queries.empty());
	}

	u64 toCPUTimestamp(u64 gpu_timestamp) const
	{
		return u64(gpu_timestamp * (os::Timer::getFrequency() / double(gpu::getQueryFrequency()))) + m_gpu_to_cpu_offset;
	}

	void init()
	{
		gpu::QueryHandle q = gpu::createQuery();
		gpu::queryTimestamp(q);
		const u64 cpu_timestamp = os::Timer::getRawTimestamp();

		u32 try_num = 0;
		while (!gpu::isQueryReady(q) && try_num < 10) {
			gpu::swapBuffers();
			++try_num;
		}
		if (try_num == 10) {
			logError("Failed to get GPU timestamp, timings are unreliable.");
			m_gpu_to_cpu_offset = 0;
		}
		else {
			const u64 gpu_timestamp = gpu::getQueryResult(q);
			m_gpu_to_cpu_offset = cpu_timestamp - u64(gpu_timestamp * (os::Timer::getFrequency() / double(gpu::getQueryFrequency())));
			gpu::destroy(q);
		}
	}

	void clear()
	{
		m_queries.clear();

		for(const gpu::QueryHandle h : m_pool) {
			gpu::destroy(h);
		}
		m_pool.clear();
	}

	gpu::QueryHandle allocQuery()
	{
		if(!m_pool.empty()) {
			const gpu::QueryHandle res = m_pool.back();
			m_pool.pop();
			return res;
		}
		return gpu::createQuery();
	}

	void beginQuery(const char* name, i64 profiler_link)
	{
		MutexGuard lock(m_mutex);
		Query& q = m_queries.emplace();
		q.profiler_link = profiler_link;
		q.name = name;
		q.is_end = false;
		q.is_frame = false;
		q.handle = allocQuery();
		gpu::queryTimestamp(q.handle);
	}

	void endQuery()
	{
		MutexGuard lock(m_mutex);
		Query& q = m_queries.emplace();
		q.is_end = true;
		q.is_frame = false;
		q.handle = allocQuery();
		gpu::queryTimestamp(q.handle);
	}

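	// Called once per frame on the render thread: drains finished queries in
	// submission order and forwards them to the profiler, stopping at the first
	// query whose result is not ready yet.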
	void frame()
	{
		PROFILE_FUNCTION();
		MutexGuard lock(m_mutex);
		Query frame_query;
		frame_query.is_frame = true;
		m_queries.push(frame_query);
		while (!m_queries.empty()) {
			Query q = m_queries[0];
			if (q.is_frame) {
				profiler::gpuFrame();
				m_queries.erase(0);
				continue;
			}

			if (!gpu::isQueryReady(q.handle)) break;

			if (q.is_end) {
				const u64 timestamp = toCPUTimestamp(gpu::getQueryResult(q.handle));
				profiler::endGPUBlock(timestamp);
			}
			else {
				const u64 timestamp = toCPUTimestamp(gpu::getQueryResult(q.handle));
				profiler::beginGPUBlock(q.name, timestamp, q.profiler_link);
			}
			m_pool.push(q.handle);
			m_queries.erase(0);
		}
	}

	Array<Query> m_queries;
	Array<gpu::QueryHandle> m_pool;
	Mutex m_mutex;
	i64 m_gpu_to_cpu_offset;
};

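// The renderer plugin itself: owns the resource managers, the material
// constant buffer, the per-frame data and the GPU profiler, and routes all
// gpu:: work through RenderJobs executed on a dedicated render worker.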
struct RendererImpl final : Renderer
{
	explicit RendererImpl(Engine& engine)
		: m_engine(engine)
		, m_allocator(engine.getAllocator())
		, m_texture_manager(*this, m_allocator)
		, m_pipeline_manager(*this, m_allocator)
		, m_model_manager(*this, m_allocator)
		, m_particle_emitter_manager(*this, m_allocator)
		, m_material_manager(*this, m_allocator)
		, m_shader_manager(*this, m_allocator)
		, m_font_manager(nullptr)
		, m_shader_defines(m_allocator)
		, m_profiler(m_allocator)
		, m_layers(m_allocator)
		, m_material_buffer(m_allocator)
		, m_plugins(m_allocator)
		, m_free_sort_keys(m_allocator)
		, m_sort_key_to_mesh_map(m_allocator)
	{
		RenderScene::reflect();

		LUMIX_GLOBAL_FUNC(Model::getBoneCount);
		LUMIX_GLOBAL_FUNC(Model::getBoneName);
		LUMIX_GLOBAL_FUNC(Model::getBoneParent);

		m_shader_defines.reserve(32);

		gpu::preinit(m_allocator, shouldLoadRenderdoc());
		m_frames[0].create(*this, m_allocator);
		m_frames[1].create(*this, m_allocator);
		m_frames[2].create(*this, m_allocator);
	}

	u32 getVersion() const override { return 0; }
	void serialize(OutputMemoryStream& stream) const override {}
	bool deserialize(u32 version, InputMemoryStream& stream) override { return version == 0; }

	~RendererImpl()
	{
		m_particle_emitter_manager.destroy();
		m_pipeline_manager.destroy();
		m_texture_manager.destroy();
		m_model_manager.destroy();
		m_material_manager.destroy();
		m_shader_manager.destroy();
		m_font_manager->destroy();
		LUMIX_DELETE(m_allocator, m_font_manager);

		frame();
		frame();
		frame();

		waitForRender();

		jobs::SignalHandle signal = jobs::INVALID_HANDLE;
		jobs::runEx(this, [](void* data) {
			RendererImpl* renderer = (RendererImpl*)data;
			for (const Local<FrameData>& frame : renderer->m_frames) {
				gpu::destroy(frame->transient_buffer.m_buffer);
			}
			gpu::destroy(renderer->m_material_buffer.buffer);
			gpu::destroy(renderer->m_material_buffer.staging_buffer);
			gpu::destroy(renderer->m_tmp_uniform_buffer);
			gpu::destroy(renderer->m_scratch_buffer);
			gpu::destroy(renderer->m_downscale_program);
			renderer->m_profiler.clear();
			gpu::shutdown();
		}, &signal, jobs::INVALID_HANDLE, 1);
		jobs::wait(signal);
	}

	static bool shouldLoadRenderdoc() {
		char cmd_line[4096];
		os::getCommandLine(Span(cmd_line));
		CommandLineParser cmd_line_parser(cmd_line);
		while (cmd_line_parser.next()) {
			if (cmd_line_parser.currentEquals("-renderdoc")) {
				return true;
			}
		}
		return false;
	}

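	// Parses command-line flags (-no_vsync, -debug_opengl), then initializes the
	// GPU backend and the material/uniform/scratch buffers on the render worker
	// before registering the render resource managers.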
	void init() override {
		struct InitData {
			gpu::InitFlags flags = gpu::InitFlags::VSYNC;
			RendererImpl* renderer;
		} init_data;
		init_data.renderer = this;

		char cmd_line[4096];
		os::getCommandLine(Span(cmd_line));
		CommandLineParser cmd_line_parser(cmd_line);
		while (cmd_line_parser.next()) {
			if (cmd_line_parser.currentEquals("-no_vsync")) {
				init_data.flags = init_data.flags & ~gpu::InitFlags::VSYNC;
			}
			else if (cmd_line_parser.currentEquals("-debug_opengl")) {
				init_data.flags = init_data.flags | gpu::InitFlags::DEBUG_OUTPUT;
			}
		}

		jobs::SignalHandle signal = jobs::INVALID_HANDLE;
		jobs::runEx(&init_data, [](void* data) {
			PROFILE_BLOCK("init_render");
			InitData* init_data = (InitData*)data;
			RendererImpl& renderer = *(RendererImpl*)init_data->renderer;
			Engine& engine = renderer.getEngine();
			void* window_handle = engine.getWindowHandle();
			if (!gpu::init(window_handle, init_data->flags)) {
				os::messageBox("Failed to initialize renderer. More info in lumix.log.");
				fatal(false, "gpu::init()");
			}

			gpu::MemoryStats mem_stats;
			if (gpu::getMemoryStats(mem_stats)) {
				logInfo("Initial GPU memory stats:\n",
					"total: ", (mem_stats.total_available_mem / (1024.f * 1024.f)), "MB\n"
					"current: ", (mem_stats.current_available_mem / (1024.f * 1024.f)), "MB\n"
					"dedicated: ", (mem_stats.dedicated_vidmem / (1024.f * 1024.f)), "MB\n");
			}

			for (const Local<FrameData>& frame : renderer.m_frames) {
				frame->transient_buffer.init();
			}
			renderer.m_cpu_frame = renderer.m_frames[0].get();
			renderer.m_gpu_frame = renderer.m_frames[0].get();

			renderer.m_profiler.init();

			MaterialBuffer& mb = renderer.m_material_buffer;
			mb.buffer = gpu::allocBufferHandle();
			mb.staging_buffer = gpu::allocBufferHandle();
			mb.map.insert(0, 0);
			mb.data.resize(400);
			mb.data[0].hash = 0;
			mb.data[0].ref_count = 1;
			mb.first_free = 1;
			for (int i = 1; i < 400; ++i) {
				mb.data[i].ref_count = 0;
				mb.data[i].next_free = i + 1;
			}
			mb.data.back().next_free = -1;
			gpu::createBuffer(mb.buffer
				, gpu::BufferFlags::UNIFORM_BUFFER
				, sizeof(MaterialConsts) * 400
				, nullptr
			);
			gpu::createBuffer(mb.staging_buffer
				, gpu::BufferFlags::UNIFORM_BUFFER
				, sizeof(MaterialConsts)
				, nullptr
			);

			renderer.m_downscale_program = gpu::allocProgramHandle();
			const gpu::ShaderType type = gpu::ShaderType::COMPUTE;
			const char* srcs[] = { downscale_src };
			gpu::createProgram(renderer.m_downscale_program, {}, srcs, &type, 1, nullptr, 0, "downscale");

			renderer.m_tmp_uniform_buffer = gpu::allocBufferHandle();
			gpu::createBuffer(renderer.m_tmp_uniform_buffer, gpu::BufferFlags::UNIFORM_BUFFER, 16 * 1024, nullptr);

			renderer.m_scratch_buffer = gpu::allocBufferHandle();
			gpu::createBuffer(renderer.m_scratch_buffer, gpu::BufferFlags::SHADER_BUFFER | gpu::BufferFlags::COMPUTE_WRITE, SCRATCH_BUFFER_SIZE, nullptr);

			MaterialConsts default_mat;
			default_mat.color = Vec4(1, 0, 1, 1);
			gpu::update(mb.buffer, &default_mat, sizeof(MaterialConsts));
		}, &signal, jobs::INVALID_HANDLE, 1);
		jobs::wait(signal);

		ResourceManagerHub& manager = m_engine.getResourceManager();
		m_pipeline_manager.create(PipelineResource::TYPE, manager);
		m_texture_manager.create(Texture::TYPE, manager);
		m_model_manager.create(Model::TYPE, manager);
		m_material_manager.create(Material::TYPE, manager);
		m_particle_emitter_manager.create(ParticleEmitterResource::TYPE, manager);
		m_shader_manager.create(Shader::TYPE, manager);
		m_font_manager = LUMIX_NEW(m_allocator, FontManager)(*this, m_allocator);
		m_font_manager->create(FontResource::TYPE, manager);

		RenderScene::registerLuaAPI(m_engine.getState());

		m_layers.emplace("default");
	}

	MemRef copy(const void* data, u32 size) override
	{
		MemRef mem = allocate(size);
		memcpy(mem.data, data, size);
		return mem;
	}

	IAllocator& getAllocator() override
	{
		return m_allocator;
	}

	void free(const MemRef& memory) override
	{
		ASSERT(memory.own);
		m_allocator.deallocate(memory.data);
	}

	MemRef allocate(u32 size) override
	{
		MemRef ret;
		ret.size = size;
		ret.own = true;
		ret.data = m_allocator.allocate(size);
		return ret;
	}

	gpu::BufferHandle getScratchBuffer() override {
		return m_scratch_buffer;
	}

	void beginProfileBlock(const char* name, i64 link) override
	{
		m_profiler.beginQuery(name, link);
	}

	void endProfileBlock() override
	{
		m_profiler.endQuery();
	}

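	// Queued texture readback: the job copies the texture into a temporary
	// readback staging texture on the render thread and reads it into the
	// caller-provided buffer.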
	void getTextureImage(gpu::TextureHandle texture, u32 w, u32 h, gpu::TextureFormat out_format, Span<u8> data) override
	{
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::pushDebugGroup("get image data");
				gpu::TextureHandle staging = gpu::allocTextureHandle();
				const gpu::TextureFlags flags = gpu::TextureFlags::NO_MIPS | gpu::TextureFlags::READBACK;
				gpu::createTexture(staging, w, h, 1, out_format, flags, nullptr, "staging_buffer");
				gpu::copy(staging, handle, 0, 0);
				gpu::readTexture(staging, 0, buf);
				gpu::destroy(staging);
				gpu::popDebugGroup();
			}

			gpu::TextureHandle handle;
			gpu::TextureFormat out_format;
			u32 w;
			u32 h;
			Span<u8> buf;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.handle = texture;
		cmd.w = w;
		cmd.h = h;
		cmd.buf = data;
		cmd.out_format = out_format;
		queue(cmd, 0);
	}

	void updateTexture(gpu::TextureHandle handle, u32 slice, u32 x, u32 y, u32 w, u32 h, gpu::TextureFormat format, const MemRef& mem) override
	{
		ASSERT(mem.size > 0);
		ASSERT(handle);

		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::update(handle, 0, slice, x, y, w, h, format, mem.data);
				if (mem.own) {
					renderer->free(mem);
				}
			}

			gpu::TextureHandle handle;
			u32 x, y, w, h, slice;
			gpu::TextureFormat format;
			MemRef mem;
			RendererImpl* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.handle = handle;
		cmd.x = x;
		cmd.y = y;
		cmd.w = w;
		cmd.h = h;
		cmd.slice = slice;
		cmd.format = format;
		cmd.mem = mem;
		cmd.renderer = this;

		queue(cmd, 0);
	}

	gpu::TextureHandle loadTexture(const MemRef& memory, gpu::TextureFlags flags, gpu::TextureInfo* info, const char* debug_name) override
	{
		ASSERT(memory.size > 0);

		const gpu::TextureHandle handle = gpu::allocTextureHandle();
		if (!handle) return handle;

		if(info) {
			*info = gpu::getTextureInfo(memory.data);
		}

		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::loadTexture(handle, memory.data, memory.size, flags, debug_name);
				if(memory.own) {
					renderer->free(memory);
				}
			}

			StaticString<LUMIX_MAX_PATH> debug_name;
			gpu::TextureHandle handle;
			MemRef memory;
			gpu::TextureFlags flags;
			RendererImpl* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.debug_name = debug_name;
		cmd.handle = handle;
		cmd.memory = memory;
		cmd.flags = flags;
		cmd.renderer = this;
		queue(cmd, 0);

		return handle;
	}

	TransientSlice allocTransient(u32 size) override
	{
		return m_cpu_frame->transient_buffer.alloc(size);
	}

	gpu::BufferHandle getMaterialUniformBuffer() override {
		return m_material_buffer.buffer;
	}

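	// Material constants are deduplicated by content hash and reference counted;
	// new entries come from a free list and are uploaded by render() through the
	// staging buffer.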
	u32 createMaterialConstants(const MaterialConsts& data) override {
		const u32 hash = crc32(&data, sizeof(data));
		auto iter = m_material_buffer.map.find(hash);
		u32 idx;
		if(iter.isValid()) {
			idx = iter.value();
		}
		else {
			if (m_material_buffer.first_free == -1) {
				ASSERT(false);
				++m_material_buffer.data[0].ref_count;
				return 0;
			}
			idx = m_material_buffer.first_free;
			m_material_buffer.first_free = m_material_buffer.data[m_material_buffer.first_free].next_free;
			m_material_buffer.data[idx].ref_count = 0;
			m_material_buffer.data[idx].hash = crc32(&data, sizeof(data));
			m_material_buffer.map.insert(hash, idx);
			m_cpu_frame->material_updates.push({idx, data});
		}
		++m_material_buffer.data[idx].ref_count;
		return idx;
	}

	void destroyMaterialConstants(u32 idx) override {
		--m_material_buffer.data[idx].ref_count;
		if (m_material_buffer.data[idx].ref_count > 0) return;

		const u32 hash = m_material_buffer.data[idx].hash;
		m_material_buffer.data[idx].next_free = m_material_buffer.first_free;
		m_material_buffer.first_free = idx;
		m_material_buffer.map.erase(hash);
	}

	gpu::BufferHandle createBuffer(const MemRef& memory, gpu::BufferFlags flags) override
	{
		gpu::BufferHandle handle = gpu::allocBufferHandle();
		if(!handle) return handle;

		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::createBuffer(handle, flags, memory.size, memory.data);
				if (memory.own) {
					renderer->free(memory);
				}
			}

			gpu::BufferHandle handle;
			MemRef memory;
			gpu::BufferFlags flags;
			gpu::TextureFormat format;
			Renderer* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.handle = handle;
		cmd.memory = memory;
		cmd.renderer = this;
		cmd.flags = flags;
		queue(cmd, 0);

		return handle;
	}

	u8 getLayersCount() const override
	{
		return (u8)m_layers.size();
	}

	const char* getLayerName(u8 layer) const override
	{
		return m_layers[layer];
	}

	u8 getLayerIdx(const char* name) override
	{
		for(u8 i = 0; i < m_layers.size(); ++i) {
			if(m_layers[i] == name) return i;
		}
		ASSERT(m_layers.size() < 0xff);
		m_layers.emplace(name);
		return m_layers.size() - 1;
	}

	void runInRenderThread(void* user_ptr, void (*fnc)(Renderer& renderer, void*)) override
	{
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				fnc(*renderer, ptr);
			}

			void* ptr;
			void (*fnc)(Renderer&, void*);
			Renderer* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.fnc = fnc;
		cmd.ptr = user_ptr;
		cmd.renderer = this;
		queue(cmd, 0);
	}

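	// Sort keys: small integers identifying meshes, recycled through a free list;
	// m_sort_key_to_mesh_map maps a key back to its mesh. Key 0 is never handed out.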
	const Mesh** getSortKeyToMeshMap() const override {
		return m_sort_key_to_mesh_map.begin();
	}

	u32 allocSortKey(Mesh* mesh) override {
		if (!m_free_sort_keys.empty()) {
			const u32 key = m_free_sort_keys.back();
			m_free_sort_keys.pop();
			ASSERT(key != 0);
			if ((u32)m_sort_key_to_mesh_map.size() < key + 1)
				m_sort_key_to_mesh_map.resize(key + 1);
			m_sort_key_to_mesh_map[key] = mesh;
			return key;
		}
		++m_max_sort_key;
		const u32 key = m_max_sort_key;
		ASSERT(key != 0);
		if ((u32)m_sort_key_to_mesh_map.size() < key + 1)
			m_sort_key_to_mesh_map.resize(key + 1);
		m_sort_key_to_mesh_map[key] = mesh;
		return key;
	}

	void freeSortKey(u32 key) override {
		if (key != 0) {
			m_free_sort_keys.push(key);
		}
	}

	u32 getMaxSortKey() const override {
		return m_max_sort_key;
	}

	void destroy(gpu::ProgramHandle program) override
	{
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::destroy(program);
			}

			gpu::ProgramHandle program;
			RendererImpl* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.program = program;
		cmd.renderer = this;
		queue(cmd, 0);
	}

	void destroy(gpu::BufferHandle buffer) override
	{
		if (!buffer) return;
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::destroy(buffer);
			}

			gpu::BufferHandle buffer;
			RendererImpl* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.buffer = buffer;
		cmd.renderer = this;
		queue(cmd, 0);
	}

	void copy(gpu::TextureHandle dst, gpu::TextureHandle src) override {
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::copy(dst, src, 0, 0);
			}
			gpu::TextureHandle src;
			gpu::TextureHandle dst;
		};
		Cmd& cmd = createJob<Cmd>();
		cmd.src = src;
		cmd.dst = dst;
		queue(cmd, 0);
	}

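	// Integer downscale using the compute shader defined in downscale_src; the
	// source dimensions must be exact multiples of the destination dimensions.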
	void downscale(gpu::TextureHandle src, u32 src_w, u32 src_h, gpu::TextureHandle dst, u32 dst_w, u32 dst_h) override {
		ASSERT(src_w % dst_w == 0);
		ASSERT(src_h % dst_h == 0);
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();

				const IVec2 scale = src_size / dst_size;
				gpu::update(ub, &scale, sizeof(scale));
				gpu::bindUniformBuffer(4, ub, 0, sizeof(scale));
				gpu::bindImageTexture(src, 0);
				gpu::bindImageTexture(dst, 1);
				gpu::useProgram(program);
				gpu::dispatch((dst_size.x + 15) / 16, (dst_size.y + 15) / 16, 1);
			}

			gpu::TextureHandle src;
			gpu::TextureHandle dst;
			gpu::ProgramHandle program;
			gpu::BufferHandle ub;
			IVec2 src_size;
			IVec2 dst_size;
		};
		Cmd& cmd = createJob<Cmd>();
		cmd.src = src;
		cmd.dst = dst;
		cmd.src_size = {(i32)src_w, (i32)src_h};
		cmd.dst_size = {(i32)dst_w, (i32)dst_h};
		cmd.program = m_downscale_program;
		cmd.ub = m_tmp_uniform_buffer;
		queue(cmd, 0);
	}

	gpu::TextureHandle createTexture(u32 w, u32 h, u32 depth, gpu::TextureFormat format, gpu::TextureFlags flags, const MemRef& memory, const char* debug_name) override
	{
		gpu::TextureHandle handle = gpu::allocTextureHandle();
		if(!handle) return handle;

		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override
			{
				PROFILE_FUNCTION();
				gpu::createTexture(handle, w, h, depth, format, flags, memory.data, debug_name);
				if (memory.own) renderer->free(memory);
			}

			StaticString<LUMIX_MAX_PATH> debug_name;
			gpu::TextureHandle handle;
			MemRef memory;
			u32 w;
			u32 h;
			u32 depth;
			gpu::TextureFormat format;
			Renderer* renderer;
			gpu::TextureFlags flags;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.debug_name = debug_name;
		cmd.handle = handle;
		cmd.memory = memory;
		cmd.format = format;
		cmd.flags = flags;
		cmd.w = w;
		cmd.h = h;
		cmd.depth = depth;
		cmd.renderer = this;
		queue(cmd, 0);

		return handle;
	}

	void destroy(gpu::TextureHandle tex) override
	{
		if (!tex) return;
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::destroy(texture);
			}

			gpu::TextureHandle texture;
			RendererImpl* renderer;
		};

		Cmd& cmd = createJob<Cmd>();
		cmd.texture = tex;
		cmd.renderer = this;
		queue(cmd, 0);
	}

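	// Queues a render job on the current CPU frame and runs its setup() on a job
	// worker; execute() runs later on the render worker in render().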
	void queue(RenderJob& cmd, i64 profiler_link) override
	{
		cmd.profiler_link = profiler_link;

		m_cpu_frame->jobs.push(&cmd);

		jobs::run(&cmd, [](void* data){
			RenderJob* cmd = (RenderJob*)data;
			PROFILE_BLOCK("setup_render_job");
			cmd->setup();
		}, &m_cpu_frame->setup_done);
	}

	void addPlugin(RenderPlugin& plugin) override {
		m_plugins.push(&plugin);
	}

	void removePlugin(RenderPlugin& plugin) override {
		m_plugins.eraseItem(&plugin);
	}

	Span<RenderPlugin*> getPlugins() override { return m_plugins; }

	ResourceManager& getTextureManager() override { return m_texture_manager; }
	FontManager& getFontManager() override { return *m_font_manager; }

	void createScenes(Universe& ctx) override
	{
		UniquePtr<RenderScene> scene = RenderScene::createInstance(*this, m_engine, ctx, m_allocator);
		ctx.addScene(scene.move());
	}

	void* allocJob(u32 size, u32 align) override {
		return m_allocator.allocate_aligned(size, align);
	}

	void deallocJob(void* job) override {
		m_allocator.deallocate_aligned(job);
	}

	const char* getName() const override { return "renderer"; }
	Engine& getEngine() override { return m_engine; }
	int getShaderDefinesCount() const override { return m_shader_defines.size(); }
	const char* getShaderDefine(int define_idx) const override { return m_shader_defines[define_idx]; }

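	// Defers shader program compilation to the render thread; a duplicate request
	// for the same shader/decl/defines returns the already-allocated program handle.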
	gpu::ProgramHandle queueShaderCompile(Shader& shader, gpu::VertexDecl decl, u32 defines) override {
		ASSERT(shader.isReady());
		MutexGuard lock(m_cpu_frame->shader_mutex);

		for (const auto& i : m_cpu_frame->to_compile_shaders) {
			if (i.shader == &shader && decl.hash == i.decl.hash && defines == i.defines) {
				return i.program;
			}
		}
		gpu::ProgramHandle program = gpu::allocProgramHandle();
		m_cpu_frame->to_compile_shaders.push({&shader, decl, defines, program, shader.m_sources});
		return program;
	}

	void makeScreenshot(const Path& filename) override { }

	u8 getShaderDefineIdx(const char* define) override
	{
		MutexGuard lock(m_shader_defines_mutex);
		for (int i = 0; i < m_shader_defines.size(); ++i)
		{
			if (m_shader_defines[i] == define)
			{
				return i;
			}
		}

		if (m_shader_defines.size() >= MAX_SHADER_DEFINES) {
			ASSERT(false);
			logError("Too many shader defines.");
		}

		m_shader_defines.emplace(define);
		ASSERT(m_shader_defines.size() <= 32); // m_shader_defines are reserved in renderer constructor, so getShaderDefine() is MT safe
		return u8(m_shader_defines.size() - 1);
	}

	void startCapture() override
	{
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::startCapture();
			}
		};
		Cmd& cmd = createJob<Cmd>();
		queue(cmd, 0);
	}

	void stopCapture() override
	{
		struct Cmd : RenderJob {
			void setup() override {}
			void execute() override {
				PROFILE_FUNCTION();
				gpu::stopCapture();
			}
		};
		Cmd& cmd = createJob<Cmd>();
		queue(cmd, 0);
	}

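	// Runs on the render worker (kicked from frame()): uploads transient and
	// material data, compiles queued shaders, executes this frame's render jobs,
	// swaps buffers, and releases frames whose GPU work has finished so the CPU
	// side may reuse them (can_setup).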
	void render() {
		FrameData& frame = *m_gpu_frame;
		frame.transient_buffer.prepareToRender();

		gpu::MemoryStats mem_stats;
		if (gpu::getMemoryStats(mem_stats)) {
			profiler::gpuMemStats(mem_stats.total_available_mem, mem_stats.current_available_mem, mem_stats.dedicated_vidmem);
		}

		for (const auto& i : frame.to_compile_shaders) {
			Shader::compile(i.program, i.decl, i.defines, i.sources, *this);
		}
		frame.to_compile_shaders.clear();

		for (const auto& i : frame.material_updates) {
			gpu::update(m_material_buffer.staging_buffer, &i.value, sizeof(MaterialConsts));
			gpu::copy(m_material_buffer.buffer, m_material_buffer.staging_buffer, i.idx * sizeof(MaterialConsts), sizeof(MaterialConsts));
		}
		frame.material_updates.clear();

		gpu::useProgram(gpu::INVALID_PROGRAM);
		gpu::bindIndexBuffer(gpu::INVALID_BUFFER);
		for (RenderJob* job : frame.jobs) {
			PROFILE_BLOCK("execute_render_job");
			profiler::blockColor(0xaa, 0xff, 0xaa);
			profiler::link(job->profiler_link);
			job->execute();
			destroyJob(*job);
		}
		frame.jobs.clear();

		PROFILE_BLOCK("swap buffers");
		jobs::enableBackupWorker(true);

		frame.gpu_frame = gpu::swapBuffers();

		jobs::enableBackupWorker(false);
		m_profiler.frame();

		m_gpu_frame = m_frames[(getFrameIndex(m_gpu_frame) + 1) % lengthOf(m_frames)].get();
		FrameData& check_frame = *m_frames[(getFrameIndex(m_gpu_frame) + 1) % lengthOf(m_frames)].get();

		if (check_frame.gpu_frame != 0xffFFffFF && gpu::frameFinished(check_frame.gpu_frame)) {
			check_frame.gpu_frame = 0xffFFffFF;
			check_frame.transient_buffer.renderDone();
			jobs::decSignal(check_frame.can_setup);
		}

		if (m_gpu_frame->gpu_frame != 0xffFFffFF) {
			gpu::waitFrame(m_gpu_frame->gpu_frame);
			m_gpu_frame->gpu_frame = 0xFFffFFff;
			m_gpu_frame->transient_buffer.renderDone();
			jobs::decSignal(m_gpu_frame->can_setup);
		}
	}

	void waitForCommandSetup() override
	{
		jobs::wait(m_cpu_frame->setup_done);
		m_cpu_frame->setup_done = jobs::INVALID_HANDLE;
	}

	void waitForRender() override {
		jobs::wait(m_last_render);
	}

	i32 getFrameIndex(FrameData* frame) const {
		for (i32 i = 0; i < (i32)lengthOf(m_frames); ++i) {
			if (frame == m_frames[i].get()) return i;
		}
		ASSERT(false);
		return -1;
	}

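	// Ends the current CPU frame: waits for job setup, publishes the compiled
	// shader programs, marks the frame as in flight (can_setup), hands it to the
	// render worker via render(), then advances m_cpu_frame and blocks until the
	// GPU has released that next frame.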
	void frame() override
	{
		PROFILE_FUNCTION();

		jobs::wait(m_cpu_frame->setup_done);
		m_cpu_frame->setup_done = jobs::INVALID_HANDLE;
		for (const auto& i : m_cpu_frame->to_compile_shaders) {
			const u64 key = i.defines | ((u64)i.decl.hash << 32);
			i.shader->m_programs.insert(key, i.program);
		}

		jobs::incSignal(&m_cpu_frame->can_setup);

		m_cpu_frame = m_frames[(getFrameIndex(m_cpu_frame) + 1) % lengthOf(m_frames)].get();
		jobs::runEx(this, [](void* ptr){
			auto* renderer = (RendererImpl*)ptr;
			renderer->render();
		}, &m_last_render, jobs::INVALID_HANDLE, 1);

		jobs::wait(m_cpu_frame->can_setup);
	}

	Engine& m_engine;
	IAllocator& m_allocator;
	Array<StaticString<32>> m_shader_defines;
	Mutex m_shader_defines_mutex;
	Array<StaticString<32>> m_layers;
	FontManager* m_font_manager;
	MaterialManager m_material_manager;
	RenderResourceManager<Model> m_model_manager;
	RenderResourceManager<ParticleEmitterResource> m_particle_emitter_manager;
	RenderResourceManager<PipelineResource> m_pipeline_manager;
	RenderResourceManager<Shader> m_shader_manager;
	RenderResourceManager<Texture> m_texture_manager;
	gpu::ProgramHandle m_downscale_program;
	gpu::BufferHandle m_tmp_uniform_buffer;
	gpu::BufferHandle m_scratch_buffer;
	Array<u32> m_free_sort_keys;
	Array<const Mesh*> m_sort_key_to_mesh_map;
	u32 m_max_sort_key = 0;

	Array<RenderPlugin*> m_plugins;
	Local<FrameData> m_frames[3];
	FrameData* m_gpu_frame = nullptr;
	FrameData* m_cpu_frame = nullptr;
	jobs::SignalHandle m_last_render = jobs::INVALID_HANDLE;

	GPUProfiler m_profiler;

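	// Pool of MaterialConsts entries backing one uniform buffer, addressed by
	// index; entries are deduplicated by hash and chained through next_free while
	// unused (see createMaterialConstants / destroyMaterialConstants).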
	struct MaterialBuffer {
		MaterialBuffer(IAllocator& alloc)
			: map(alloc)
			, data(alloc)
		{}

		struct Data {
			u32 ref_count;
			union {
				u32 hash;
				u32 next_free;
			};
		};

		gpu::BufferHandle buffer = gpu::INVALID_BUFFER;
		gpu::BufferHandle staging_buffer = gpu::INVALID_BUFFER;
		Array<Data> data;
		int first_free;
		HashMap<u32, u32> map;
	} m_material_buffer;
};


extern "C"
{
	LUMIX_PLUGIN_ENTRY(renderer)
	{
		return LUMIX_NEW(engine.getAllocator(), RendererImpl)(engine);
	}
}


} // namespace Lumix