#include "Universe.hpp" #include //NOLINT #include // NOLINT #include #include "../contouring/Dummy.hpp" #include "Chunk.hpp" using namespace world; const auto AREAS_FILE = "/areas.idx"; Universe::Universe(const Universe::options &options): dicts("content/zstd.dict"), contouring(std::make_shared()) { setOptions(options); folderPath = options.folderPath; struct vec_istream: std::streambuf { vec_istream(std::vector &vec) { this->setg(&vec[0], &vec[0], &vec[0] + vec.size()); } }; running = true; std::filesystem::create_directories(folderPath); { std::ifstream index(folderPath + AREAS_FILE); if(index.good()) { size_t size = 0; index.read(reinterpret_cast(&size), sizeof(size)); robin_hood::unordered_map tmp; while(!index.eof()) { size_t id = UINT32_MAX; index.read(reinterpret_cast(&id), sizeof(size_t)); Area::params params{voxel_pos(0), 0}; index.read(reinterpret_cast(¶ms.center.x), sizeof(voxel_pos::value_type)); index.read(reinterpret_cast(¶ms.center.y), sizeof(voxel_pos::value_type)); index.read(reinterpret_cast(¶ms.center.z), sizeof(voxel_pos::value_type)); index.read(reinterpret_cast(¶ms.radius), sizeof(int)); index.read(reinterpret_cast(¶ms.seed), sizeof(int)); [[maybe_unused]] auto ok = tmp.emplace(id, params).second; assert(ok && "Duplicated area"); index.peek(); } assert(tmp.size() == size && "Corrupted areas index"); far_areas = data::generational::vector(tmp); LOG_D(far_areas.size() << " areas loaded"); } else { LOG_E("No index file!!! Probably a new world..."); //TODO: generate universe far_areas.emplace(Area::params{voxel_pos(0), 1 << 20}); //far_areas.emplace(Area::params{voxel_pos(0), 1, 43}); } index.close(); } entities.emplace(nullptr, glm::vec3(1), glm::vec3(2)); // Workers for (size_t i = 0; i < 4; i++) { workers.emplace_back([&] { #if TRACY_ENABLE tracy::SetThreadName("Chunks"); #endif const auto read_ctx = dicts.make_reader(); const auto write_ctx = dicts.make_writer(); while (running) { if (std::pair, std::shared_ptr> task; loadQueue.pop(task)) { //MAYBE: loadQueue.take to avoid duplicated work on fast move ZoneScopedN("ProcessLoad"); const auto &pos = task.first; const auto rcPos = glm::split(pos.second); const auto reg = task.second->getRegion(folderPath, std::make_pair(pos.first, rcPos.first)); Region::data data; if(reg->read(rcPos.second, read_ctx, data)) { ZoneScopedN("ProcessRead"); vec_istream idata(data); std::istream iss(&idata); loadedQueue.push({pos, std::make_shared(iss)}); } else { ZoneScopedN("ProcessGenerate"); loadedQueue.push({pos, std::make_shared(pos.second, task.second->getGenerator())}); } } else if(save_task_t task; saveQueue.pop(task)) { //MAYBE: queue.take to avoid concurent write or duplicated work on fast move ZoneScopedN("ProcessSave"); if(task.second.second->isModified()) { std::ostringstream out; task.second.second->write(out); const auto rcPos = glm::split(task.second.first); const auto reg = task.first.second->getRegion(folderPath, std::make_pair(task.first.first, rcPos.first)); reg->write(rcPos.second, write_ctx, out.str()); } } else { loadQueue.wait(); } } }); } } Universe::~Universe() { contouring = nullptr; // Save all for(auto& area: areas) { for(const auto& chunk: area.second->getChunks()) { saveQueue.emplace(area, chunk); } } loadQueue.notify_all(); if (auto size = saveQueue.size(); size > 0) { LOG_I("Saving " << size << " chunks"); const auto SAVE_CHECK_TIME = 500; do { loadQueue.notify_all(); std::cout << "\rSaving... 
" << size << " " << std::flush; std::this_thread::sleep_for(std::chrono::microseconds(SAVE_CHECK_TIME)); size = saveQueue.size(); } while (size > 0); std::cout << std::endl; } saveAreas(); running = false; loadQueue.notify_all(); for (auto &worker: workers) { if (worker.joinable()) worker.join(); } LOG_D("Universe disappeared"); } // Write areas index (warn: file io) void Universe::saveAreas() const { std::ofstream index(folderPath + AREAS_FILE, std::ios::out | std::ios::binary); if(!index.good()) { LOG_E("Areas index write error"); return; } { size_t size = areas.size() + far_areas.size(); index.write(reinterpret_cast(&size), sizeof(size)); } std::function write = [&](area_id id, Area::params params) { auto idx = id.index; index.write(reinterpret_cast(&idx), sizeof(size_t)); index.write(reinterpret_cast(¶ms.center.x), sizeof(voxel_pos::value_type)); index.write(reinterpret_cast(¶ms.center.y), sizeof(voxel_pos::value_type)); index.write(reinterpret_cast(¶ms.center.z), sizeof(voxel_pos::value_type)); index.write(reinterpret_cast(¶ms.radius), sizeof(int)); index.write(reinterpret_cast(¶ms.seed), sizeof(int)); }; for(const auto& area: areas) { write(area.first, area.second->getParams()); } far_areas.iter(write); if(!index.good()) LOG_E("Areas index write error"); index.close(); } void Universe::update(const voxel_pos& pos, float deltaTime) { ZoneScopedN("Universe"); const chunk_pos newPos = glm::divide(pos); const auto chunkChange = last_pos != newPos; last_pos = newPos; if(chunkChange) { ZoneScopedN("Far"); far_areas.extract([&](area_id id, Area::params params){ if (const chunk_pos diff = glm::divide(pos - params.center); glm::length2(diff) > glm::pow2(loadDistance + params.radius)) return false; LOG_I("Load area " << id.index); areas.emplace(id, std::make_shared(params)); return true; }); } { // Update alive areas ZoneScopedN("World"); #if TRACY_ENABLE size_t chunk_count = 0; size_t region_count = 0; #endif const bool queuesEmpty = loadQueue.empty() && saveQueue.empty(); bool allLazy = true; auto it = areas.begin(); while (it != areas.end()) { ZoneScopedN("Area"); const bool chunkChangeArea = (false && it->first == 1 && it->second->move(glm::vec3(deltaTime))) || chunkChange; // TODO: area.velocity const chunk_pos diff = glm::divide(pos - it->second->getOffset().as_voxel()); auto &chunks = it->second->setChunks(); if (glm::length2(diff) > glm::pow2(keepDistance + it->second->getChunks().getRadius())) { auto it_c = chunks.begin(); while(it_c != chunks.end()) { saveQueue.emplace(*it, *it_c); it_c = chunks.erase(it_c); } LOG_I("Unload area " << it->first.index); [[maybe_unused]] auto ok = far_areas.put(it->first, it->second->getParams()); assert(ok); it = areas.erase(it); saveAreas(); } else { bool lazyArea = queuesEmpty; { // Update alive chunks ZoneScopedN("Alive"); auto it_c = chunks.begin(); while(it_c != chunks.end()) { if (glm::length2(diff - it_c->first) > glm::pow2(keepDistance)) { saveQueue.emplace(*it, *it_c); //MAYBE: take look lazyArea = false; it_c = chunks.erase(it_c); }else { const area_ acPos = std::make_pair(it->first, it_c->first); if (const auto neighbors = it_c->second->update()) { contouring->onUpdate(acPos, diff, chunks, neighbors.value()); } else if (chunkChangeArea) { contouring->onNotify(acPos, diff, chunks); } ++it_c; #if TRACY_ENABLE chunk_count++; #endif } } } if (chunkChangeArea) { // Enqueue missing chunks ZoneScopedN("Missing"); auto handle = loadQueue.inserter(); //TODO: need dist so no easy sphere fill for (int x = -loadDistance; x <= loadDistance; x++) { for 
// Write areas index (warn: file io)
void Universe::saveAreas() const {
    std::ofstream index(folderPath + AREAS_FILE, std::ios::out | std::ios::binary);
    if(!index.good()) {
        LOG_E("Areas index write error");
        return;
    }
    {
        size_t size = areas.size() + far_areas.size();
        index.write(reinterpret_cast<const char *>(&size), sizeof(size));
    }
    std::function<void(area_id, Area::params)> write = [&](area_id id, Area::params params) {
        auto idx = id.index;
        index.write(reinterpret_cast<const char *>(&idx), sizeof(size_t));
        index.write(reinterpret_cast<const char *>(&params.center.x), sizeof(voxel_pos::value_type));
        index.write(reinterpret_cast<const char *>(&params.center.y), sizeof(voxel_pos::value_type));
        index.write(reinterpret_cast<const char *>(&params.center.z), sizeof(voxel_pos::value_type));
        index.write(reinterpret_cast<const char *>(&params.radius), sizeof(int));
        index.write(reinterpret_cast<const char *>(&params.seed), sizeof(int));
    };
    for(const auto& area: areas) {
        write(area.first, area.second->getParams());
    }
    far_areas.iter(write);
    if(!index.good())
        LOG_E("Areas index write error");
    index.close();
}

void Universe::update(const voxel_pos& pos, float deltaTime) {
    ZoneScopedN("Universe");
    const chunk_pos newPos = glm::divide(pos);
    const auto chunkChange = last_pos != newPos;
    last_pos = newPos;

    if(chunkChange) { // Move in-range far areas back to the active set
        ZoneScopedN("Far");
        far_areas.extract([&](area_id id, Area::params params) {
            if (const chunk_pos diff = glm::divide(pos - params.center);
                glm::length2(diff) > glm::pow2(loadDistance + params.radius))
                return false;
            LOG_I("Load area " << id.index);
            areas.emplace(id, std::make_shared<Area>(params));
            return true;
        });
    }
    { // Update alive areas
        ZoneScopedN("World");
#if TRACY_ENABLE
        size_t chunk_count = 0;
        size_t region_count = 0;
#endif
        const bool queuesEmpty = loadQueue.empty() && saveQueue.empty();
        bool allLazy = true;
        auto it = areas.begin();
        while (it != areas.end()) {
            ZoneScopedN("Area");
            const bool chunkChangeArea = (false && it->first == 1 && it->second->move(glm::vec3(deltaTime))) || chunkChange; // TODO: area.velocity
            const chunk_pos diff = glm::divide(pos - it->second->getOffset().as_voxel());
            auto &chunks = it->second->setChunks();
            if (glm::length2(diff) > glm::pow2(keepDistance + it->second->getChunks().getRadius())) {
                // Area fully out of range: save all its chunks and move it back to far_areas
                auto it_c = chunks.begin();
                while(it_c != chunks.end()) {
                    saveQueue.emplace(*it, *it_c);
                    it_c = chunks.erase(it_c);
                }
                LOG_I("Unload area " << it->first.index);
                [[maybe_unused]] auto ok = far_areas.put(it->first, it->second->getParams());
                assert(ok);
                it = areas.erase(it);
                saveAreas();
            } else {
                bool lazyArea = queuesEmpty;
                { // Update alive chunks
                    ZoneScopedN("Alive");
                    auto it_c = chunks.begin();
                    while(it_c != chunks.end()) {
                        if (glm::length2(diff - it_c->first) > glm::pow2(keepDistance)) {
                            saveQueue.emplace(*it, *it_c); //MAYBE: take look
                            lazyArea = false;
                            it_c = chunks.erase(it_c);
                        } else {
                            const area_<chunk_pos> acPos = std::make_pair(it->first, it_c->first);
                            if (const auto neighbors = it_c->second->update()) {
                                contouring->onUpdate(acPos, diff, chunks, neighbors.value());
                            } else if (chunkChangeArea) {
                                contouring->onNotify(acPos, diff, chunks);
                            }
                            ++it_c;
#if TRACY_ENABLE
                            chunk_count++;
#endif
                        }
                    }
                }
                if (chunkChangeArea) { // Enqueue missing chunks
                    ZoneScopedN("Missing");
                    auto handle = loadQueue.inserter();
                    //TODO: need dist so no easy sphere fill
                    for (int x = -loadDistance; x <= loadDistance; x++) {
                    for (int y = -loadDistance; y <= loadDistance; y++) {
                    for (int z = -loadDistance; z <= loadDistance; z++) {
                        const auto dist2 = x * x + y * y + z * z;
                        if (dist2 <= loadDistance * loadDistance) {
                            const auto p = diff + chunk_pos(x, y, z);
                            if (chunks.inRange(p) && chunks.find(p) == chunks.end()) {
                                handle.first(std::make_pair(it->first, p), it->second, -dist2);
                                lazyArea = false;
                            }
                        }
                    }}}
                    if(!lazyArea)
                        loadQueue.notify_all();
                }
                allLazy &= lazyArea;
                if (lazyArea) { // Clear un-used regions
                    ZoneScopedN("Region");
                    const auto unique = it->second->getRegions(); // MAYBE: shared then unique
#if TRACY_ENABLE
                    region_count += unique->size();
#endif
                    for (auto it_r = unique->begin(); it_r != unique->end(); ++it_r) {
                        if (glm::length2(diff - glm::lvec3(it_r->first) * glm::lvec3(REGION_LENGTH)) > glm::pow2(keepDistance + REGION_LENGTH * 2)) {
                            unique->erase(it_r); //FIXME: may wait for os file access (long)
                            break; //NOTE: save one only max per frame
                        }
                    }
                }
                ++it;
            }
        }
#if TRACY_ENABLE
        TracyPlot("ChunkCount", static_cast<int64_t>(chunk_count));
        if(allLazy) {
            TracyPlot("Region", static_cast<int64_t>(region_count));
        }
        TracyPlot("ChunkLoad", static_cast<int64_t>(loadQueue.size()));
        TracyPlot("ChunkUnload", static_cast<int64_t>(saveQueue.size()));
#endif
    }
    {
        ZoneScopedN("Contouring");
        contouring->update(pos, areas);
        //MAYBE: if(chunkChange) contouring->notify(chunks);
    }
    { // Update entities
        ZoneScopedN("Entities");
#if TRACY_ENABLE
        size_t entity_count = 0;
#endif
        entities.for_each([&](entity_id, Entity &val) {
            val.instances.remove([&](entity_id, Entity::Instance &inst) {
#if TRACY_ENABLE
                entity_count++;
#endif
                inst.pos += inst.velocity * deltaTime;
                return glm::length2(glm::divide(pos - inst.pos.as_voxel())) > glm::pow2(keepDistance);
            });
        });
#if TRACY_ENABLE
        TracyPlot("EntityCount", static_cast<int64_t>(entity_count));
#endif
    }
    { // Store loaded chunks
        ZoneScopedN("Load");
        robin_hood::pair<area_<chunk_pos>, std::shared_ptr<Chunk>> loaded;
        for (auto handle = loadedQueue.extractor(); handle.first(loaded);) {
            if (const auto it = areas.find(loaded.first.first); it != areas.end()) {
                auto &chunks = it->second->setChunks();
                chunks.emplace(loaded.first.second, loaded.second);
                const chunk_pos diff = glm::divide(pos - it->second->getOffset().as_voxel());
                contouring->onUpdate(loaded.first, diff, chunks, Faces::All);
            }
        }
    }
}

void Universe::setOptions(const Universe::options& options) {
    loadDistance = options.loadDistance;
    keepDistance = options.keepDistance;
}

void Universe::setContouring(const std::shared_ptr<contouring::Abstract>& ct) {
    contouring = ct;
    last_pos = chunk_pos(INT_MAX); // trigger chunkChange on next update
}

std::optional<ray_target> Universe::raycast(const Ray &ray) const {
    //MAYBE: ray + offset to get float precision
    std::vector<voxel_pos> points;
    ray.grid(points);

    std::optional<ray_target> target = std::nullopt;
    size_t dist = points.size();
    for(auto& area: areas) {
        if(ray.intersect(area.second->getBounding()) != IBox::ContainmentType::Disjoint) {
            const auto &offset = area.second->getOffset().as_voxel();
            const auto &chunks = area.second->getChunks();
            std::shared_ptr<Chunk> chunk = nullptr;
            chunk_pos chunk_vec(INT_MAX);
            for (size_t i = 0; i < dist; i++) {
                const auto pos = points[i] - offset;
                const chunk_pos cPos = glm::divide(pos);
                if(cPos != chunk_vec) {
                    if (const auto it = chunks.find(cPos); it != chunks.end()) {
                        chunk = it->second;
                        chunk_vec = cPos;
                    } else {
                        chunk = nullptr;
                    }
                }
                if(chunk != nullptr) {
                    const auto voxel = chunk->getAt(glm::modulo(pos));
                    if(voxel.density() > 0) {
                        target = {ray_target{{area.first, pos}, voxel, offset}};
                        dist = i;
                        i = points.size();
                    }
                }
            }
        }
    }
    return target;
}
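
// Voxel edition: positions are area-relative; glm::splitIdx yields a chunk position
// plus an intra-chunk index, and edits targeting unloaded or out-of-range chunks are
// skipped. NOTE: assumes Chunk::replace returns the voxel previously stored there,
// which set() forwards to its caller and setCube() accumulates into an ItemList.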
std::optional<Voxel> Universe::set(const area_<voxel_pos>& pos, const Voxel& val) {
    if(const auto it = areas.find(pos.first); it != areas.end()) {
        auto &chunks = it->second->setChunks();
        const auto split = glm::splitIdx(pos.second);
        if(chunks.inRange(split.first))
            if(const auto chunk = chunks.findInRange(split.first))
                return {chunk.value()->replace(split.second, val)};
    }
    return {};
}

ItemList Universe::setCube(const area_<voxel_pos>& pos, const Voxel& val, int radius) {
    ItemList list;
    if(const auto it = areas.find(pos.first); it != areas.end()) {
        auto& chunks = it->second->setChunks();
        for (int z = -radius; z <= radius; z++) {
        for (int y = -radius; y <= radius; y++) {
        for (int x = -radius; x <= radius; x++) {
            //TODO: list.pop(val)
            const auto split = glm::splitIdx(pos.second + voxel_pos(x, y, z));
            if(chunks.inRange(split.first))
                if(const auto chunk = it->second->setChunks().findInRange(split.first))
                    list.add(chunk.value()->replace(split.second, val));
        }}}
    }
    return list;
}

bool Universe::collide(const glm::ifvec3 &pos, const glm::vec3 &vel, int density, float radius) const {
    const auto dir = glm::normalize(vel);
    const auto velocity = vel * glm::vec3(density);
    const auto from = pos * density + dir;
    return raycast(Ray(from, dir, glm::length(velocity) + radius)).has_value();
}

bool Universe::move(glm::ifvec3 &pos, const glm::vec3 &vel, int density, float radius) const {
    const auto dir = glm::normalize(vel);
    const auto velocity = vel * glm::vec3(density);
    const auto from = pos * density + dir;
    if (const auto target = raycast(Ray(from, dir, glm::length(velocity) + radius))) {
        const auto target_dist = from.dist(glm::ifvec3(target.value().offset + target.value().pos.second, density)) - radius;
        pos += vel * glm::vec3(target_dist / glm::length(vel));
        return true;
    }
    pos += vel;
    return false;
}

entity_instance_id Universe::addEntity(entity_id type, const Entity::Instance &instance) {
    return std::make_pair(type, entities.at(type).instances.push(instance));
}

void Universe::getEntitiesModels(std::vector<std::pair<std::vector<glm::mat4>, buffer::Abstract *const>> &buffers,
                                 const std::optional<geometry::Frustum> &frustum, const glm::llvec3 &offset, int density) {
    entities.iter([&](entity_id, const Entity &entity) {
        std::vector<glm::mat4> mats;
        entity.instances.iter([&](entity_id, const Entity::Instance &inst) {
            const glm::vec3 fPos = (glm::vec3(inst.pos.raw_as_long() - offset * glm::llvec3(density)) + inst.pos.offset) / glm::vec3(density);
            if (!frustum.has_value() || frustum.value().contains(geometry::Box::fromMin(fPos, entity.size)))
                mats.emplace_back(glm::scale(glm::translate(glm::mat4(1), fPos * (float)density), entity.scale));
        });
        if(!mats.empty())
            buffers.emplace_back(mats, entity.buffer);
    });
}