Geometry Nodes: support baking volume geometry

This adds support for baking the volume component of a geometry. Previously,
volumes were simply removed by the simulation and bake nodes.

On disk, each volume geometry is written to a separate `.vdb` file that is stored in
the bake's `blobs` directory and referenced from the corresponding meta `.json` file.
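
For illustration, the volume entry in a frame's meta `.json` could look roughly as
follows. The `volume`, `vdb`, `materials`, `name`, `start` and `size` keys follow the
serialization code in the diff below; the file name and byte count are made-up
example values:

```json
"volume": {
  "vdb": {"name": "000001_file_1.vdb", "start": 0, "size": 182736},
  "materials": []
}
```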

Technically, it would also be easy to write the volume data into the same `.blob`
files that e.g. mesh attributes are written to. However, since `.vdb` is a well-known
file format, it seems reasonable to store it as a separate file. The serialization
code does not care whether the data lives in a separate file or is embedded into a
bigger one, so this decision can be made at a higher level.
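
As a rough self-contained sketch of that abstraction (stand-in names, not Blender's
actual `BlobWriter`/`DiskBlobWriter` API from the diff below): a stream-based write
hook whose default implementation buffers the bytes and embeds them into the shared
blob, while a disk-based subclass may instead redirect the stream into an
independent file:

```cpp
// Self-contained sketch only; names are stand-ins for Blender's actual
// BlobWriter/DiskBlobWriter shown in the diff below.
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <sstream>
#include <string>

struct Slice {
  std::string file_name;
  int64_t start = 0;
  int64_t size = 0;
};

class Writer {
 public:
  virtual ~Writer() = default;

  /* Append raw bytes to the shared blob file. */
  virtual Slice write(const void *data, int64_t size) = 0;

  /* Default: buffer the streamed bytes in memory and embed them into the
   * shared blob. A disk-based writer may override this and write an
   * independent file (e.g. `.vdb`) instead; callers don't have to care. */
  virtual Slice write_as_stream(const std::string & /*file_extension*/,
                                const std::function<void(std::ostream &)> &fn)
  {
    std::ostringstream stream{std::ios::binary};
    fn(stream);
    const std::string data = stream.str();
    return this->write(data.data(), int64_t(data.size()));
  }
};

/* Minimal in-memory writer that demonstrates the embedding path. */
class MemoryWriter : public Writer {
 public:
  std::string blob;

  Slice write(const void *data, const int64_t size) override
  {
    const int64_t start = int64_t(blob.size());
    blob.append(static_cast<const char *>(data), static_cast<std::size_t>(size));
    return {"memory.blob", start, size};
  }
};

int main()
{
  MemoryWriter writer;
  const Slice slice = writer.write_as_stream(
      ".vdb", [](std::ostream &stream) { stream << "vdb bytes would go here"; });
  std::cout << slice.file_name << ": start=" << slice.start << ", size=" << slice.size << "\n";
}
```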

As with other geometry types, materials are preserved. Note, however, that the
stand-alone `.vdb` files themselves contain no material information, so materials are
lost when those files are used outside of the bake.
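
For instance, such a stand-alone file can be inspected with stock OpenVDB outside of
Blender; all grids are available, but no material assignments. A minimal sketch (the
file path is hypothetical):

```cpp
// Minimal sketch: inspect a baked stand-alone .vdb with plain OpenVDB.
// The file path is hypothetical; compile and link against OpenVDB.
#include <iostream>

#include <openvdb/openvdb.h>

int main()
{
  openvdb::initialize();
  openvdb::io::File file("/tmp/bake/blobs/000001_file_1.vdb"); /* hypothetical path */
  file.open();
  for (auto name_it = file.beginName(); name_it != file.endName(); ++name_it) {
    /* Grids are regular OpenVDB grids; material assignments are not stored here. */
    const openvdb::GridBase::Ptr grid = file.readGrid(*name_it);
    std::cout << grid->getName() << " (" << grid->valueType() << ")\n";
  }
  file.close();
  return 0;
}
```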

Currently, volume grids are not deduplicated on disk. This could be added in the
future if necessary.

Pull Request: https://projects.blender.org/blender/blender/pulls/117781
Jacques Lucke 2024-02-03 18:13:34 +01:00
parent 82d9b384e4
commit 444e148976
6 changed files with 184 additions and 6 deletions


@@ -5,6 +5,7 @@
#pragma once
#include "BLI_fileops.hh"
#include "BLI_function_ref.hh"
#include "BLI_serialize.hh"
#include "BKE_bake_items.hh"
@@ -33,6 +34,13 @@ class BlobReader {
* \return True on success, otherwise false.
*/
[[nodiscard]] virtual bool read(const BlobSlice &slice, void *r_data) const = 0;
/**
* Provides an #istream that can be used to read the data from the given slice.
* \return True on success, otherwise false.
*/
[[nodiscard]] virtual bool read_as_stream(const BlobSlice &slice,
FunctionRef<bool(std::istream &)> fn) const;
};
/**
@@ -45,6 +53,15 @@ class BlobWriter {
* \return Slice where the data has been written to.
*/
virtual BlobSlice write(const void *data, int64_t size) = 0;
/**
* Provides an #ostream that can be used to write the blob.
* \param file_extension: May be used if the data is written to an independent file. Depending on
* the implementation, this may be ignored.
* \return Slice where the data has been written to.
*/
virtual BlobSlice write_as_stream(StringRef file_extension,
FunctionRef<void(std::ostream &)> fn);
};
/**
@@ -161,16 +178,22 @@ class DiskBlobWriter : public BlobWriter {
/** Directory path that contains all blob files. */
std::string blob_dir_;
/** Name of the file that data is written to. */
std::string base_name_;
std::string blob_name_;
/** File handle. The file is opened when the first data is written. */
std::fstream blob_stream_;
/** Current position in the file. */
int64_t current_offset_ = 0;
/** Used to generate file names for bake data that is stored in independent files. */
int independent_file_count_ = 0;
public:
DiskBlobWriter(std::string blob_dir, std::string blob_name);
DiskBlobWriter(std::string blob_dir, std::string base_name);
BlobSlice write(const void *data, int64_t size) override;
BlobSlice write_as_stream(StringRef file_extension,
FunctionRef<void(std::ostream &)> fn) override;
};
void serialize_bake(const BakeState &bake_state,


@@ -9,6 +9,7 @@
* \brief Volume data-block.
*/
#include <memory>
#include <optional>
#include "BLI_bounds_types.hh"
@@ -24,6 +25,10 @@ struct Scene;
struct Volume;
struct VolumeGridVector;
namespace blender::bke::bake {
struct BakeMaterialsList;
}
/* Module */
void BKE_volumes_init();
@@ -133,6 +138,8 @@ struct VolumeRuntime {
char velocity_x_grid[64] = "";
char velocity_y_grid[64] = "";
char velocity_z_grid[64] = "";
std::unique_ptr<bake::BakeMaterialsList> bake_materials;
};
} // namespace blender::bke


@@ -9,6 +9,7 @@
#include "BKE_lib_id.hh"
#include "BKE_mesh.hh"
#include "BKE_pointcloud.hh"
#include "BKE_volume.hh"
#include "BLI_endian_defines.h"
#include "BLI_endian_switch.h"
@@ -16,6 +17,7 @@
#include "BLI_path_util.h"
#include "DNA_material_types.h"
#include "DNA_volume_types.h"
#include "RNA_access.hh"
#include "RNA_enum_types.hh"
@@ -71,12 +73,17 @@ void GeometryBakeItem::prepare_geometry_for_bake(GeometrySet &main_geometry,
pointcloud->runtime->bake_materials = materials_to_weak_references(
&pointcloud->mat, &pointcloud->totcol, data_block_map);
}
if (Volume *volume = geometry.get_volume_for_write()) {
volume->runtime->bake_materials = materials_to_weak_references(
&volume->mat, &volume->totcol, data_block_map);
}
if (bke::Instances *instances = geometry.get_instances_for_write()) {
instances->attributes_for_write().remove_anonymous();
}
geometry.keep_only_during_modify({GeometryComponent::Type::Mesh,
GeometryComponent::Type::Curve,
GeometryComponent::Type::PointCloud,
GeometryComponent::Type::Volume,
GeometryComponent::Type::Instance});
});
}
@@ -125,6 +132,12 @@ void GeometryBakeItem::try_restore_data_blocks(GeometrySet &main_geometry,
std::move(pointcloud->runtime->bake_materials),
data_block_map);
}
if (Volume *volume = geometry.get_volume_for_write()) {
restore_materials(&volume->mat,
&volume->totcol,
std::move(volume->runtime->bake_materials),
data_block_map);
}
});
}


@@ -10,6 +10,7 @@
#include "BKE_lib_id.hh"
#include "BKE_mesh.hh"
#include "BKE_pointcloud.hh"
#include "BKE_volume.hh"
#include "BLI_endian_defines.h"
#include "BLI_endian_switch.h"
@@ -18,12 +19,21 @@
#include "BLI_path_util.h"
#include "DNA_material_types.h"
#include "DNA_volume_types.h"
#include "RNA_access.hh"
#include "RNA_enum_types.hh"
#include <fmt/format.h>
#include <sstream>
#ifdef WITH_OPENVDB
# include <openvdb/io/Stream.h>
# include <openvdb/openvdb.h>
# include "BKE_volume_grid.hh"
#endif
namespace blender::bke::bake {
using namespace io::serialize;
@@ -50,6 +60,30 @@ std::optional<BlobSlice> BlobSlice::deserialize(const DictionaryValue &io_slice)
return BlobSlice{*name, {*start, *size}};
}
BlobSlice BlobWriter::write_as_stream(const StringRef /*file_extension*/,
const FunctionRef<void(std::ostream &)> fn)
{
std::ostringstream stream{std::ios::binary};
fn(stream);
std::string data = stream.rdbuf()->str();
return this->write(data.data(), data.size());
}
bool BlobReader::read_as_stream(const BlobSlice &slice, FunctionRef<bool(std::istream &)> fn) const
{
const int64_t size = slice.range.size();
std::string buffer;
buffer.resize(size);
if (!this->read(slice, buffer.data())) {
return false;
}
std::istringstream stream{buffer, std::ios::binary};
if (!fn(stream)) {
return false;
}
return true;
}
DiskBlobReader::DiskBlobReader(std::string blobs_dir) : blobs_dir_(std::move(blobs_dir)) {}
[[nodiscard]] bool DiskBlobReader::read(const BlobSlice &slice, void *r_data) const
@@ -73,9 +107,10 @@ DiskBlobReader::DiskBlobReader(std::string blobs_dir) : blobs_dir_(std::move(blo
return true;
}
DiskBlobWriter::DiskBlobWriter(std::string blob_dir, std::string blob_name)
: blob_dir_(std::move(blob_dir)), blob_name_(std::move(blob_name))
DiskBlobWriter::DiskBlobWriter(std::string blob_dir, std::string base_name)
: blob_dir_(std::move(blob_dir)), base_name_(std::move(base_name))
{
blob_name_ = base_name_ + ".blob";
}
BlobSlice DiskBlobWriter::write(const void *data, const int64_t size)
@@ -93,6 +128,23 @@ BlobSlice DiskBlobWriter::write(const void *data, const int64_t size)
return {blob_name_, {old_offset, size}};
}
BlobSlice DiskBlobWriter::write_as_stream(const StringRef file_extension,
const FunctionRef<void(std::ostream &)> fn)
{
BLI_assert(file_extension.startswith("."));
independent_file_count_++;
const std::string file_name = fmt::format(
"{}_file_{}{}", base_name_, independent_file_count_, std::string_view(file_extension));
char path[FILE_MAX];
BLI_path_join(path, sizeof(path), blob_dir_.c_str(), file_name.c_str());
BLI_file_ensure_parent_dir_exists(path);
std::fstream stream{path, std::ios::out | std::ios::binary};
fn(stream);
const int64_t written_bytes_num = stream.tellg();
return {file_name, {0, written_bytes_num}};
}
BlobWriteSharing::~BlobWriteSharing()
{
for (const ImplicitSharingInfo *sharing_info : stored_by_runtime_.keys()) {
@@ -688,6 +740,54 @@ static std::unique_ptr<Instances> try_load_instances(const DictionaryValue &io_g
return instances;
}
#ifdef WITH_OPENVDB
static Volume *try_load_volume(const DictionaryValue &io_geometry, const BlobReader &blob_reader)
{
const DictionaryValue *io_volume = io_geometry.lookup_dict("volume");
if (!io_volume) {
return nullptr;
}
const auto *io_vdb = io_volume->lookup_dict("vdb");
if (!io_vdb) {
return nullptr;
}
openvdb::GridPtrVecPtr vdb_grids;
if (std::optional<BlobSlice> vdb_slice = BlobSlice::deserialize(*io_vdb)) {
if (!blob_reader.read_as_stream(*vdb_slice, [&](std::istream &stream) {
try {
openvdb::io::Stream vdb_stream{stream};
vdb_grids = vdb_stream.getGrids();
return true;
}
catch (...) {
return false;
}
}))
{
return nullptr;
}
}
Volume *volume = reinterpret_cast<Volume *>(BKE_id_new_nomain(ID_VO, nullptr));
auto cancel = [&]() {
BKE_id_free(nullptr, volume);
return nullptr;
};
for (openvdb::GridBase::Ptr &vdb_grid : *vdb_grids) {
if (vdb_grid) {
bke::GVolumeGrid grid{std::move(vdb_grid)};
BKE_volume_grid_add(volume, *grid.release());
}
}
if (const io::serialize::ArrayValue *io_materials = io_volume->lookup_array("materials")) {
if (!load_materials(*io_materials, volume->runtime->bake_materials)) {
return cancel();
}
}
return volume;
}
#endif
static GeometrySet load_geometry(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobReadSharing &blob_sharing)
@@ -697,6 +797,9 @@ static GeometrySet load_geometry(const DictionaryValue &io_geometry,
geometry.replace_pointcloud(try_load_pointcloud(io_geometry, blob_reader, blob_sharing));
geometry.replace_curves(try_load_curves(io_geometry, blob_reader, blob_sharing));
geometry.replace_instances(try_load_instances(io_geometry, blob_reader, blob_sharing).release());
#ifdef WITH_OPENVDB
geometry.replace_volume(try_load_volume(io_geometry, blob_reader));
#endif
return geometry;
}
@@ -823,6 +926,34 @@ static std::shared_ptr<DictionaryValue> serialize_geometry_set(const GeometrySet
auto io_attributes = serialize_attributes(curves.attributes(), blob_writer, blob_sharing, {});
io_curves->append("attributes", io_attributes);
}
#ifdef WITH_OPENVDB
if (geometry.has_volume()) {
const Volume &volume = *geometry.get_volume();
const int grids_num = BKE_volume_num_grids(&volume);
auto io_volume = io_geometry->append_dict("volume");
auto io_vdb = blob_writer
.write_as_stream(".vdb",
[&](std::ostream &stream) {
openvdb::GridCPtrVec vdb_grids;
Vector<bke::VolumeTreeAccessToken> tree_tokens;
for (const int i : IndexRange(grids_num)) {
const bke::VolumeGridData *grid = BKE_volume_grid_get(
&volume, i);
tree_tokens.append_as();
vdb_grids.push_back(grid->grid_ptr(tree_tokens.last()));
}
openvdb::io::Stream vdb_stream(stream);
vdb_stream.write(vdb_grids);
})
.serialize();
io_volume->append("vdb", std::move(io_vdb));
auto io_materials = serialize_materials(volume.runtime->bake_materials);
io_volume->append("materials", io_materials);
}
#endif
if (geometry.has_instances()) {
const Instances &instances = *geometry.get_instances();
auto io_instances = io_geometry->append_dict("instances");


@@ -30,6 +30,7 @@
#include "BLI_utildefines.h"
#include "BKE_anim_data.h"
#include "BKE_bake_data_block_id.hh"
#include "BKE_bpath.h"
#include "BKE_geometry_set.hh"
#include "BKE_global.h"
@@ -170,6 +171,11 @@ static void volume_copy_data(Main * /*bmain*/, ID *id_dst, const ID *id_src, con
STRNCPY(volume_dst->runtime->velocity_y_grid, volume_src->runtime->velocity_y_grid);
STRNCPY(volume_dst->runtime->velocity_z_grid, volume_src->runtime->velocity_z_grid);
if (volume_src->runtime->bake_materials) {
volume_dst->runtime->bake_materials = std::make_unique<blender::bke::bake::BakeMaterialsList>(
*volume_src->runtime->bake_materials);
}
volume_dst->batch_cache = nullptr;
}


@@ -318,15 +318,13 @@ static void bake_geometry_nodes_startjob(void *customdata, wmJobWorkerStatus *wo
const bake::BakePath path = request.path;
const std::string blob_file_name = frame_file_name + ".blob";
char meta_path[FILE_MAX];
BLI_path_join(meta_path,
sizeof(meta_path),
path.meta_dir.c_str(),
(frame_file_name + ".json").c_str());
BLI_file_ensure_parent_dir_exists(meta_path);
bake::DiskBlobWriter blob_writer{path.blobs_dir, blob_file_name};
bake::DiskBlobWriter blob_writer{path.blobs_dir, frame_file_name};
fstream meta_file{meta_path, std::ios::out};
bake::serialize_bake(frame_cache.state, blob_writer, *request.blob_sharing, meta_file);
}