NVIDIA IndeX example code
ray_sampling_sparse_volume.h
/******************************************************************************
 * Copyright 2023 NVIDIA Corporation. All rights reserved.
 *****************************************************************************/

#ifndef EXAMPLES_RAY_SAMPLING_SPARSE_VOLUME_H
#define EXAMPLES_RAY_SAMPLING_SPARSE_VOLUME_H

#include "ray_sampling_scenes.h"

#include <nv/index/isparse_volume_scene_element.h>
#include <nv/index/isparse_volume_rendering_properties.h>

#include <nv/index/app/forwarding_logger.h>

namespace sparse_volume {

static const char* program_begin =
";NV_IDX_XAC_VERSION_1_0 \n"
" \n"
"class Volume_sample_program \n"
"{ \n"
"    NV_IDX_VOLUME_SAMPLE_PROGRAM \n"
" \n"
"public: \n"
"    const nv::index::xac::Colormap colormap = state.self.get_colormap(); \n"
"    int counter; \n"
" \n"
"    NV_IDX_DEVICE_INLINE_MEMBER void initialize() \n"
"    { \n"
"        counter = 0; \n"
"    } \n"
" \n"
;

static const char* program_end =
"}; \n"
;

static const char* color_program =
"    // The user program used for rendering. \n"
"    NV_IDX_DEVICE_INLINE_MEMBER int execute( \n"
"        const Sample_info_self& sample_info, \n"
"        Sample_output& sample_output) \n"
"    { \n"
"        const auto& svol = state.self; \n"
"        const auto svol_sampler = svol.generate_sampler<float>(0u, \n"
"            sample_info.sample_context); \n"
"        const float v = svol_sampler.fetch_sample( \n"
"            sample_info.sample_position_object_space); \n"
"        sample_output.set_color(colormap.lookup(v)); \n"
"        return NV_IDX_PROG_OK; \n"
"    } \n"
;

static const char* inquire_program =
"    // The user program used for picking. \n"
"    NV_IDX_DEVICE_INLINE_MEMBER int inquire( \n"
"        const Sample_info_self& sample_info, \n"
"        Query_results& query_results) \n"
"    { \n"
"        // write some attribute value of the current sample \n"
"        const unsigned int attr_idx = 0u; \n"
"        const auto& svol = state.self; \n"
"        const auto svol_sampler = svol.generate_sampler<float>(attr_idx, \n"
"            sample_info.sample_context); \n"
"        const float v = svol_sampler.fetch_sample( \n"
"            sample_info.sample_position_object_space); \n"
"        query_results.write_value<float>(0u /*user_value_idx*/, v); \n"
" \n"
"        counter += 1; \n"
"        if (counter & 1) { \n"
"            // write every other counter value as a user value \n"
"            query_results.write_value<int>(1u /*user_value_idx*/, counter); \n"
"        } \n"
" \n"
//"        const float3 normal = make_float3(1.f, 0.3f, 0.1f); \n"
"        const float3 normal = sample_info.scene_position; \n"
"        query_results.write_value<float3>(2u /*user_value_idx*/, normal); \n"
" \n"
"        return NV_IDX_PROG_OK; \n"
" \n"
"        // we could stop sampling here instead: \n"
"        //return NV_IDX_PROG_TERMINATE_PROGRAM_INSTANCE; \n"
"    } \n"
;

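// The three string fragments above are pieces of a single NVIDIA IndeX XAC
// volume sample program. create_scene() below assembles the final source by
// concatenating program_begin + color_program (+ inquire_program when the
// 'user_program_mode' option is greater than 1) + program_end, which yields
// roughly the following device-side class (sketch only):
//
//     ;NV_IDX_XAC_VERSION_1_0
//     class Volume_sample_program
//     {
//         NV_IDX_VOLUME_SAMPLE_PROGRAM
//     public:
//         const nv::index::xac::Colormap colormap = state.self.get_colormap();
//         int counter;
//         NV_IDX_DEVICE_INLINE_MEMBER void initialize() { counter = 0; }
//         NV_IDX_DEVICE_INLINE_MEMBER int  execute(...);   // rendering
//         NV_IDX_DEVICE_INLINE_MEMBER int  inquire(...);   // picking (optional)
//     };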

// Scene setup for the sparse-volume ray-sampling example, implementing the
// scene interface declared in ray_sampling_scenes.h (the class name used
// below is an illustrative stand-in).
class Sparse_volume_scene
{
public:
    const char* name() const { return "Sparse Volume"; }

    void register_classes(nv::index::IIndex* index_interface) const
    {
        // empty
    }

    void add_arguments(std::map<std::string, std::string>& opt_map) const
    {
        opt_map["svol.input_file_base_name"] = "";
        opt_map["svol.input_directory"] = "";
        opt_map["svol.input_file_extension"] = ".idx_vraw";
        opt_map["svol.format"] = "uint8";
        opt_map["svol.size"] = "500 500 1500";
        opt_map["svol.ordering"] = "xyz";
        opt_map["svol.filter_mode"] = "nearest";
        opt_map["svol.preintegration"] = "0";
        opt_map["svol.lod_render"] = "0";
        opt_map["svol.show_lod"] = "0";
    }

    void usage_info(std::ostream& os, const char* indent, std::map<std::string, std::string>& opt_map) const
    {
        os << indent << "[-svol.input_directory VOLUME_DIR]\n"
           << indent << "    the directory of the input volume.\n"
           << indent << "    (default: [" << opt_map["svol.input_directory"] << "])\n"
           << indent << "[-svol.input_file_base_name VOLUME_BASE_NAME]\n"
           << indent << "    the file base name of the input volume (without extension).\n"
           << indent << "    (default: [" << opt_map["svol.input_file_base_name"] << "])\n"
           << indent << "[-svol.input_file_extension VOLUME_FILE_EXT]\n"
           << indent << "    the file extension of the input volume.\n"
           << indent << "    (default: [" << opt_map["svol.input_file_extension"] << "])\n"
           << indent << "[-svol.format VOXEL_FORMAT]\n"
           << indent << "    the voxel format: 'uint8', 'float32'.\n"
           << indent << "    (default: [" << opt_map["svol.format"] << "])\n"
           << indent << "[-svol.size SIZE_X SIZE_Y SIZE_Z]\n"
           << indent << "    the volume size.\n"
           << indent << "    (default: [" << opt_map["svol.size"] << "])\n"
           << indent << "[-svol.ordering ORDER_NAME]\n"
           << indent << "    the input volume axis ordering: 'xyz', 'zyx'.\n"
           << indent << "    (default: [" << opt_map["svol.ordering"] << "])\n"
           << indent << "[-svol.filter_mode FILTER_NAME]\n"
           << indent << "    the volume filter mode: 'nearest', 'trilinear'.\n"
           << indent << "    (default: [" << opt_map["svol.filter_mode"] << "])\n"
           << indent << "[-svol.preintegration BOOL]\n"
           << indent << "    render using the pre-integration-based volume\n"
           << indent << "    rendering technique.\n"
           << indent << "    (default: " << opt_map["svol.preintegration"] << ")\n"
           << indent << "[-svol.lod_render BOOL]\n"
           << indent << "    render using the level-of-detail volume\n"
           << indent << "    rendering technique.\n"
           << indent << "    (default: " << opt_map["svol.lod_render"] << ")\n"
           << indent << "[-svol.show_lod BOOL]\n"
           << indent << "    show the level-of-detail structure used by the volume\n"
           << indent << "    rendering technique.\n"
           << indent << "    (default: " << opt_map["svol.show_lod"] << ")\n";
    }
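
    // Example invocation using the options above (a sketch only: the executable
    // name 'ray_sampling' and the paths are illustrative):
    //
    //   ray_sampling -svol.input_directory /data/volumes \
    //                -svol.input_file_base_name my_volume \
    //                -svol.format uint8 \
    //                -svol.size 500 500 1500 \
    //                -svol.filter_mode trilinear \
    //                -svol.lod_render 1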

    const char* get_roi_string() const
    {
        return "0 0 0 500 500 500";
    }
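
    // Note: with the default -svol.size of "500 500 1500", the region of
    // interest above covers only the lower third of the volume along z.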

    bool create_scene(
        nv::index::app::IApplication_layer* app_layer,
        Scene_info& scene_info,
        const mi::math::Bbox<mi::Float32, 3>& roi_bbox,
        const mi::neuraylib::Tag& session_tag,
        std::map<std::string, std::string>& opt_map,
        mi::neuraylib::IDice_transaction* transaction) const
    {
        check_success(session_tag.is_valid());
        check_success(transaction != 0);

        // Access the session instance from the database.
        mi::base::Handle<const nv::index::ISession> session(
            transaction->access<const nv::index::ISession>(session_tag));
        check_success(session.is_valid_interface());

        // Access (edit mode) the scene instance from the database.
        mi::base::Handle<nv::index::IScene> scene(
            transaction->edit<nv::index::IScene>(session->get_scene()));
        check_success(scene.is_valid_interface());

        // Create a color map using an external utility function.
        const mi::Sint32 colormap_entry_id = 41; // same as demo application's colormap file 40
        mi::neuraylib::Tag colormap_tag = create_colormap(colormap_entry_id, scene.get(), transaction);
        check_success(colormap_tag.is_valid());

        // Sparse volume rendering properties
        mi::base::Handle<nv::index::ISparse_volume_rendering_properties> svol_render_prop(
            scene->create_attribute<nv::index::ISparse_volume_rendering_properties>());

        const std::string filter_mode = opt_map["svol.filter_mode"];
        if (filter_mode == "nearest")
        {
            svol_render_prop->set_filter_mode(nv::index::SPARSE_VOLUME_FILTER_NEAREST);
        }
        else if (filter_mode == "trilinear")
        {
            svol_render_prop->set_filter_mode(nv::index::SPARSE_VOLUME_FILTER_TRILINEAR_POST);
        }

        const bool use_preint_render = nv::index::app::get_bool(opt_map["svol.preintegration"]);
        svol_render_prop->set_preintegrated_volume_rendering(use_preint_render);

        svol_render_prop->set_voxel_offsets(mi::math::Vector<mi::Float32, 3>(0.5f, 0.5f, 0.5f));

        const bool use_lod_render = nv::index::app::get_bool(opt_map["svol.lod_render"]);
        svol_render_prop->set_lod_rendering_enabled(use_lod_render);
        svol_render_prop->set_lod_pixel_threshold(2.5f);

        const bool show_lod_render = nv::index::app::get_bool(opt_map["svol.show_lod"]);
        svol_render_prop->set_debug_visualization_option(show_lod_render ? 2u : 0u);

        const mi::neuraylib::Tag svol_render_prop_tag = transaction->store_for_reference_counting(svol_render_prop.get());
        check_success(svol_render_prop_tag.is_valid());

        const mi::math::Vector<mi::Uint32, 3> svol_input_size = nv::index::app::get_vector_from_string<mi::Uint32, 3>(opt_map["svol.size"]);
        const mi::math::Bbox<mi::Float32, 3> svol_input_bbox = mi::math::Bbox<mi::Float32, 3>(0.0f, 0.0f, 0.0f,
            static_cast<mi::Float32>(svol_input_size.x),
            static_cast<mi::Float32>(svol_input_size.y),
            static_cast<mi::Float32>(svol_input_size.z));

        // sparse volume creation parameters
        std::map<std::string, std::string> sparse_volume_opt;
        sparse_volume_opt["type"] = "sparse_volume";
        sparse_volume_opt["importer"] = "nv::index::plugin::base_importer.Sparse_volume_importer_raw";
        sparse_volume_opt["input_directory"] = opt_map["svol.input_directory"];
        sparse_volume_opt["input_file_base_name"] = opt_map["svol.input_file_base_name"];
        sparse_volume_opt["input_file_extension"] = opt_map["svol.input_file_extension"];
        sparse_volume_opt["convert_zyx_to_xyz"] = (opt_map["svol.ordering"] != "xyz") ? "true" : "false";
        sparse_volume_opt["bbox"] = "0 0 0 " + opt_map["svol.size"];
        sparse_volume_opt["voxel_format"] = opt_map["svol.format"];

        nv::index::IDistributed_data_import_callback* importer_callback =
            get_importer_from_application_layer(
                app_layer,
                "nv::index::plugin::base_importer.Sparse_volume_importer_raw",
                sparse_volume_opt);

        const mi::math::Matrix<mi::Float32, 4, 4> svol_transform(1.0f);
        mi::base::Handle<nv::index::ISparse_volume_scene_element> svol_scene_elem(
            scene->create_sparse_volume(svol_input_bbox, svol_transform, importer_callback, transaction));

        const mi::neuraylib::Tag svol_scene_elem_tag = transaction->store_for_reference_counting(svol_scene_elem.get());
        check_success(svol_scene_elem_tag.is_valid());

        // create a static group node for large data and append the volume
        mi::base::Handle<nv::index::IStatic_scene_group> static_group(
            scene->create_scene_group<nv::index::IStatic_scene_group>());
        check_success(static_group.is_valid_interface());

        // Append the attributes (colormap, rendering properties and, optionally,
        // the volume sample program) before the volume element.
        static_group->append(colormap_tag, transaction);
        static_group->append(svol_render_prop_tag, transaction);

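        // 'user_program_mode' is expected to come from the shared example
        // options (it is not set in add_arguments() above). It selects the
        // XAC sampling program: 0 = no user program is attached, 1 = color
        // program only, >1 = color program plus the inquire() entry point
        // used for picking.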
        const int user_prg = nv::index::app::get_sint32(opt_map["user_program_mode"]);
        if (user_prg) {
            mi::base::Handle<nv::index::IRendering_kernel_program> sampling_program(
                scene->create_attribute<nv::index::IVolume_sample_program>());
            check_success(sampling_program.is_valid_interface());

            std::string program_src = scene_info.rtc_program_source;
            if (program_src.empty()) {
                program_src = program_begin;
                program_src.append(color_program);
                if (user_prg > 1) {
                    program_src.append(inquire_program);
                }
                program_src.append(program_end);
                scene_info.rtc_program_source = program_src;
            }
            sampling_program->set_program_source(program_src.c_str());
            mi::neuraylib::Tag prog_tag = transaction->store_for_reference_counting(sampling_program.get());
            check_success(prog_tag.is_valid());

            static_group->append(prog_tag, transaction);
        }

        static_group->append(svol_scene_elem_tag, transaction);

        mi::neuraylib::Tag static_group_tag =
            transaction->store_for_reference_counting(static_group.get());
        check_success(static_group_tag.is_valid());

        // append the static scene group to the hierarchical scene description.
        scene->append(static_group_tag, transaction);

        std::stringstream sstr;
        sstr << "Created a sparse volume: size = ["
             << svol_input_size.x << " " << svol_input_size.y << " " << svol_input_size.z
             << "], tag = " << svol_scene_elem_tag.id;
        INFO_LOG << sstr.str();

        return true;
    }

    void setup_camera(
        const mi::neuraylib::Tag& camera_tag,
        mi::neuraylib::IDice_transaction* transaction) const
    {
        check_success(camera_tag.is_valid());
        check_success(transaction != 0);
        mi::base::Handle<nv::index::IPerspective_camera>
            cam(transaction->edit<nv::index::IPerspective_camera>(camera_tag));
        check_success(cam.is_valid_interface());

        const mi::math::Vector<mi::Float32, 3> from(-0.340944170951843f, -0.170461803674698f, 0.0734406113624573f);
        const mi::math::Vector<mi::Float32, 3> to  ( 0.702501833438873f,  0.492376863956451f, -0.513864099979401f);
        const mi::math::Vector<mi::Float32, 3> up  ( 0.371015131473541f,  0.362788468599319f,  0.854828774929047f);
        mi::math::Vector<mi::Float32, 3> viewdir = to; // - from;
        viewdir.normalize();

        cam->set(from, viewdir, up);
        cam->set_aperture(0.033f);
        cam->set_aspect(1.0f);
        cam->set_focal(0.03f);
        cam->set_clip_min(0.001f);
        cam->set_clip_max(400.0f);
    }

    virtual float get_scene_scaling() const { return 0.001f; }
};

} // namespace sparse_volume

#endif // EXAMPLES_RAY_SAMPLING_SPARSE_VOLUME_H