diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 7b5d3fd15..7376a9c3f 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -56,3 +56,8 @@ v1.4.0 - Infinigen Indoors
 - Add library of procedural generators for indoor objects & materials
 - Add indoor scene generation system, including constraint language and solver
 - Add HelloRoom.md & ExportingToSimulators.md
+
+v1.4.1
+- @David-Yan1 fix placeholder & ocmesher submodule version
+- @lahavlipson fix bug in surface normals of wall meshes
+- @araistrick bugfix example commands & other typos
\ No newline at end of file
diff --git a/docs/ConfiguringInfinigen.md b/docs/ConfiguringInfinigen.md
index 1acc24d04..3561f1462 100644
--- a/docs/ConfiguringInfinigen.md
+++ b/docs/ConfiguringInfinigen.md
@@ -172,7 +172,7 @@ We recommend this command as a starting point for generating high quality videos
 ````
 python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \
     --pipeline_config slurm monocular_video cuda_terrain opengl_gt \
-    --cleanup big_files --warmup_sec 60000 --config video high_quality_terrain
+    --cleanup big_files --warmup_sec 60000 --config trailer_video high_quality_terrain
 ````
 
 #### Creating large-scale stereo datasets
@@ -218,7 +218,7 @@ python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_
 ```
 python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \
     --pipeline_config slurm monocular_video cuda_terrain opengl_gt \
-    --cleanup big_files --warmup_sec 30000 --config video high_quality_terrain \
+    --cleanup big_files --warmup_sec 30000 --config trailer_video high_quality_terrain \
     --overrides camera.camera_pose_proposal.altitude=["uniform", 20, 30]
 ```
 
@@ -228,7 +228,7 @@ python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_
 ```
 python -m infinigen.datagen.manage_jobs --output_folder outputs/my_videos --num_scenes 500 \
     --pipeline_config slurm monocular_video cuda_terrain opengl_gt \
-    --cleanup big_files --warmup_sec 30000 --config video high_quality_terrain \
+    --cleanup big_files --warmup_sec 30000 --config trailer_video high_quality_terrain \
     --pipeline_overrides iterate_scene_tasks.frame_range=[1,25]
 ```
 
diff --git a/docs/HelloRoom.md b/docs/HelloRoom.md
index bbace7ba0..244ed1cf6 100644
--- a/docs/HelloRoom.md
+++ b/docs/HelloRoom.md
@@ -76,12 +76,12 @@ We also provide an OpenGL-based ground truth extractor which offers additional g
 
 To generate a single scene in one command, you can run the following:
 ```bash
-screen python -m infinigen.datagen.manage_jobs --output_folder outputs/my_dataset --num_scenes 1000 --pipeline_configs local_256.gin monocular.gin blender_gt.gin indoor_background_configs.gin --configs singleroom.gin --pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' manage_datagen_jobs.num_concurrent=16 --overrides compose_indoors.restrict_single_supported_roomtype=True
+screen python -m infinigen.datagen.manage_jobs --output_folder outputs/my_dataset --num_scenes 1000 --pipeline_configs local_256GB.gin monocular.gin blender_gt.gin indoor_background_configs.gin --configs singleroom.gin --pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' manage_datagen_jobs.num_concurrent=16 --overrides compose_indoors.restrict_single_supported_roomtype=True
 ```
 
 To create a large dataset of many random rooms, we recommend:
 ```bash
-screen python -m infinigen.datagen.manage_jobs --output_folder outputs/my_dataset --num_scenes 1000 --pipeline_configs local_256.gin monocular.gin blender_gt.gin indoor_background_configs.gin --configs singleroom.gin --pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' manage_datagen_jobs.num_concurrent=16 --overrides compose_indoors.restrict_single_supported_roomtype=True
+screen python -m infinigen.datagen.manage_jobs --output_folder outputs/my_dataset --num_scenes 1000 --pipeline_configs local_256GB.gin monocular.gin blender_gt.gin indoor_background_configs.gin --configs singleroom.gin --pipeline_overrides get_cmd.driver_script='infinigen_examples.generate_indoors' manage_datagen_jobs.num_concurrent=16 --overrides compose_indoors.restrict_single_supported_roomtype=True
 ```
 
 You can inspect `outputs/my_dataset/SEED/` to see the running logs of the subprocesses and output results.
diff --git a/docs/Installation.md b/docs/Installation.md
index d5152facd..dc18dbc01 100644
--- a/docs/Installation.md
+++ b/docs/Installation.md
@@ -98,7 +98,7 @@ INFINIGEN_MINIMAL_INSTALL=True bash scripts/install/interactive_blender.sh
 bash scripts/install/interactive_blender.sh
 
 # Enable OpenGL GT
-INFINIGEN_INSTALL_CUSTOMGT=True scripts/install/interactive_blender.sh
+INFINIGEN_INSTALL_CUSTOMGT=True bash scripts/install/interactive_blender.sh
 ```
 
 :exclamation: If you encounter any issues with the above, please add ` > logs.txt 2>&1` to the end of your command and run again, then provide the resulting logs.txt file as an attachment when making a Github Issue.
diff --git a/infinigen/OcMesher b/infinigen/OcMesher
index 4e5fad7b0..d3d1441ab 160000
--- a/infinigen/OcMesher
+++ b/infinigen/OcMesher
@@ -1 +1 @@
-Subproject commit 4e5fad7b0dd495444acf3ab2037bf08dd4b5d676
+Subproject commit d3d1441ab57c48db3ec40c621fc3d0c323579e8a
diff --git a/infinigen/__init__.py b/infinigen/__init__.py
index da6aaa4ca..9af1e6f0d 100644
--- a/infinigen/__init__.py
+++ b/infinigen/__init__.py
@@ -1,3 +1,3 @@
 import logging
 
-__version__ = "1.4.0"
+__version__ = "1.4.1"
diff --git a/infinigen/assets/shelves/kitchen_cabinet.py b/infinigen/assets/shelves/kitchen_cabinet.py
index 6dea292a0..c9e9d1335 100644
--- a/infinigen/assets/shelves/kitchen_cabinet.py
+++ b/infinigen/assets/shelves/kitchen_cabinet.py
@@ -325,4 +325,4 @@ def sample_params(self):
 
     def create_placeholder(self, **kwargs) -> bpy.types.Object:
         x,y,z = self.dimensions
-        return new_bbox(-x/2 * 1.2, x/2 * 1.2, 0, y * 1.1, 0, (z + 0.06) * 1.03)
+        return new_bbox(-x/2 * 1.2, x/2 * 1.2, 0, y * 1.1, 0, (z + 0.06))
diff --git a/infinigen/assets/shelves/kitchen_space.py b/infinigen/assets/shelves/kitchen_space.py
index 66c65efdb..4f4136ca5 100644
--- a/infinigen/assets/shelves/kitchen_space.py
+++ b/infinigen/assets/shelves/kitchen_space.py
@@ -145,7 +145,7 @@ def sample_parameters(self, dimensions):
 
     def create_placeholder(self, **kwargs) -> bpy.types.Object:
         x, y, z = self.dimensions
-        box = new_bbox(-x/2 * 1.08, x/2 * 1.08, 0, y, 0, self.cabinet_bottom_height + 0.13)
+        box = new_bbox(-x/2 * 1.08, x/2 * 1.08, 0, y, 0, self.cabinet_bottom_height + 0.095)
         surface.add_geomod(box, nodegroup_tag_cube, apply=True)
 
         if not self.island:
diff --git a/infinigen/core/execute_tasks.py b/infinigen/core/execute_tasks.py
index 0b082aaa9..d4f8b4b7d 100644
--- a/infinigen/core/execute_tasks.py
+++ b/infinigen/core/execute_tasks.py
@@ -81,7 +81,7 @@
     pipeline,
     exporting
 )
-from infinigen.tools.export import export_scene
+from infinigen.tools.export import export_scene, triangulate_meshes
 from infinigen.core.util.math import FixedSeed, int_hash
 from infinigen.core.util.logging import Timer, save_polycounts, create_text_file
 from infinigen.core.util.pipeline import RandomStageExecutor
@@ -228,29 +228,14 @@ def render(scene_seed, output_folder, camera_id, render_image_func=render_image,
     with Timer('Render Frames'):
         render_image_func(frames_folder=Path(output_folder), camera_id=camera_id)
 
-def triangulate_meshes():
-    for obj in bpy.context.scene.objects:
-        if obj.type == 'MESH':
-            view_state = obj.hide_viewport
-            obj.hide_viewport = False
-            bpy.context.view_layer.objects.active = obj
-            obj.select_set(True)
-            bpy.ops.object.mode_set(mode='EDIT')
-            bpy.ops.mesh.select_all(action='SELECT')
-            logging.info(f"Triangulating {obj}")
-            bpy.ops.mesh.quads_convert_to_tris()
-            bpy.ops.object.mode_set(mode='OBJECT')
-            obj.select_set(False)
-            obj.hide_viewport = view_state
-
 
 @gin.configurable
 def save_meshes(scene_seed, output_folder, frame_range, resample_idx=False):
 
     if resample_idx is not None and resample_idx > 0:
         resample_scene(int_hash((scene_seed, resample_idx)))
-    
+
     triangulate_meshes()
-    
+
     for obj in bpy.data.objects:
         obj.hide_viewport = obj.hide_render
diff --git a/infinigen/core/placement/camera.py b/infinigen/core/placement/camera.py
index ea92abba7..d641adbbf 100644
--- a/infinigen/core/placement/camera.py
+++ b/infinigen/core/placement/camera.py
@@ -667,7 +667,7 @@ def save_camera_parameters(camera_ids, output_folder, frame, use_dof=False):
         bpy.context.scene.render.resolution_y,
         bpy.context.scene.render.resolution_x
     ))
-    T = np.asarray(camera_obj.matrix_world, dtype=np.float64) @ np.diag((1.,-1.,-1.,1.))
+    T = np.asarray(camera_obj.matrix_world, dtype=np.float64) @ np.diag((1.,-1.,-1.,1.)) # Y down Z forward (aka opencv)
     np.savez(output_file, K=np.asarray(K, dtype=np.float64), T=T, HW=height_width)
 
 if __name__ == "__main__":
diff --git a/infinigen/datagen/customgt/camera_view.cpp b/infinigen/datagen/customgt/camera_view.cpp
index 278de298d..6a050ae44 100644
--- a/infinigen/datagen/customgt/camera_view.cpp
+++ b/infinigen/datagen/customgt/camera_view.cpp
@@ -51,8 +51,8 @@ const Matrix4f FLIP_Y_Z = Eigen::Vector4f({1,-1,-1,1}).asDiagonal();
 
 template <int M, int N>
 Eigen::Matrix<float, M, N> load_matrix(const npz &camview, const std::string &key){
-    const auto blender_camera_pose_data = camview.read_data<double>(key);
-    const auto tmp = Eigen::Matrix<double, N, M>(blender_camera_pose_data.data());
+    const auto opengl_camera_pose_data = camview.read_data<double>(key);
+    const auto tmp = Eigen::Matrix<double, N, M>(opengl_camera_pose_data.data());
     return tmp.transpose().template cast<float>();
 }
 
@@ -60,25 +60,22 @@ CameraView::CameraView(const std::string fstr, const fs::path input_dir, const i
 {
     // Current Frame
     const fs::path current_frame_cam_path = input_dir / ("camview_"+frame_string+".npz");
-    const npz current_camview(current_frame_cam_path);
-    const Matrix4f blender_camera_pose = load_matrix<4, 4>(current_camview, "T") * FLIP_Y_Z; // TODO REMOVE
-    current_frame_view_matrix = glm::make_mat4(Matrix4f(blender_camera_pose.inverse()).data());
+    const npz current_camview(current_frame_cam_path); // Y down Z forward (aka opencv)
+    const Matrix4f opengl_camera_pose = load_matrix<4, 4>(current_camview, "T") * FLIP_Y_Z;
+    current_frame_view_matrix = glm::make_mat4(Matrix4f(opengl_camera_pose.inverse()).data()); // Y up Z back (aka blender/opengl)
 
     // Next Frame
     const fs::path next_frame_cam_path = increment_int_substr({"frame_([0-9]{4})", "camview_[0-9]+_[0-9]+_([0-9]{4})"}, current_frame_cam_path);
    const npz next_camview(next_frame_cam_path);
-    const Matrix4f next_blender_camera_pose = load_matrix<4, 4>(next_camview, "T") * FLIP_Y_Z; // TODO REMOVE
-    next_frame_view_matrix = glm::make_mat4(Matrix4f(next_blender_camera_pose.inverse()).data());
+    const Matrix4f next_opengl_camera_pose = load_matrix<4, 4>(next_camview, "T") * FLIP_Y_Z;
+    next_frame_view_matrix = glm::make_mat4(Matrix4f(next_opengl_camera_pose.inverse()).data());
 
-    // Set Camera Position
-    position = glm::make_vec3(blender_camera_pose.block<3, 1>(0, 3).data());
-
-    // Set WC -> Img Transformation
+    // Set CC -> Img Transformation
     const Matrix3f K_mat3x3 = load_matrix<3, 3>(current_camview, "K");
     Matrix4f K_mat = Matrix4f::Identity();
     buffer_over_image = 2;
     K_mat.block<2,3>(0, 0) = buffer_over_image * K_mat3x3.block<2,3>(0, 0);
-    wc2img = glm::make_mat4(Matrix4f(K_mat * FLIP_Y_Z * blender_camera_pose.inverse()).data());
+    cc2img = glm::make_mat4(K_mat.data()); // maps opencv wc to img
 
     fx = K_mat(0,0);
     fy = K_mat(1,1);
@@ -107,10 +104,9 @@ CameraView::CameraView(const std::string fstr, const fs::path input_dir, const i
 void CameraView::activateShader(Shader &shader) const {
     shader.use();
     shader.setMat4("projection", projection);
-    shader.setMat4("view", current_frame_view_matrix);
+    shader.setMat4("view", current_frame_view_matrix);// Y up Z back (aka blender/opengl)
     shader.setMat4("viewNext", next_frame_view_matrix);
-    shader.setMat4("wc2img", wc2img);
-    shader.setVec3("cameraPos", position);
+    shader.setMat4("cc2img", cc2img);
 }
 
 Tensor CameraView::project(const Tensor &cam_coords) const {
diff --git a/infinigen/datagen/customgt/camera_view.hpp b/infinigen/datagen/customgt/camera_view.hpp
index f120cb4be..801ab1a27 100644
--- a/infinigen/datagen/customgt/camera_view.hpp
+++ b/infinigen/datagen/customgt/camera_view.hpp
@@ -19,7 +19,7 @@ class CameraView
 
     // float calc_resolution_scale(const npz &camview) const;
     unsigned int create_framebuffer();
-    glm::mat4 wc2img, projection, current_frame_view_matrix, next_frame_view_matrix;
+    glm::mat4 cc2img, projection, current_frame_view_matrix, next_frame_view_matrix;
     glm::vec3 position;
     double fx, fy, cx, cy;
     float buffer_over_image; // should be >= 1
diff --git a/infinigen/datagen/customgt/glsl/spine.geom b/infinigen/datagen/customgt/glsl/spine.geom
index 994d9d6ad..5fdc511cd 100644
--- a/infinigen/datagen/customgt/glsl/spine.geom
+++ b/infinigen/datagen/customgt/glsl/spine.geom
@@ -3,10 +3,7 @@
 layout (lines_adjacency) in;
 layout (line_strip, max_vertices = 2) out;
 
-uniform mat4 wc2img;
-
-out vec3 normal;
-out vec3 interp_pos_wc;
+uniform mat4 cc2img;
 
 in VS_OUT {
     vec3 pos_wc;
@@ -19,7 +16,7 @@ in VS_OUT {
 } gs_in[];
 
 vec2 proj(vec3 v){
-    vec4 h = wc2img * vec4(v, 1.0);
+    vec4 h = cc2img * vec4(v, 1.0);
     return vec2(h[0] / abs(h[2]), h[1] / abs(h[2]));
 }
 
@@ -38,10 +35,10 @@ bool is_frontfacing(vec3 v1, vec3 v2, vec3 v3){
 }
 
 void main() {
-    vec3 v1 = gs_in[0].pos_wc; // A
-    vec3 v2 = gs_in[1].pos_wc; // B
-    vec3 v3 = gs_in[2].pos_wc;
-    vec3 v4 = gs_in[3].pos_wc;
+    vec3 v1 = gs_in[0].pos_cc; // A
+    vec3 v2 = gs_in[1].pos_cc; // B
+    vec3 v3 = gs_in[2].pos_cc;
+    vec3 v4 = gs_in[3].pos_cc;
 
     bool draw_boundary = (is_frontfacing(v1, v2, v3) != is_frontfacing(v2, v1, v4));
 
diff --git a/infinigen/datagen/customgt/glsl/wings.frag b/infinigen/datagen/customgt/glsl/wings.frag
index a1b5ce68c..633382369 100644
--- a/infinigen/datagen/customgt/glsl/wings.frag
+++ b/infinigen/datagen/customgt/glsl/wings.frag
@@ -2,14 +2,10 @@
 
 #version 440 core
 
-uniform vec3 cameraPos;
-
 uniform int object_index;
 
-in vec3 interp_pos_wc;
 in vec3 interp_pos_cc;
 in vec3 interp_pos_cc_next;
-in vec3 normal;
 in vec3 cc_normal;
 in float tri_area;
 in float px_area;
@@ -27,6 +23,12 @@ layout (location = 5) out ivec4 tag_segmentation;
 layout (location = 6) out ivec4 instance_segmentation;
 layout (location = 7) out vec4 geo_normal;
 
+mat4 cv_to_sn_convention = mat4(
+    vec4(1., 0., 0., 0.),
+    vec4(0., -1., 0., 0.),
+    vec4(0., 0., -1., 0.),
+    vec4(0., 0., 0., 1.));
+
 /*
 // No longer used
 layout (location = 3) out vec4 faceSize;
@@ -34,29 +36,29 @@ layout (location = 4) out vec4 pixelSize;
 */
 
 void main() {
-  vec3 updated_normal = normal;
-  if (dot((cameraPos - interp_pos_wc), updated_normal) < 0){
-    updated_normal = updated_normal * -1;
-  }
-
-  rasterized_cc = vec4(interp_pos_cc, 1.0);
-  if (has_flow > 0.99)
-    next_rasterized_cc = vec4(interp_pos_cc_next, 1.0);
-  else
-    next_rasterized_cc = vec4(0.0, 0.0, -1.0, 1.0);
-  tag_segmentation = ivec4(tag, 0);
-  instance_segmentation = ivec4(instance_id[0], instance_id[1], instance_id[2], 1);
-  object_segmentation = ivec4(object_index, 0, 0, 1);
-
-  geo_normal = vec4(normalize(-cc_normal), 1.0);
-  rasterized_occ_bounds = ivec4(0, 0, 0, 1);
-
-  rasterized_face_id = vec4(face_id, 1.0);
-
-  /*
-  // No longer used
-  faceSize = vec4(tri_area, 0.0, 0.0, 1.0);
-  pixelSize = vec4(px_area, 0.0, 0.0, 1.0);
-  */
+
+    rasterized_cc = vec4(interp_pos_cc, 1.0);
+    if (has_flow > 0.99)
+        next_rasterized_cc = vec4(interp_pos_cc_next, 1.0);
+    else
+        next_rasterized_cc = vec4(0.0, 0.0, -1.0, 1.0);
+    tag_segmentation = ivec4(tag, 0);
+    instance_segmentation = ivec4(instance_id[0], instance_id[1], instance_id[2], 1);
+    object_segmentation = ivec4(object_index, 0, 0, 1);
+
+    if (dot(interp_pos_cc, cc_normal) < 0){
+        geo_normal = cv_to_sn_convention * vec4(cc_normal, 1.0);
+    } else {
+        geo_normal = cv_to_sn_convention * vec4(-cc_normal, 1.0);
+    }
+    rasterized_occ_bounds = ivec4(0, 0, 0, 1);
+
+    rasterized_face_id = vec4(face_id, 1.0);
+
+    /*
+    // No longer used
+    faceSize = vec4(tri_area, 0.0, 0.0, 1.0);
+    pixelSize = vec4(px_area, 0.0, 0.0, 1.0);
+    */
 }
 
diff --git a/infinigen/datagen/customgt/glsl/wings.geom b/infinigen/datagen/customgt/glsl/wings.geom
index 84ba3b52c..b7b150983 100644
--- a/infinigen/datagen/customgt/glsl/wings.geom
+++ b/infinigen/datagen/customgt/glsl/wings.geom
@@ -3,9 +3,7 @@
 layout (lines_adjacency) in;
 layout (triangle_strip, max_vertices = 6) out;
 
-out vec3 normal;
 out vec3 cc_normal;
-out vec3 interp_pos_wc;
 out vec3 interp_pos_cc;
 out vec3 interp_pos_cc_next;
 out float tri_area;
@@ -15,7 +13,7 @@ out ivec3 face_id;
 out ivec3 instance_id;
 out ivec3 tag;
 
-uniform mat4 wc2img;
+uniform mat4 cc2img;
 
 in VS_OUT {
     vec3 pos_wc;
@@ -27,30 +25,14 @@ in VS_OUT {
     int tag;
 } gs_in[];
 
-vec2 proj(vec3 inp){
+vec2 proj(vec3 inp){ // expecting that inp is cam coordinates (ocv)
     vec4 tmp = vec4(inp, 1.0);
-    vec4 tmp2 = wc2img * tmp;
+    vec4 tmp2 = cc2img * tmp;
     return vec2(tmp2[0]/tmp2[2], tmp2[1]/tmp2[2]);
 }
 
-bool is_frontfacing(vec3 v1, vec3 v2, vec3 v3){
-    vec2 uv1 = proj(v1);
-    vec2 uv2 = proj(v2);
-    vec2 uv3 = proj(v3);
-
-    vec2 a = (uv2-uv1);
-    vec2 b = (uv3-uv2);
-    float winding = a.x * b.y - b.x * a.y;
-    return winding > 0;
-}
-
-vec3 to_z_B_y_U(vec3 v){
-    return vec3(v.x, -v.y, -v.z);
-}
-
 void save(int i){
     gl_Position = gl_in[i].gl_Position;
-    interp_pos_wc = gs_in[i].pos_wc;
     interp_pos_cc = gs_in[i].pos_cc;
     interp_pos_cc_next = gs_in[i].pos_cc_next;
     EmitVertex();
@@ -63,10 +45,10 @@ void main() {
 
     has_flow = float(gs_in[0].has_flow && gs_in[1].has_flow && gs_in[2].has_flow && gs_in[3].has_flow);
 
-    vec3 v1 = gs_in[0].pos_wc;
-    vec3 v2 = gs_in[1].pos_wc;
-    vec3 v3 = gs_in[2].pos_wc;
-    vec3 v4 = gs_in[3].pos_wc;
+    vec3 v1 = gs_in[0].pos_cc; // expecting that cc is in opencv [Y down Z forward]
+    vec3 v2 = gs_in[1].pos_cc;
+    vec3 v3 = gs_in[2].pos_cc;
+    vec3 v4 = gs_in[3].pos_cc;
 
     vec2 p1 = proj(v1);
     vec2 p2 = proj(v2);
@@ -77,28 +59,22 @@ void main() {
     vec2 c = p1 - p4;
 
     // First triangle
-    cc_normal = normalize(cross(to_z_B_y_U(gs_in[0].pos_cc-gs_in[1].pos_cc), to_z_B_y_U(gs_in[0].pos_cc-gs_in[2].pos_cc)));
     tri_area = length(cross(v1 - v2, v1 - v3))/2;
-    normal = normalize(cross(v1-v2, v1-v3));
     px_area = abs(a.x*b.y - a.y*b.x)/2;
     tag = ivec3(gs_in[0].tag, gs_in[1].tag, gs_in[2].tag);
 
-    if (!is_frontfacing(v1, v2, v3))
-        cc_normal = -cc_normal;
+    cc_normal = normalize(cross(v1 - v2, v1 - v3));
 
     save(0);
     save(1);
     save(2);
 
     // Second triangle
-    cc_normal = normalize(cross(to_z_B_y_U(gs_in[0].pos_cc-gs_in[1].pos_cc), to_z_B_y_U(gs_in[0].pos_cc-gs_in[3].pos_cc)));
     tri_area = length(cross(v1 - v2, v1 - v4))/2;
-    normal = normalize(cross(v1-v2, v1-v4));
     px_area = abs(a.x*c.y - a.y*c.x)/2;
     tag = ivec3(gs_in[0].tag, gs_in[1].tag, gs_in[3].tag);
 
-    if (!is_frontfacing(v1, v2, v4))
-        cc_normal = -cc_normal;
+    cc_normal = normalize(cross(v1 - v2, v1 - v4));
 
     save(0);
     save(1);
diff --git a/infinigen/datagen/customgt/glsl/wings.vert b/infinigen/datagen/customgt/glsl/wings.vert
index 28b7f5630..76e5950c2 100644
--- a/infinigen/datagen/customgt/glsl/wings.vert
+++ b/infinigen/datagen/customgt/glsl/wings.vert
@@ -15,39 +15,42 @@ uniform mat4 view;
 uniform mat4 viewNext;
 
 mat4 opengl_to_cv = mat4(
-  vec4(1., 0., 0., 0.),
-  vec4(0., -1., 0., 0.),
-  vec4(0., 0., -1., 0.),
-  vec4(0., 0., 0., 1.));
+    vec4(1., 0., 0., 0.),
+    vec4(0., -1., 0., 0.),
+    vec4(0., 0., -1., 0.),
+    vec4(0., 0., 0., 1.));
 
 out VS_OUT {
     vec3 pos_wc;
-  vec3 pos_cc;
-  vec3 pos_cc_next;
-  bool has_flow;
-  int vertex_id;
-  ivec3 instance_id;
-  int tag;
+    vec3 pos_cc;
+    vec3 pos_cc_next;
+    bool has_flow;
+    int vertex_id;
+    ivec3 instance_id;
+    int tag;
 } vs_out;
 
+vec3 pdiv(vec4 v){
+    return v.xyz / v.w;
+}
+
 void main() {
-  vec4 pos_wc = instanceMatrix * vec4(aPos, 1.0);
-  vs_out.pos_wc = vec3(pos_wc);
+    vec4 pos_wc_opengl = instanceMatrix * vec4(aPos, 1.0);
+    vs_out.pos_wc = pdiv(opengl_to_cv * pos_wc_opengl); // expecting that wc is in opencv
 
-  vec4 pos_cc_opengl = view * pos_wc;
-  vec4 pos_cc = opengl_to_cv * pos_cc_opengl;
-  vs_out.pos_cc = pos_cc.xyz / pos_cc.w;
+    vec4 pos_cc_opengl = view * pos_wc_opengl;
+    vs_out.pos_cc = pdiv(opengl_to_cv * pos_cc_opengl);
 
-  vs_out.has_flow = (abs(instanceMatrixNext[3][3]) > 1e-4);
+    vs_out.has_flow = (abs(instanceMatrixNext[3][3]) > 1e-4);
-  vec4 pos_wc_next = instanceMatrixNext * vec4(aPos_next, 1.0);
-  vec4 pos_cc_next = opengl_to_cv * viewNext * pos_wc_next;
-  vs_out.pos_cc_next = pos_cc_next.xyz / pos_cc_next.w;
+    vec4 pos_wc_next = instanceMatrixNext * vec4(aPos_next, 1.0);
+    vec4 pos_cc_next = opengl_to_cv * viewNext * pos_wc_next;
+    vs_out.pos_cc_next = pdiv(pos_cc_next);
 
-  gl_Position = projection * pos_cc_opengl;
+    gl_Position = projection * pos_cc_opengl;
 
-  vs_out.vertex_id = gl_VertexID;
-  vs_out.instance_id = instance_id;
+    vs_out.vertex_id = gl_VertexID;
+    vs_out.instance_id = instance_id;
 
-  vs_out.tag = tag;
+    vs_out.tag = tag;
 }
diff --git a/infinigen/datagen/customgt/main.cpp b/infinigen/datagen/customgt/main.cpp
index 9f033384f..7079cfb84 100644
--- a/infinigen/datagen/customgt/main.cpp
+++ b/infinigen/datagen/customgt/main.cpp
@@ -32,7 +32,7 @@
 #include "utils.hpp"
 #include "io.hpp"
 
-#define VERSION "1.43"
+#define VERSION "1.44"
 
 using std::cout, std::cerr, std::endl;
 
@@ -111,7 +111,7 @@ int main(int argc, char *argv[]) {
     const fs::path input_dir(program.get("--input_dir"));
     const fs::path output_dir(program.get("--output_dir"));
     if (input_dir.stem().string() == "x")
-        exit(174); // Custom error code for checking if EGL is working
+        exit(0); // Custom error code for checking if EGL is working
     assert_exists(input_dir);
     if (!fs::exists(output_dir)) fs::create_directory(output_dir);
 
diff --git a/infinigen/tools/export.py b/infinigen/tools/export.py
index 5b04bf8b9..c6ae6080b 100644
--- a/infinigen/tools/export.py
+++ b/infinigen/tools/export.py
@@ -519,7 +519,7 @@ def skipBake(obj):
     return False
 
 def triangulate_meshes():
-    logging.info("Triangulating Meshes")
+    logging.debug("Triangulating Meshes")
     for obj in bpy.context.scene.objects:
         if obj.type == 'MESH':
             view_state = obj.hide_viewport
@@ -528,7 +528,7 @@ def triangulate_meshes():
             obj.select_set(True)
             bpy.ops.object.mode_set(mode='EDIT')
             bpy.ops.mesh.select_all(action='SELECT')
-            logging.info(f"Triangulating {obj}")
+            logging.debug(f"Triangulating {obj}")
             bpy.ops.mesh.quads_convert_to_tris()
             bpy.ops.object.mode_set(mode='OBJECT')
             obj.select_set(False)
diff --git a/infinigen_examples/configs_indoor/base.gin b/infinigen_examples/configs_indoor/base.gin
index bfb6cff9b..b3130c227 100644
--- a/infinigen_examples/configs_indoor/base.gin
+++ b/infinigen_examples/configs_indoor/base.gin
@@ -50,7 +50,7 @@ nishita_lighting.sun_elevation = ("clip_gaussian", 40, 25, 6, 70)
 
 compose_indoors.lights_off_chance=0.2
 compose_indoors.skirting_floor_chance=0.7
-compose_indoors.skirting_ceiling_chance=0.2
+compose_indoors.skirting_ceiling_chance=0.0 # should be 0.2 when implementation fixed
 
 compose_indoors.near_distance = 60
 
diff --git a/infinigen_examples/configs_nature/scene_types_fluidsim/simulated_river.gin b/infinigen_examples/configs_nature/scene_types_fluidsim/simulated_river.gin
index f38852e70..12dc326e2 100644
--- a/infinigen_examples/configs_nature/scene_types_fluidsim/simulated_river.gin
+++ b/infinigen_examples/configs_nature/scene_types_fluidsim/simulated_river.gin
@@ -20,10 +20,10 @@ camera_pose_proposal.pitch = -120
 camera_pose_proposal.roll = 180
 camera_pose_proposal.yaw = -178
 
-assets.boulder.create_placeholder.boulder_scale = 1
+boulder.create_placeholder.boulder_scale = 1
 
 LandTiles.land_process = None
 
-core.render.hide_water = True
+render.hide_water = True
 compute_base_views.min_candidates_ratio = 1
 walk_same_altitude.ignore_missed_rays = True
diff --git a/scripts/install/compile_opengl.sh b/scripts/install/compile_opengl.sh
index 0f2158205..be5d62237 100644
--- a/scripts/install/compile_opengl.sh
+++ b/scripts/install/compile_opengl.sh
@@ -7,7 +7,7 @@ cd ./infinigen/datagen/customgt
 cmake -S . -Bbuild -DCMAKE_C_COMPILER=/usr/bin/gcc -DCMAKE_BUILD_TYPE=Release
 cmake --build build --target all
 ./build/customgt -in x -out x --frame 0
-if [ $? -eq 174 ]; then
+if [ $? -eq 0 ]; then
    echo "OpenGL/EGL ground truth is working."
 else
    echo "WARNING: OpenGL/EGL is not supported on this machine. If you are running from a cluster head-node, this is likely not an issue."
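
Note on the camera-convention changes above: `save_camera_parameters` in `camera.py` writes `T` as Blender's `matrix_world` right-multiplied by `np.diag((1.,-1.,-1.,1.))`, i.e. a camera-to-world transform in the OpenCV convention (Y down, Z forward), and `camera_view.cpp` multiplies `T` by `FLIP_Y_Z` to recover the OpenGL/Blender pose before inverting it into a view matrix, while `cc2img` now maps camera coordinates (not world coordinates) to image space. A minimal NumPy sketch of that relationship, assuming only the `K`/`T` keys shown in the hunks above (the file name and example point are made up):

```python
import numpy as np

# Hypothetical camview file; the keys match what save_camera_parameters saves above.
cam = np.load("camview_0001.npz")          # file name is illustrative only
K, T = cam["K"], cam["T"]                  # T: camera-to-world, OpenCV convention

FLIP_Y_Z = np.diag([1.0, -1.0, -1.0, 1.0])

# Recover the Blender/OpenGL camera pose (Y up, Z back), as CameraView does,
# then invert it to get the view matrix used by the shaders.
opengl_pose = T @ FLIP_Y_Z                 # equals Blender's matrix_world
view_matrix = np.linalg.inv(opengl_pose)   # world -> OpenGL camera coordinates

# Project a world-space point using the OpenCV-style intrinsics K.
p_world = np.array([0.0, 0.0, 1.0, 1.0])   # made-up point
p_cam_cv = np.linalg.inv(T) @ p_world      # world -> OpenCV camera coords (Y down, Z forward)
uv = K @ p_cam_cv[:3]
u, v = uv[0] / uv[2], uv[1] / uv[2]        # pixel coordinates, valid when p_cam_cv[2] > 0
print(u, v)
```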
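Note on the wall-normal fix above: `wings.geom` now computes `cc_normal` directly from the triangle edges in OpenCV camera coordinates, and `wings.frag` flips it toward the camera and applies `cv_to_sn_convention` (a Y/Z sign flip) before writing `geo_normal`. The sketch below mirrors that logic outside the shaders; the function name and vertex values are illustrative only:

```python
import numpy as np

def face_normal_cc(v1, v2, v3, p):
    """Sketch of the updated wings.geom/wings.frag normal logic (not the shader itself).

    v1, v2, v3: triangle vertices in OpenCV camera coords (X right, Y down, Z forward).
    p: the shaded point in the same coords (interp_pos_cc in the fragment shader).
    """
    n = np.cross(v1 - v2, v1 - v3)
    n = n / np.linalg.norm(n)                 # cc_normal in wings.geom
    if np.dot(p, n) >= 0:                     # wings.frag flips the normal toward the camera
        n = -n
    cv_to_sn = np.diag([1.0, -1.0, -1.0])     # cv_to_sn_convention, without the homogeneous row
    return cv_to_sn @ n                       # normal in the surface-normal output convention

# Example: a triangle 2m in front of the camera, roughly facing it.
v1, v2, v3 = np.array([0., 0., 2.]), np.array([1., 0., 2.]), np.array([0., 1., 2.])
print(face_normal_cc(v1, v2, v3, p=v1))       # points back at the viewer in the output convention
```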