[Runtime Bug]: Meshes lose their normal information, appearing angular / unsmoothed
Describe the bug
During runtime, non-replaced meshes appear to lack mesh normals, which makes them render angular / jagged.
On the left is the mesh as shown in the toolkit; on the right is the same mesh imported into Blender. In-game with Remix active, the mesh looks angular like the one in the toolkit, whereas the default mesh in the original game has smooth normals.
After importing the .usd model from a capture into Blender, re-exporting it, and using it as a replacement mesh, the in-game (runtime) mesh looked correct, with smooth normals.
I suspect the runtime's non-replaced meshes carry no normal information.
The first text file / usda screenshot is a captured .usd converted to .usda, otherwise untouched; it does not appear to have any data in the normals attribute. The second one is the same capture after importing it into Blender and exporting it back out; it does contain normals data.
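For reference, here is a quick way to check whether a capture has authored normals. This is a minimal sketch: it assumes a Python environment with the pxr (usd-core) package available, and the capture file name is just a placeholder.

from pxr import Usd, UsdGeom

# Placeholder path - substitute a real file from captures/meshes
stage = Usd.Stage.Open("mesh_XXXXXXXX.usd")
for prim in stage.Traverse():
    if prim.IsA(UsdGeom.Mesh):
        mesh = UsdGeom.Mesh(prim)
        normals = mesh.GetNormalsAttr().Get()
        count = len(normals) if normals else 0
        print(f"{prim.GetPath()}: {count} authored normals")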
How do you reproduce the bug?
Load into the game without replacement meshes and observe the meshes.
What is the expected behavior?
The runtime should render meshes with the same mesh normals / smoothing as the original mesh, or with a default or configurable smoothing option.
Version
1.0.0
Not sure if it's helpful, but a temporary (rather cumbersome and disk-space-heavy) brute-force workaround was to generate the normals, write them into the captured .usd files, and then replace every single mesh with these updated copies. This uses a lot of disk space and makes loading quite slow. The scripts below roughly automate that.
Python script to generate normals
BACK UP EVERYTHING, then put the script in the captures/meshes folder and run it.
import os
import shutil
from pxr import Usd, UsdGeom, Vt, Gf
import numpy as np
from concurrent.futures import ProcessPoolExecutor, as_completed
import multiprocessing
import time
import logging
logging.basicConfig(
    filename='usd_processing.log',
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

def calculate_face_normals(points, face_indices, counts):
    """Calculate a geometric normal for each face."""
    face_normals = []
    current_index = 0
    try:
        for face_count in counts:
            if current_index + face_count > len(face_indices):
                raise IndexError(f"Face indices exceed buffer: {current_index + face_count} > {len(face_indices)}")
            # Get vertices for this face (only the first three are used, so
            # non-planar n-gons get an approximate normal)
            vert_indices = face_indices[current_index:current_index + face_count]
            v0 = Gf.Vec3f(points[vert_indices[0]])
            v1 = Gf.Vec3f(points[vert_indices[1]])
            v2 = Gf.Vec3f(points[vert_indices[2]])
            # Calculate the normal from two edges of the triangle
            edge1 = v1 - v0
            edge2 = v2 - v0
            normal = Gf.Cross(edge1, edge2)
            # Handle degenerate triangles
            if normal.GetLength() < 1e-6:
                normal = Gf.Vec3f(0, 0, 1)
            else:
                normal.Normalize()
            face_normals.append(normal)
            current_index += face_count
    except Exception as e:
        raise RuntimeError(f"Error calculating face normals: {str(e)}")
    return face_normals

def calculate_smooth_normals(points, face_indices, counts, face_normals):
    """Calculate smooth normals for each vertex of each face."""
    smooth_normals = []
    vertex_normals = [[] for _ in range(len(points))]
    current_index = 0
    try:
        # First accumulate the adjacent face normals for each vertex
        for face_idx, count in enumerate(counts):
            face_normal = face_normals[face_idx]
            for i in range(count):
                vert_idx = face_indices[current_index + i]
                vertex_normals[vert_idx].append(face_normal)
            current_index += count
        # Now calculate smooth normals for each vertex in each face
        current_index = 0
        for face_idx, count in enumerate(counts):
            for i in range(count):
                vert_idx = face_indices[current_index + i]
                normals = vertex_normals[vert_idx]
                # Average the adjacent face normals
                avg_normal = Gf.Vec3f(0, 0, 0)
                for normal in normals:
                    avg_normal += normal
                if len(normals) > 0:
                    avg_normal /= len(normals)
                    if avg_normal.GetLength() < 1e-6:
                        avg_normal = Gf.Vec3f(0, 0, 1)
                    else:
                        avg_normal.Normalize()
                else:
                    avg_normal = Gf.Vec3f(0, 0, 1)
                smooth_normals.append(avg_normal)
            current_index += count
    except Exception as e:
        raise RuntimeError(f"Error calculating smooth normals: {str(e)}")
    return smooth_normals

def process_mesh(stage, mesh_path):
    """Process a single mesh to smooth its normals."""
    try:
        mesh = UsdGeom.Mesh.Get(stage, mesh_path)
        if not mesh:
            return False
        points = mesh.GetPointsAttr().Get()
        counts = mesh.GetFaceVertexCountsAttr().Get()
        indices = mesh.GetFaceVertexIndicesAttr().Get()
        if not all(x is not None for x in [points, counts, indices]):
            logging.warning(f"Missing required attributes in {mesh_path}")
            return False
        # Calculate face normals then smooth normals
        face_normals = calculate_face_normals(points, indices, counts)
        smooth_normals = calculate_smooth_normals(points, indices, counts, face_normals)
        # Author the normals with faceVarying interpolation (one normal per face-vertex)
        normals_attr = mesh.GetNormalsAttr()
        normals_attr.Set(Vt.Vec3fArray(smooth_normals))
        mesh.SetNormalsInterpolation("faceVarying")
        return True
    except Exception as e:
        logging.error(f"Error processing mesh {mesh_path}: {str(e)}")
        return False

def process_usd_file(input_path, output_dir):
    """Process a single USD file."""
    filename = os.path.basename(input_path)
    try:
        # Setup paths
        output_path = os.path.join(output_dir, filename)
        # Create output directory
        os.makedirs(output_dir, exist_ok=True)
        # Copy input file to output
        shutil.copy2(input_path, output_path)
        # Open stage for editing
        stage = Usd.Stage.Open(output_path)
        if not stage:
            logging.error(f"Failed to open stage: {output_path}")
            return (filename, 0, 0)
        # Process all meshes
        meshes_processed = 0
        meshes_succeeded = 0
        for prim in stage.Traverse():
            if prim.IsA(UsdGeom.Mesh):
                meshes_processed += 1
                if process_mesh(stage, prim.GetPath()):
                    meshes_succeeded += 1
        stage.Save()
        return (filename, meshes_succeeded, meshes_processed)
    except Exception as e:
        logging.error(f"Error processing {input_path}: {str(e)}")
        return (filename, 0, 0)

def main():
    """Main function optimized for processing many files."""
    start_time = time.time()
    # Setup paths
    current_dir = os.getcwd()
    output_dir = os.path.join(current_dir, "output")
    # Get USD files in current directory only
    usd_files = [os.path.join(current_dir, f) for f in os.listdir(current_dir)
                 if f.lower().endswith(('.usd', '.usda', '.usdc'))]
    if not usd_files:
        print("No USD files found in current directory")
        return
    # Configure parallel processing
    num_cores = max(1, multiprocessing.cpu_count() - 1)
    print(f"Processing {len(usd_files)} files using {num_cores} cores...")
    # Process files in parallel
    results = []
    with ProcessPoolExecutor(max_workers=num_cores) as executor:
        future_to_file = {
            executor.submit(process_usd_file, f, output_dir): f
            for f in usd_files
        }
        # Process results as they complete
        for future in as_completed(future_to_file):
            filename, succeeded, processed = future.result()
            results.append((filename, succeeded, processed))
            print(f"Completed {filename}: {succeeded}/{processed} meshes processed")
    # Report results
    total_files = len(results)
    total_meshes = sum(p for _, _, p in results)
    successful_meshes = sum(s for _, s, _ in results)
    elapsed_time = time.time() - start_time
    print(f"\nProcessing complete:")
    print(f"Files processed: {total_files}")
    print(f"Meshes processed: {total_meshes}")
    print(f"Meshes succeeded: {successful_meshes}")
    print(f"Total time: {elapsed_time:.2f} seconds")
    print(f"Average time per file: {elapsed_time/total_files:.2f} seconds")
    print(f"See usd_processing.log for detailed information")


if __name__ == "__main__":
    main()
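To sanity-check the output files, something along these lines can confirm that each mesh now has one normal per face-vertex, which is what faceVarying interpolation expects. This is a minimal sketch under the same pxr assumption; the path is a placeholder.

from pxr import Usd, UsdGeom

# Placeholder path - substitute a file from the generated output folder
stage = Usd.Stage.Open("output/mesh_XXXXXXXX.usd")
for prim in stage.Traverse():
    if prim.IsA(UsdGeom.Mesh):
        mesh = UsdGeom.Mesh(prim)
        normals = mesh.GetNormalsAttr().Get()
        counts = mesh.GetFaceVertexCountsAttr().Get()
        expected = sum(counts) if counts else 0
        actual = len(normals) if normals else 0
        print(f"{prim.GetPath()}: {actual} normals, expected {expected} ({mesh.GetNormalsInterpolation()})")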
Python script to replace meshes in project
Back up everything, then put the script in the project folder (the same folder as mod.usda) and run it.
import os
import shutil
import hashlib
import json
import concurrent.futures
from pathlib import Path
from tqdm import tqdm
import logging
from typing import List, Tuple, Set, Dict
import sys
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('mesh_replacement.log'),
        logging.StreamHandler(sys.stdout)
    ]
)

class FileProcessor:
    def __init__(self, project_dir: str):
        self.project_dir = Path(project_dir)
        self.source_dir = self.project_dir / 'deps' / 'captures' / 'meshes' / 'output'
        self.target_dir = self.project_dir / 'assets' / 'captured_meshes'
        self.state_file = self.target_dir / '.processed_files.json'
        self.processed_files: Set[str] = set()
        self.failed_files: Set[str] = set()
        self.existing_files: Dict[str, str] = {}  # filename: hash

    def load_state(self) -> None:
        """Load previously processed files from state file."""
        try:
            if self.state_file.exists():
                with self.state_file.open('r') as f:
                    self.existing_files = json.load(f)
                logging.info(f"Loaded {len(self.existing_files)} previously processed files")
        except Exception as e:
            logging.error(f"Failed to load state file: {str(e)}")
            self.existing_files = {}

    def save_state(self) -> None:
        """Save processed files state."""
        try:
            # Merge new processed files with existing state
            for file in self.processed_files:
                file_path = self.source_dir / file
                self.existing_files[file] = self.get_file_hash(file_path)
            # Create target directory if it doesn't exist
            self.state_file.parent.mkdir(parents=True, exist_ok=True)
            with self.state_file.open('w') as f:
                json.dump(self.existing_files, f, indent=4)
        except Exception as e:
            logging.error(f"Failed to save state file: {str(e)}")

    def verify_directories(self) -> bool:
        """Verify required directories exist and are accessible."""
        try:
            if not self.source_dir.exists():
                logging.error(f"Source directory not found: {self.source_dir}")
                return False
            self.target_dir.mkdir(parents=True, exist_ok=True)
            return True
        except Exception as e:
            logging.error(f"Directory verification failed: {str(e)}")
            return False

    def get_file_hash(self, file_path: Path) -> str:
        """Calculate MD5 hash of file using chunks for memory efficiency."""
        hasher = hashlib.md5()
        with file_path.open('rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                hasher.update(chunk)
        return hasher.hexdigest()

    def process_single_file(self, mesh_file: Path) -> Tuple[bool, str]:
        """Process a single USD file and return success status and error message."""
        try:
            base_name = mesh_file.stem
            replace_file = f"{base_name}_replace.usd"
            target_path = self.target_dir / replace_file
            # Copy file
            shutil.copy2(mesh_file, target_path)
            # Create meta file
            file_hash = self.get_file_hash(target_path)
            meta_content = {
                "base_hash": file_hash,
                "validation_passed": True,
                "validation_extensions": [
                    {
                        "id": "omni.flux.validator.factory-2.7.1",
                        "package_id": "omni.flux.validator.factory-2.7.1",
                        "version": [2, 7, 1, "", ""],
                        "enabled": True,
                        "name": "omni.flux.validator.factory",
                        "title": "Flux Validator Factory"
                    }
                ]
            }
            meta_path = target_path.with_suffix('.usd.meta')
            with meta_path.open('w') as f:
                json.dump(meta_content, f, indent=4)
            return True, ""
        except Exception as e:
            return False, str(e)

    def get_new_files(self) -> List[Path]:
        """Get list of new or modified USD files that need processing."""
        new_files = []
        for file_path in self.source_dir.glob('*.usd'):
            file_name = file_path.name
            current_hash = self.get_file_hash(file_path)
            # Check if file is new or modified
            if (file_name not in self.existing_files or
                    self.existing_files[file_name] != current_hash):
                new_files.append(file_path)
        return new_files

    def process_files_parallel(self) -> bool:
        """Process new USD files in parallel using a thread pool."""
        try:
            # Load previous state
            self.load_state()
            # Get new or modified files
            mesh_files = self.get_new_files()
            if not mesh_files:
                logging.info("No new files to process")
                return True
            logging.info(f"Processing {len(mesh_files)} new/modified mesh files...")
            # Create thread pool with a reasonable number of workers
            max_workers = min(32, (os.cpu_count() or 1) * 2)
            with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
                futures = {executor.submit(self.process_single_file, file): file
                           for file in mesh_files}
                with tqdm(total=len(mesh_files), desc="Processing files") as pbar:
                    for future in concurrent.futures.as_completed(futures):
                        file = futures[future]
                        try:
                            success, error = future.result()
                            if success:
                                self.processed_files.add(file.name)
                            else:
                                self.failed_files.add(file.name)
                                logging.error(f"Failed to process {file.name}: {error}")
                        except Exception as e:
                            self.failed_files.add(file.name)
                            logging.error(f"Exception processing {file.name}: {str(e)}")
                        pbar.update(1)
            # Save new state
            self.save_state()
            return len(self.failed_files) == 0
        except Exception as e:
            logging.error(f"Parallel processing failed: {str(e)}")
            return False

    def create_captured_meshes_layer(self) -> bool:
        """Create the captured_meshes.usda layer file."""
        try:
            content = '''#usda 1.0
over "RootNode"
{
    over "meshes"
    {
'''
            # Include all successfully processed files (both existing and new)
            all_files = set(self.existing_files.keys()) | self.processed_files
            for mesh_file in sorted(all_files):
                base_name = Path(mesh_file).stem
                content += f'''        over "{base_name}" (
            references = None
        )
        {{
            def Xform "ref_{hashlib.md5(base_name.encode()).hexdigest()[:32]}" (
                prepend references = @./assets/captured_meshes/{base_name}_replace.usd@
            )
            {{
                custom bool IsRemixRef = 1
                double3 xformOp:rotateXYZ = (0, 0, 0)
                double3 xformOp:scale = (1, 1, 1)
                double3 xformOp:translate = (0, 0, 0)
                uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:rotateXYZ", "xformOp:scale"]
            }}
        }}
'''
            content += '''    }
}
'''
            layer_path = self.project_dir / 'captured_meshes.usda'
            layer_path.write_text(content)
            return True
        except Exception as e:
            logging.error(f"Failed to create captured_meshes layer: {str(e)}")
            return False

    def update_mod_usda(self) -> bool:
        """Update mod.usda to include the captured_meshes layer."""
        try:
            mod_path = self.project_dir / 'mod.usda'
            if not mod_path.exists():
                logging.error("mod.usda not found")
                return False
            content = mod_path.read_text()
            if '@./captured_meshes.usda@' not in content:
                # Find the subLayers section and add our layer
                if 'subLayers = [' in content:
                    content = content.replace('subLayers = [',
                                              'subLayers = [\n        @./captured_meshes.usda@,')
                    mod_path.write_text(content)
                    logging.info("Added captured_meshes.usda to mod.usda subLayers")
                else:
                    logging.error("Could not find subLayers section in mod.usda")
                    return False
            return True
        except Exception as e:
            logging.error(f"Failed to update mod.usda: {str(e)}")
            return False

def main():
    # Initialize processor with current directory
    processor = FileProcessor(os.getcwd())
    # Verify directories
    if not processor.verify_directories():
        return
    # Process files in parallel
    if not processor.process_files_parallel():
        logging.error("Some files failed to process. Check the log for details.")
    # Create/update layer files
    if processor.processed_files or processor.existing_files:
        if processor.create_captured_meshes_layer():
            logging.info("Created captured_meshes.usda layer")
        if processor.update_mod_usda():
            logging.info("Updated mod.usda")
    # Report results
    if processor.processed_files:
        logging.info(f"Processing complete: {len(processor.processed_files)} new files processed, "
                     f"{len(processor.failed_files)} failed")
        if processor.failed_files:
            logging.info("Failed files: " + ", ".join(sorted(processor.failed_files)))
    else:
        logging.info("No new files to process")


if __name__ == "__main__":
    main()
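After running it, the sublayer entry can be double-checked without opening the toolkit. A minimal sketch, assuming it is run from the project folder and pxr is available:

from pxr import Sdf

# Run from the project folder (next to mod.usda)
layer = Sdf.Layer.FindOrOpen("mod.usda")
if layer:
    print(list(layer.subLayerPaths))  # should include ./captured_meshes.usda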
Related: https://github.com/NVIDIAGameWorks/rtx-remix/issues/840
Hey @BinqAdams - can you give us a description of your workflow from asset creation to replacement in-game? Knowing that will help us pin down the issue. Thanks!
Hello! The issue only seems to affect non-replaced meshes.
Hey @BinqAdams, thanks for the update! We've filed REMIX-4487 for internal investigation.