From 7c750dcded7df1c758fbd51d3d4b3f096d11a743 Mon Sep 17 00:00:00 2001
From: Nathan Vegdahl
Date: Tue, 2 Aug 2022 19:41:23 -0700
Subject: [PATCH] Directly specify bucket size instead of inferring from
 sample count.

Since we aren't doing breadth-first ray tracing anymore, this makes a lot
more sense.
---
 psychoblend/__init__.py |  6 +++---
 psychoblend/render.py   |  4 ++--
 psychoblend/ui.py       |  2 +-
 src/main.rs             | 19 +++++++++----------
 src/renderer.rs         | 43 +++++++++++++++----------------------------
 5 files changed, 30 insertions(+), 44 deletions(-)

diff --git a/psychoblend/__init__.py b/psychoblend/__init__.py
index ab718f2..b0fd999 100644
--- a/psychoblend/__init__.py
+++ b/psychoblend/__init__.py
@@ -40,9 +40,9 @@ class RenderPsychopathSettingsScene(PropertyGroup):
         min=1, max=65536, default=16
         )
 
-    max_samples_per_bucket = IntProperty(
-        name="Max Samples Per Bucket", description="How many samples to simultaneously calculate per thread; indirectly determines bucket size",
-        min=1, max=2**28, soft_max=2**16, default=4096
+    bucket_size = IntProperty(
+        name="Bucket Size", description="The height and width of each render bucket in pixels.",
+        min=1, max=4096, soft_max=256, default=32
         )
 
     dicing_rate = FloatProperty(
diff --git a/psychoblend/render.py b/psychoblend/render.py
index 3f559a5..f3eb576 100644
--- a/psychoblend/render.py
+++ b/psychoblend/render.py
@@ -45,9 +45,9 @@ class PsychopathRender(bpy.types.RenderEngine):
         if crop != None:
             args += ["--crop", str(crop[0]), str(self.size_y - crop[3]), str(crop[2] - 1), str(self.size_y - crop[1] - 1)]
         if use_stdin:
-            args += ["--spb", str(scene.psychopath.max_samples_per_bucket), "--serialized_output", "--use_stdin"]
+            args += ["--bucket_size", str(scene.psychopath.bucket_size), "--serialized_output", "--use_stdin"]
         else:
-            args += ["--spb", str(scene.psychopath.max_samples_per_bucket), "--serialized_output", "-i", psy_filepath]
+            args += ["--bucket_size", str(scene.psychopath.bucket_size), "--serialized_output", "-i", psy_filepath]
 
         # Start Rendering!
         try:
diff --git a/psychoblend/ui.py b/psychoblend/ui.py
index 6ebae8b..5bc2bae 100644
--- a/psychoblend/ui.py
+++ b/psychoblend/ui.py
@@ -47,7 +47,7 @@ class RENDER_PT_psychopath_render_settings(PsychopathPanel, bpy.types.Panel):
         col.prop(scene.psychopath, "shutter_end")
 
         col.label(text="Performance")
-        col.prop(scene.psychopath, "max_samples_per_bucket")
+        col.prop(scene.psychopath, "bucket_size")
 
 
 class RENDER_PT_psychopath_export_settings(PsychopathPanel, bpy.types.Panel):
diff --git a/src/main.rs b/src/main.rs
index e7f751e..45c9315 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -89,11 +89,11 @@ fn main() {
                 }),
         )
         .arg(
-            Arg::with_name("max_bucket_samples")
+            Arg::with_name("bucket_size")
                 .short("b")
-                .long("spb")
+                .long("bucket_size")
                 .value_name("N")
-                .help("Target number of samples per bucket (determines bucket size)")
+                .help("Height and width of each render bucket in pixels.")
                 .takes_value(true)
                 .validator(|s| {
                     usize::from_str(&s)
@@ -258,12 +258,11 @@ fn main() {
         r.spp = usize::from_str(spp).unwrap();
     }
 
-    let max_samples_per_bucket =
-        if let Some(max_samples_per_bucket) = args.value_of("max_bucket_samples") {
-            u32::from_str(max_samples_per_bucket).unwrap()
-        } else {
-            4096
-        };
+    let bucket_size = if let Some(bucket_size) = args.value_of("bucket_size") {
+        u32::from_str(bucket_size).unwrap()
+    } else {
+        32
+    };
 
     let thread_count = if let Some(threads) = args.value_of("threads") {
         u32::from_str(threads).unwrap()
@@ -279,7 +278,7 @@ fn main() {
         println!("Rendering scene with {} threads...", thread_count);
     }
     let (mut image, rstats) = r.render(
-        max_samples_per_bucket,
+        bucket_size,
         crop,
         thread_count,
         args.is_present("serialized_output"),
diff --git a/src/renderer.rs b/src/renderer.rs
index db654b6..781f6f7 100644
--- a/src/renderer.rs
+++ b/src/renderer.rs
@@ -59,7 +59,7 @@ impl RenderStats {
 impl<'a> Renderer<'a> {
     pub fn render(
         &self,
-        max_samples_per_bucket: u32,
+        bucket_size: u32,
         crop: Option<(u32, u32, u32, u32)>,
         thread_count: u32,
         do_blender_output: bool,
@@ -82,13 +82,13 @@ impl<'a> Renderer<'a> {
         // Calculate dimensions and coordinates of what we're rendering. This
         // accounts for cropping.
         let (width, height, start_x, start_y) = if let Some((x1, y1, x2, y2)) = crop {
-            let x1 = min(x1 as usize, img_width - 1);
-            let y1 = min(y1 as usize, img_height - 1);
-            let x2 = min(x2 as usize, img_width - 1);
-            let y2 = min(y2 as usize, img_height - 1);
+            let x1 = min(x1, img_width as u32 - 1);
+            let y1 = min(y1, img_height as u32 - 1);
+            let x2 = min(x2, img_width as u32 - 1);
+            let y2 = min(y2, img_height as u32 - 1);
             (x2 - x1 + 1, y2 - y1 + 1, x1, y1)
         } else {
-            (img_width, img_height, 0, 0)
+            (img_width as u32, img_height as u32, 0, 0)
         };
 
         // Render
@@ -105,7 +105,7 @@ impl<'a> Renderer<'a> {
                 jq,
                 ajq,
                 img,
-                width * height,
+                width as usize * height as usize,
                 pixrenref,
                 cstats,
                 do_blender_output,
@@ -117,23 +117,10 @@ impl<'a> Renderer<'a> {
             print!("0.00%");
             let _ = io::stdout().flush();
 
-            // Determine bucket size based on the per-thread maximum number of samples to
-            // calculate at a time.
-            let (bucket_w, bucket_h) = {
-                let target_pixels_per_bucket = max_samples_per_bucket as f64 / self.spp as f64;
-                let target_bucket_dim = if target_pixels_per_bucket.sqrt() < 1.0 {
-                    1usize
-                } else {
-                    target_pixels_per_bucket.sqrt() as usize
-                };
-
-                (target_bucket_dim, target_bucket_dim)
-            };
-
             // Populate job queue
             let bucket_n = {
-                let bucket_count_x = ((width / bucket_w) + 1) as u32;
-                let bucket_count_y = ((height / bucket_h) + 1) as u32;
+                let bucket_count_x = (width / bucket_size) + 1;
+                let bucket_count_y = (height / bucket_size) + 1;
                 let larger = cmp::max(bucket_count_x, bucket_count_y);
                 let pow2 = upper_power_of_two(larger);
                 pow2 * pow2
@@ -141,17 +128,17 @@ impl<'a> Renderer<'a> {
 
             for hilbert_d in 0..bucket_n {
                 let (bx, by) = hilbert::decode(hilbert_d);
-                let x = bx as usize * bucket_w;
-                let y = by as usize * bucket_h;
+                let x = bx * bucket_size;
+                let y = by * bucket_size;
                 let w = if width >= x {
-                    min(bucket_w, width - x)
+                    min(bucket_size, width - x)
                 } else {
-                    bucket_w
+                    bucket_size
                 };
                 let h = if height >= y {
-                    min(bucket_h, height - y)
+                    min(bucket_size, height - y)
                 } else {
-                    bucket_h
+                    bucket_size
                 };
                 if x < width && y < height && w > 0 && h > 0 {
                     job_queue.push(BucketJob {
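For readers skimming the diff, the arithmetic being deleted is worth spelling out. The standalone Rust sketch below is not part of the patch (the function names are invented for illustration); it contrasts the old inference of the bucket edge length from the per-bucket sample budget with the new direct setting. With the old defaults of 4096 samples per bucket at 16 samples per pixel, the renderer ended up with 16x16-pixel buckets; the new default is simply a fixed 32x32 bucket that no longer varies with the sample count.

// Standalone sketch, not from the repository: `old_bucket_dim` mirrors the
// inference logic removed from renderer.rs; `new_bucket_dim` is trivially
// the user-specified value introduced by this patch.

fn old_bucket_dim(max_samples_per_bucket: u32, spp: u32) -> u32 {
    // Old behavior: choose a square bucket whose pixel count keeps the
    // total samples per bucket near the requested budget.
    let target_pixels_per_bucket = max_samples_per_bucket as f64 / spp as f64;
    (target_pixels_per_bucket.sqrt() as u32).max(1)
}

fn new_bucket_dim(bucket_size: u32) -> u32 {
    // New behavior: the bucket edge length in pixels is specified directly.
    bucket_size
}

fn main() {
    // Old defaults: 4096 samples per bucket at 16 spp -> 16x16 pixel buckets.
    assert_eq!(old_bucket_dim(4096, 16), 16);
    // New default: a fixed 32x32 pixel bucket, independent of spp.
    assert_eq!(new_bucket_dim(32), 32);
}

Coupling the bucket dimensions to the sample count made sense when a whole bucket's worth of samples was traced breadth-first; with that gone (per the commit message), a directly chosen bucket size is simpler and gives predictable bucket dimensions regardless of the sample count.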