Implement simple screen-space blue-noise diffusion sampling.
From the paper "Screen-Space Blue-Noise Diffusion of Monte Carlo Sampling Error via Hierarchical Ordering of Pixels" by Ahmed et al.
parent ea4ba81110
commit e2044e6579
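In short: every pixel draws its samples from one globally shuffled Sobol sequence, and each pixel's run of spp samples is placed along a space-filling curve, so consecutive runs land in nearby pixels and the Monte Carlo error diffuses across the screen as blue noise. Below is a minimal, self-contained sketch of that indexing idea (illustration only, not code from this commit). It uses a Morton (Z-order) curve as a stand-in for the Hilbert curve the renderer gets from hilbert::xy2d(), and the names morton_xy2d / global_sample_index are made up for the sketch.

    /// Interleave the bits of x and y into a Morton (Z-order) index.
    /// A stand-in for the Hilbert ordering used by the commit.
    fn morton_xy2d(x: u32, y: u32) -> u32 {
        fn spread(mut v: u32) -> u32 {
            v &= 0x0000ffff;
            v = (v | (v << 8)) & 0x00ff00ff;
            v = (v | (v << 4)) & 0x0f0f0f0f;
            v = (v | (v << 2)) & 0x33333333;
            v = (v | (v << 1)) & 0x55555555;
            v
        }
        spread(x) | (spread(y) << 1)
    }

    /// Index into the global sample sequence for sample `si` of pixel `(x, y)`:
    /// each pixel owns a contiguous run of `spp` samples, and runs are handed
    /// out in space-filling-curve order.
    fn global_sample_index(x: u32, y: u32, si: u32, spp: u32) -> u32 {
        morton_xy2d(x, y).wrapping_mul(spp).wrapping_add(si)
    }

    fn main() {
        // Adjacent pixels get adjacent runs of the global sequence.
        for (x, y) in [(0u32, 0u32), (1, 0), (0, 1), (1, 1)] {
            let start = global_sample_index(x, y, 0, 16);
            println!("pixel ({}, {}) -> samples {}..{}", x, y, start, start + 16);
        }
    }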
Cargo.lock (generated)

@@ -589,9 +589,9 @@ checksum = "1d51f5df5af43ab3f1360b429fa5e0152ac5ce8c0bd6485cae490332e96846a8"
 
 [[package]]
 name = "sobol_burley"
-version = "0.3.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26e3528b09b1f1b1e152342a4462d1e80d568dc5623a0772252a6e584a53d550"
+checksum = "3441b32bbb896e372f1b8e7eb51a983713aef99599c32c0eb69183aa490cb6a0"
 
 [[package]]
 name = "spectral_upsampling"
Cargo.toml

@@ -30,13 +30,15 @@ nom = "5"
 num_cpus = "1.8"
 openexr = "0.7"
 kioku = "0.3"
-sobol_burley = "0.3"
+sobol_burley = "0.4"
 png_encode_mini = "0.1.2"
 rustc-serialize = "0.3"
 scoped_threadpool = "0.1"
 time = "0.1"
 fastapprox = "0.3"
 
 # Local crate dependencies
 [dependencies.bvh_order]
 path = "sub_crates/bvh_order"
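The bump to sobol_burley 0.4 matters because the reworked get_sample_4d() and golden_ratio_sample() below also pull owen_scramble_rev and u32_to_f32_norm from sobol_burley::parts. For orientation, here is a small usage sketch of sample_4d() as it is called in this diff; the signature is inferred from those call sites rather than checked against the crate's documentation, so treat it as an assumption.

    // Usage sketch of sobol_burley as called in this diff (signature assumed
    // from the call sites below, not verified against the crate docs).
    fn main() {
        let seed = 0xdead_beef_u32;
        for i in 0..4u32 {
            // One Owen-scrambled Sobol sample: sample index, 4d dimension set, seed.
            let n4 = sobol_burley::sample_4d(i, 0, seed);
            println!("sample {}: {:?}", i, n4);
        }
        println!("4d dimension sets: {}", sobol_burley::NUM_DIMENSION_SETS_4D);
    }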
@@ -240,8 +240,10 @@ impl<'a> Renderer<'a> {
 for x in bucket.x..(bucket.x + bucket.w) {
     for si in 0..self.spp {
         // Raw sample numbers.
-        let (d0, d1, d2, d3) = get_sample_4d(si as u32, 0, (x, y), self.seed);
-        let (d4, _, _, _) = get_sample_4d(si as u32, 1, (x, y), self.seed);
+        let (d0, d1, d2, d3) =
+            get_sample_4d(si as u32, 0, (x, y), self.seed, self.spp as u32);
+        let (d4, _, _, _) =
+            get_sample_4d(si as u32, 1, (x, y), self.seed, self.spp as u32);
 
         // Calculate image plane x and y coordinates
         let (img_x, img_y) = {
@@ -262,9 +264,12 @@ impl<'a> Renderer<'a> {
     d0,
     map_0_1_to_wavelength(golden_ratio_sample(
         si as u32,
-        hash_u32((x << 16) ^ y, self.seed),
+        self.seed,
+        (x, y),
+        self.spp as u32,
     )),
     si as u32,
+    self.spp as u32,
 );
 paths.push(path);
 rays.push(ray, false);
@@ -383,6 +388,8 @@ pub struct LightPath {
     light_attenuation: Float4,
     pending_color_addition: Float4,
     color: Float4,
+
+    max_samples: u32,
 }
 
 #[allow(clippy::new_ret_no_self)]
@@ -396,6 +403,7 @@ impl LightPath {
     time: f32,
     wavelength: f32,
     sample_number: u32,
+    max_samples: u32,
 ) -> (LightPath, Ray) {
     (
         LightPath {
@@ -416,6 +424,8 @@ impl LightPath {
     light_attenuation: Float4::splat(1.0),
     pending_color_addition: Float4::splat(0.0),
     color: Float4::splat(0.0),
+
+    max_samples: max_samples,
 },
 scene.camera.generate_ray(
     image_plane_co.0,
@@ -441,6 +451,7 @@ impl LightPath {
     dimension,
     self.pixel_co,
     self.sampling_seed,
+    self.max_samples,
 )
 }
 
@@ -698,15 +709,22 @@ fn get_sample_4d(
     dimension_set: u32,
     pixel_co: (u32, u32),
     seed: u32,
+    max_samples: u32,
 ) -> (f32, f32, f32, f32) {
     // A unique seed for every pixel coordinate up to a resolution of
     // 65536 x 65536. Also incorporating the seed.
     let seed = pixel_co.0 ^ (pixel_co.1 << 16) ^ seed.wrapping_mul(0x736caf6f);
 
     match dimension_set {
         ds if ds < sobol_burley::NUM_DIMENSION_SETS_4D as u32 => {
             // Sobol sampling.
-            let n4 = sobol_burley::sample_4d(i, ds, seed);
+            //
+            // The `offset` is for screen-space blue noise distribution in the
+            // spirit of the paper "Screen-Space Blue-Noise Diffusion of Monte
+            // Carlo Sampling Error via Hierarchical Ordering of Pixels" by
+            // Ahmed et al.
+            // `sample_4d()` already does sample shuffling internally in a way
+            // consistent with the paper, so we only have to arrange the pixels
+            // according to a space filling curve to achieve the results from
+            // the paper.
+            let offset = hilbert::xy2d(pixel_co.0, pixel_co.1) * max_samples;
+            let n4 = sobol_burley::sample_4d(offset + i, ds, seed);
             (n4[0], n4[1], n4[2], n4[3])
         }
         ds => {
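get_sample_4d() leans on hilbert::xy2d() from a local sub-crate that is not shown in this diff. For readers who want to see what such a mapping does, here is the classic iterative Hilbert-curve index computation for a 65536 x 65536 grid (matching the resolution mentioned in the seed comment above). This is only a plausible stand-in for the actual hilbert crate, whose orientation and size conventions may differ.

    /// Map (x, y) to its index along a Hilbert curve over a 65536 x 65536 grid,
    /// using the classic iterative algorithm. Assumed to be roughly what the
    /// commit's `hilbert::xy2d()` computes; conventions may differ.
    fn hilbert_xy2d(mut x: u32, mut y: u32) -> u32 {
        const N: u32 = 1 << 16; // grid side length
        let mut d: u32 = 0;
        let mut s = N / 2;
        while s > 0 {
            let rx = ((x & s) > 0) as u32;
            let ry = ((y & s) > 0) as u32;
            d += s * s * ((3 * rx) ^ ry);

            // Rotate/flip the quadrant so the curve stays continuous.
            if ry == 0 {
                if rx == 1 {
                    x = (N - 1) - x;
                    y = (N - 1) - y;
                }
                std::mem::swap(&mut x, &mut y);
            }
            s /= 2;
        }
        d
    }

    fn main() {
        // An aligned 2x2 block of pixels occupies four consecutive curve indices.
        for (x, y) in [(10u32, 20u32), (11, 20), (10, 21), (11, 21)] {
            println!("({}, {}) -> {}", x, y, hilbert_xy2d(x, y));
        }
    }

The property the commit relies on is locality: pixels that are close on screen get nearby curve indices, so their per-pixel ranges of the global Sobol sequence (offset = index * max_samples) are adjacent runs.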
@@ -723,15 +741,30 @@ fn get_sample_4d(
 }
 
 /// Golden ratio sampler.
-fn golden_ratio_sample(i: u32, scramble: u32) -> f32 {
-    // NOTE: use this for the wavelength dimension, because
-    // due to the nature of hero wavelength sampling this ends up
-    // being crazily more efficient than pretty much any other sampler,
-    // and reduces variance by a huge amount.
-    let n = i
-        .wrapping_add(hash_u32(scramble, 0))
-        .wrapping_mul(2654435769);
-    n as f32 * (1.0 / (1u64 << 32) as f32)
+///
+// NOTE: use this for the wavelength dimension, because
+// due to the nature of hero wavelength sampling this ends up
+// being crazily more efficient than pretty much any other sampler,
+// and reduces variance by a huge amount.
+fn golden_ratio_sample(i: u32, seed: u32, pixel_co: (u32, u32), max_samples: u32) -> f32 {
+    use sobol_burley::parts::{owen_scramble_rev, u32_to_f32_norm};
+
+    // Turns out the screen-space blue-noise diffusion trick mentioned
+    // in `get_sample_4d()` works reasonably well on golden ratio
+    // sampling as well! But we actually have to do the shuffling
+    // ourselves here.
+    let offset = owen_scramble_rev(
+        hilbert::xy2d(pixel_co.0, pixel_co.1).reverse_bits(),
+        hash_u32(seed, 6543266),
+    )
+    .reverse_bits()
+        * max_samples;
+    let i = i.wrapping_add(offset);
+
+    // The actual golden ratio sampling computation.
+    let n = i.wrapping_mul(2654435769);
+
+    u32_to_f32_norm(n)
 }
 
 #[derive(Debug)]
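Two details of the rewritten golden_ratio_sample() are worth unpacking. First, 2654435769 is 2^32 / phi rounded to the nearest integer, so i.wrapping_mul(2654435769) is fixed-point arithmetic for frac(i / phi), the classic additive-recurrence (golden ratio) sequence; u32_to_f32_norm() then presumably maps the result to [0, 1) just as the removed `n as f32 * (1.0 / (1u64 << 32) as f32)` did. Second, the per-pixel offset is the pixel's Hilbert index, Owen-scrambled in bit-reversed order and multiplied by max_samples, which in effect shuffles the pixel order while preserving its hierarchical structure, so each pixel gets a decorrelated stretch of the sequence but the screen-space diffusion from the paper still applies. A tiny self-contained sketch of just the golden-ratio part (not code from the commit) follows.

    // The golden-ratio sequence by itself: frac(i / phi) via u32 fixed point.
    fn golden_ratio_point(i: u32) -> f32 {
        let n = i.wrapping_mul(2654435769); // 2654435769 ~= 2^32 / phi
        // Same mapping the old code used explicitly: treat n as a fraction of 2^32.
        n as f32 * (1.0 / (1u64 << 32) as f32)
    }

    fn main() {
        // The first few points interleave instead of clumping:
        // 0.000, 0.618, 0.236, 0.854, 0.472, ...
        for i in 0..5u32 {
            println!("{}: {:.3}", i, golden_ratio_point(i));
        }
    }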