Compare commits

..

2 Commits

Author SHA1 Message Date
c5965ec874 Replace some custom math functions with stdlib functions.
Some of these didn't exist in stable Rust before, and some of them
used to be slower than the custom ones.
2022-05-15 23:00:49 -07:00
89d2831dc8 Update readme. 2022-03-31 10:27:47 -07:00
32 changed files with 2752 additions and 3300 deletions

5
Cargo.lock generated
View File

@ -155,10 +155,6 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" checksum = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19"
[[package]]
name = "data_tree"
version = "0.1.0"
[[package]] [[package]]
name = "fastapprox" name = "fastapprox"
version = "0.3.0" version = "0.3.0"
@ -357,7 +353,6 @@ dependencies = [
"compact", "compact",
"copy_in_place", "copy_in_place",
"crossbeam", "crossbeam",
"data_tree",
"fastapprox", "fastapprox",
"glam", "glam",
"half", "half",

View File

@ -3,7 +3,6 @@ members = [
"sub_crates/bvh_order", "sub_crates/bvh_order",
"sub_crates/color", "sub_crates/color",
"sub_crates/compact", "sub_crates/compact",
"sub_crates/data_tree",
"sub_crates/halton", "sub_crates/halton",
"sub_crates/math3d", "sub_crates/math3d",
"sub_crates/spectral_upsampling", "sub_crates/spectral_upsampling",
@ -48,11 +47,8 @@ path = "sub_crates/color"
[dependencies.compact] [dependencies.compact]
path = "sub_crates/compact" path = "sub_crates/compact"
[dependencies.data_tree]
path = "sub_crates/data_tree"
[dependencies.halton] [dependencies.halton]
path = "sub_crates/halton" path = "sub_crates/halton"
[dependencies.math3d] [dependencies.math3d]

View File

@ -13,7 +13,9 @@ efficiently handle very large data sets, complex shading, motion blur, color
management, etc. presents a much richer and more challenging problem space to management, etc. presents a much richer and more challenging problem space to
explore than just writing a basic path tracer. explore than just writing a basic path tracer.
## Building ## Building
Psychopath is written in [Rust](https://www.rust-lang.org), and is pretty Psychopath is written in [Rust](https://www.rust-lang.org), and is pretty
straightforward to build except for its OpenEXR dependency. straightforward to build except for its OpenEXR dependency.
@ -34,6 +36,7 @@ documented in the [OpenEXR-rs readme](https://github.com/cessen/openexr-rs/blob/
Once those environment variables are set, then you should be able to build using Once those environment variables are set, then you should be able to build using
the same simple cargo command above. the same simple cargo command above.
# PsychoBlend # PsychoBlend
Included in the repository is an add-on for [Blender](http://www.blender.org) Included in the repository is an add-on for [Blender](http://www.blender.org)
@ -50,15 +53,6 @@ doesn't support them yet.
- Exports dupligroups with full hierarchical instancing - Exports dupligroups with full hierarchical instancing
- Limited auto-detection of instanced meshes - Limited auto-detection of instanced meshes
# Contributing
I'm not looking for contributions right now, and I'm likely to reject pull
requests. This is currently a solo project and I like it that way.
However, if you're looking for projects _related_ to Psychopath to contribute to,
[OpenEXR-rs](https://github.com/cessen/openexr-rs) is definitely a
collaborative project that I would love more help with! And I fully expect more
such projects to come out of Psychopath in the future.
# License # License
@ -69,3 +63,13 @@ See LICENSE.md for details. But the gist is:
* Most crates under the `sub_crates` directory are dual-licensed under MIT and Apache 2.0 (but with some exceptions--see each crate for its respective licenses). * Most crates under the `sub_crates` directory are dual-licensed under MIT and Apache 2.0 (but with some exceptions--see each crate for its respective licenses).
The intent of this scheme is to keep Psychopath itself copyleft, while allowing smaller reusable components to be licensed more liberally. The intent of this scheme is to keep Psychopath itself copyleft, while allowing smaller reusable components to be licensed more liberally.
# Contributing
This is a personal, experimental, for-fun project, and I am specifically
not looking for contributions of any kind. All PRs will be rejected
without review.
However, feel free to fork this into an entirely new project, or examine
the code for ideas for a project of your own.

View File

@ -7,118 +7,112 @@ Scene $Scene_fr1 {
SamplesPerPixel [16] SamplesPerPixel [16]
Seed [1] Seed [1]
} }
Shaders { Camera {
SurfaceShader $Green { Fov [39.449188]
Type [Lambert] FocalDistance [10.620000]
Color [rec709, 0.117 0.4125 0.115] ApertureRadius [0.000000]
} Transform [1.000000 -0.000000 0.000000 0.000000 -0.000000 0.000000 1.000000 0.000000 0.000000 1.000000 -0.000000 0.000000 -2.779998 -8.000000 2.730010 1.000000]
SurfaceShader $Red {
Type [Lambert]
Color [rec709, 0.611 0.0555 0.062]
}
SurfaceShader $White {
Type [Lambert]
Color [rec709, 0.7295 0.7355 0.729]
}
} }
World { World {
BackgroundShader { BackgroundShader {
Type [Color] Type [Color]
Color [rec709, 0.0 0.0 0.0] Color [rec709, 0.000000 0.000000 0.000000]
} }
} }
Camera { Shaders {
Fov [39.449188] SurfaceShader $Green {
FocalDistance [10.62] Type [Lambert]
ApertureRadius [0.0] Color [rec709, 0.117000 0.412500 0.115000]
Transform [1.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 1.0 0.0 0.0 -2.779998 -8.000000 2.730010 1.0]
} }
Assembly { SurfaceShader $Red {
Object $__Area { Type [Lambert]
Instance { Color [rec709, 0.611000 0.055500 0.062000]
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 2.779475 -2.794788 -5.498045 1.0]
} }
RectangleLight { SurfaceShader $White {
Color [rec709, 84.300003 53.800003 18.5] Type [Lambert]
Dimensions [1.35 1.1] Color [rec709, 0.729500 0.735500 0.729000]
} }
} }
Object $__Plane.010_ { Objects {
Instance { RectangleLight $__Area {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0] Color [rec709, 84.300003 53.800003 18.500000]
Dimensions [1.350000 1.100000]
} }
MeshSurface { MeshSurface $__Plane.010_ {
SurfaceShaderBind [$White] SurfaceShaderBind [$White]
Vertices [-2.649998 2.959996 3.299997 -4.229996 2.469997 3.299997 -3.139998 4.559995 3.299997 -4.719996 4.059995 3.299997 -4.719996 4.059996 0.0 -3.139998 4.559995 0.0 -4.229996 2.469997 0.0 -2.649998 2.959997 0.0 ] Vertices [-2.649998 2.959996 3.299997 -4.229996 2.469997 3.299997 -3.139998 4.559995 3.299997 -4.719996 4.059995 3.299997 -4.719996 4.059996 0.000000 -3.139998 4.559995 0.000000 -4.229996 2.469997 0.000000 -2.649998 2.959997 0.000000 ]
FaceVertCounts [4 4 4 4 4 ] FaceVertCounts [4 4 4 4 4 ]
FaceVertIndices [0 1 3 2 1 0 7 6 3 1 6 4 2 3 4 5 0 2 5 7 ] FaceVertIndices [0 1 3 2 1 0 7 6 3 1 6 4 2 3 4 5 0 2 5 7 ]
} }
} MeshSurface $__Plane.008_ {
Object $__Plane.008_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$White] SurfaceShaderBind [$White]
Vertices [-1.299999 0.649999 1.649998 -0.820000 2.249998 1.649999 -2.899997 1.139998 1.649999 -2.399998 2.719997 1.649999 -1.299999 0.649999 0.0 -0.820000 2.249998 0.0 -2.899997 1.139998 0.0 -2.399998 2.719997 0.0 ] Vertices [-1.299999 0.649999 1.649998 -0.820000 2.249998 1.649999 -2.899997 1.139998 1.649999 -2.399998 2.719997 1.649999 -1.299999 0.649999 0.000000 -0.820000 2.249998 0.000000 -2.899997 1.139998 0.000000 -2.399998 2.719997 0.000000 ]
FaceVertCounts [4 4 4 4 4 ] FaceVertCounts [4 4 4 4 4 ]
FaceVertIndices [0 2 3 1 3 2 6 7 1 3 7 5 0 1 5 4 2 0 4 6 ] FaceVertIndices [0 2 3 1 3 2 6 7 1 3 7 5 0 1 5 4 2 0 4 6 ]
} }
} MeshSurface $__Plane.006_ {
Object $__Plane.006_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$Red] SurfaceShaderBind [$Red]
Vertices [-5.495996 5.591994 0.0 -5.527995 -0.000001 0.0 -5.559996 5.591993 5.487995 -5.559995 -0.000001 5.487995 ] Vertices [-5.495996 5.591994 0.000000 -5.527995 -0.000001 -0.000000 -5.559996 5.591993 5.487995 -5.559995 -0.000001 5.487995 ]
FaceVertCounts [4 ] FaceVertCounts [4 ]
FaceVertIndices [0 1 3 2 ] FaceVertIndices [0 1 3 2 ]
} }
} MeshSurface $__Plane.004_ {
Object $__Plane.004_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$Green] SurfaceShaderBind [$Green]
Vertices [-0.000001 5.591995 0.0 0.0 0.0 0.0 -0.000001 5.591994 5.487995 0.0 0.0 5.487995 ] Vertices [-0.000001 5.591995 0.000000 0.000000 0.000000 0.000000 -0.000001 5.591994 5.487995 0.000000 -0.000000 5.487995 ]
FaceVertCounts [4 ] FaceVertCounts [4 ]
FaceVertIndices [1 0 2 3 ] FaceVertIndices [1 0 2 3 ]
} }
} MeshSurface $__Plane.002_ {
Object $__Plane.002_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$White] SurfaceShaderBind [$White]
Vertices [-5.495996 5.591994 0.0 -0.000001 5.591995 0.0 -5.559996 5.591993 5.487995 -0.000001 5.591994 5.487995 ] Vertices [-5.495996 5.591994 0.000000 -0.000001 5.591995 0.000000 -5.559996 5.591993 5.487995 -0.000001 5.591994 5.487995 ]
FaceVertCounts [4 ] FaceVertCounts [4 ]
FaceVertIndices [0 1 3 2 ] FaceVertIndices [0 1 3 2 ]
} }
} MeshSurface $__Plane.001_ {
Object $__Plane.001_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$White] SurfaceShaderBind [$White]
Vertices [-5.559996 5.591993 5.487995 -0.000001 5.591994 5.487995 -5.559995 -0.000001 5.487995 0.0 0.0 5.487995 -3.429997 3.319996 5.487995 -2.129998 3.319996 5.487995 -3.429997 2.269997 5.487995 -2.129998 2.269997 5.487995 ] Vertices [-5.559996 5.591993 5.487995 -0.000001 5.591994 5.487995 -5.559995 -0.000001 5.487995 0.000000 -0.000000 5.487995 -3.429997 3.319996 5.487995 -2.129998 3.319996 5.487995 -3.429997 2.269997 5.487995 -2.129998 2.269997 5.487995 ]
FaceVertCounts [4 4 4 4 ] FaceVertCounts [4 4 4 4 ]
FaceVertIndices [1 5 4 0 0 4 6 2 2 6 7 3 7 5 1 3 ] FaceVertIndices [1 5 4 0 0 4 6 2 2 6 7 3 7 5 1 3 ]
} }
} MeshSurface $__Plane_ {
Object $__Plane_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$White] SurfaceShaderBind [$White]
Vertices [-5.495996 5.591994 0.0 -0.000001 5.591995 0.0 -5.527995 -0.000001 0.0 0.0 0.0 0.0 ] Vertices [-5.495996 5.591994 0.000000 -0.000001 5.591995 0.000000 -5.527995 -0.000001 -0.000000 0.000000 0.000000 0.000000 ]
FaceVertCounts [4 ] FaceVertCounts [4 ]
FaceVertIndices [0 1 3 2 ] FaceVertIndices [0 1 3 2 ]
} }
} }
Assembly {
Instance {
Data [$__Area]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 2.779475 -2.794788 -5.498045 1.000000]
}
Instance {
Data [$__Plane.010_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
Instance {
Data [$__Plane.008_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
Instance {
Data [$__Plane.006_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
Instance {
Data [$__Plane.004_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
Instance {
Data [$__Plane.002_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
Instance {
Data [$__Plane.001_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
Instance {
Data [$__Plane_]
Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
}
} }
} }

View File

@ -7,11 +7,11 @@ Scene $Scene_fr1 {
SamplesPerPixel [16] SamplesPerPixel [16]
Seed [1] Seed [1]
} }
Shaders { Camera {
SurfaceShader $Material { Fov [49.134342]
Type [Lambert] FocalDistance [9.559999]
Color [rec709, 0.8 0.8 0.8] ApertureRadius [0.250000]
} Transform [0.685881 0.727634 -0.010817 0.000000 -0.317370 0.312469 0.895343 0.000000 -0.654862 0.610666 -0.445245 0.000000 7.481132 -6.507640 5.343665 1.000000]
} }
World { World {
BackgroundShader { BackgroundShader {
@ -19,43 +19,42 @@ Scene $Scene_fr1 {
Color [rec709, 0.050876 0.050876 0.050876] Color [rec709, 0.050876 0.050876 0.050876]
} }
} }
Camera { Shaders {
Fov [49.134342] SurfaceShader $Material {
FocalDistance [9.56] Type [Lambert]
ApertureRadius [0.25] Color [rec709, 0.800000 0.800000 0.800000]
Transform [0.685881 0.727634 -0.010817 0.0 -0.317370 0.312469 0.895343 0.0 -0.654862 0.610666 -0.445245 0.0 7.481132 -6.507640 5.343665 1.0]
} }
Assembly {
Object $__Plane_ {
Instance {
Transform [0.078868 0.0 0.0 0.0 0.0 0.078868 0.0 0.0 0.0 0.0 0.078868 0.00.0 0.0 0.0 1.0]
} }
MeshSurface { Objects {
MeshSurface $__Plane_ {
SurfaceShaderBind [$Material] SurfaceShaderBind [$Material]
Vertices [-1.0 -1.0 0.0 1.0 -1.0 0.0 -1.0 1.0 0.0 1.0 1.0 0.0] Vertices [-1.000000 -1.000000 0.000000 1.000000 -1.000000 0.000000 -1.000000 1.000000 0.000000 1.000000 1.000000 0.000000]
FaceVertCounts [4] FaceVertCounts [4 ]
FaceVertIndices [0 1 3 2] FaceVertIndices [0 1 3 2 ]
} }
} MeshSurface $__Cube_ {
Object $__Cube_ {
Instance {
Transform [1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 -1.0 1.0]
}
MeshSurface {
SurfaceShaderBind [$Material] SurfaceShaderBind [$Material]
Vertices [1.0 1.0 -1.0 1.0 -1.0 -1.0 -1.0 -1.0 -1.0 -1.0 1.0 -1.0 1.0 1.0 1.0 1.0 -1.0 1.0 -1.0 -1.0 1.0 -1.0 1.0 1.0 ] Vertices [1.000000 1.000000 -1.000000 1.000000 -1.000000 -1.000000 -1.000000 -1.000000 -1.000000 -1.000000 1.000000 -1.000000 1.000000 0.999999 1.000000 0.999999 -1.000001 1.000000 -1.000000 -1.000000 1.000000 -1.000000 1.000000 1.000000 ]
FaceVertCounts [4 4 4 4 4 4 ] FaceVertCounts [4 4 4 4 4 4 ]
FaceVertIndices [0 1 2 3 4 7 6 5 0 4 5 1 1 5 6 2 2 6 7 3 4 0 3 7 ] FaceVertIndices [0 1 2 3 4 7 6 5 0 4 5 1 1 5 6 2 2 6 7 3 4 0 3 7 ]
} }
SphereLight $__Lamp {
Color [rec709, 50.000000 50.000000 50.000000]
Radius [0.100000]
} }
Object $__Lamp { }
Assembly {
Instance { Instance {
Transform [0.019856 -0.060763 0.0 0.0 0.015191 0.079422 0.0 0.0 0.0 0.0 1.0 0.0 -0.026851 -0.125233 -4.432303 1.0] Data [$__Plane_]
Transform [0.078868 -0.000000 0.000000 -0.000000 -0.000000 0.078868 -0.000000 0.000000 0.000000 -0.000000 0.078868 -0.000000 -0.000000 0.000000 -0.000000 1.000000]
} }
SphereLight { Instance {
Color [rec709, 50.0 50.0 50.0] Data [$__Cube_]
Radius [0.1] Transform [1.000000 -0.000000 0.000000 -0.000000 -0.000000 1.000000 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.000000 0.000000 -1.000000 1.000000]
} }
Instance {
Data [$__Lamp]
Transform [0.019856 -0.060763 0.000000 -0.000000 0.015191 0.079422 -0.000000 0.000000 0.000000 -0.000000 1.000000 -0.000000 -0.026851 -0.125233 -4.432303 1.000000]
} }
} }
} }

View File

@ -1,6 +1,6 @@
import bpy import bpy
from .util import escape_name, mat2str, color2str, psycolor2str, needs_def_mb, needs_xform_mb, ExportCancelled from .util import escape_name, mat2str, needs_def_mb, needs_xform_mb, ExportCancelled
class Assembly: class Assembly:
def __init__(self, render_engine, objects, visible_layers, group_prefix="", translation_offset=(0,0,0)): def __init__(self, render_engine, objects, visible_layers, group_prefix="", translation_offset=(0,0,0)):
@ -14,7 +14,6 @@ class Assembly:
self.material_names = set() self.material_names = set()
self.mesh_names = set() self.mesh_names = set()
self.patch_names = set()
self.assembly_names = set() self.assembly_names = set()
# Collect all the objects, materials, instances, etc. # Collect all the objects, materials, instances, etc.
@ -41,8 +40,6 @@ class Assembly:
self.objects += [Assembly(self.render_engine, ob.dupli_group.objects, ob.dupli_group.layers, name, ob.dupli_group.dupli_offset*-1)] self.objects += [Assembly(self.render_engine, ob.dupli_group.objects, ob.dupli_group.layers, name, ob.dupli_group.dupli_offset*-1)]
elif ob.type == 'MESH': elif ob.type == 'MESH':
name = self.get_mesh(ob, group_prefix) name = self.get_mesh(ob, group_prefix)
elif ob.type == 'SURFACE':
name = self.get_bicubic_patch(ob, group_prefix)
elif ob.type == 'LAMP' and ob.data.type == 'POINT': elif ob.type == 'LAMP' and ob.data.type == 'POINT':
name = self.get_sphere_lamp(ob, group_prefix) name = self.get_sphere_lamp(ob, group_prefix)
elif ob.type == 'LAMP' and ob.data.type == 'AREA': elif ob.type == 'LAMP' and ob.data.type == 'AREA':
@ -135,32 +132,6 @@ class Assembly:
else: else:
return None return None
def get_bicubic_patch(self, ob, group_prefix):
# Figure out if we need to export or not and figure out what name to
# export with.
has_modifiers = len(ob.modifiers) > 0
deform_mb = needs_def_mb(ob)
if has_modifiers or deform_mb:
patch_name = group_prefix + escape_name("__" + ob.name + "__" + ob.data.name + "_")
else:
patch_name = group_prefix + escape_name("__" + ob.data.name + "_")
should_export_patch = patch_name not in self.patch_names
# Get patch
if should_export_patch:
self.patch_names.add(patch_name)
self.objects += [BicubicPatch(self.render_engine, ob, patch_name)]
# Get materials
for ms in ob.material_slots:
if ms != None:
if ms.material.name not in self.material_names:
self.material_names.add(ms.material.name)
self.materials += [Material(self.render_engine, ms.material)]
return patch_name
else:
return None
def get_sphere_lamp(self, ob, group_prefix): def get_sphere_lamp(self, ob, group_prefix):
name = group_prefix + "__" + escape_name(ob.name) name = group_prefix + "__" + escape_name(ob.name)
@ -209,21 +180,21 @@ class Mesh:
# Write vertices and (if it's smooth shaded) normals # Write vertices and (if it's smooth shaded) normals
for ti in range(len(self.time_meshes)): for ti in range(len(self.time_meshes)):
w.write("Vertices [") w.write("Vertices [")
w.write(" ".join(["{:.6} {:.6} {:.6}".format(vert.co[0], vert.co[1], vert.co[2]) for vert in self.time_meshes[ti].vertices]), False) w.write(" ".join([("%f" % i) for vert in self.time_meshes[ti].vertices for i in vert.co]), False)
w.write("]\n", False) w.write("]\n", False)
if self.time_meshes[0].polygons[0].use_smooth and self.ob.data.psychopath.is_subdivision_surface == False: if self.time_meshes[0].polygons[0].use_smooth and self.ob.data.psychopath.is_subdivision_surface == False:
w.write("Normals [") w.write("Normals [")
w.write(" ".join(["{:.6} {:.6} {:.6}".format(vert.normal[0], vert.normal[1], vert.normal[2]) for vert in self.time_meshes[ti].vertices]), False) w.write(" ".join([("%f" % i) for vert in self.time_meshes[ti].vertices for i in vert.normal]), False)
w.write("]\n", False) w.write("]\n", False)
# Write face vertex counts # Write face vertex counts
w.write("FaceVertCounts [") w.write("FaceVertCounts [")
w.write(" ".join(["{}".format(len(p.vertices)) for p in self.time_meshes[0].polygons]), False) w.write(" ".join([("%d" % len(p.vertices)) for p in self.time_meshes[0].polygons]), False)
w.write("]\n", False) w.write("]\n", False)
# Write face vertex indices # Write face vertex indices
w.write("FaceVertIndices [") w.write("FaceVertIndices [")
w.write(" ".join(["{}".format(v) for p in self.time_meshes[0].polygons for v in p.vertices]), False) w.write(" ".join([("%d"%v) for p in self.time_meshes[0].polygons for v in p.vertices]), False)
w.write("]\n", False) w.write("]\n", False)
# MeshSurface/SubdivisionSurface section end # MeshSurface/SubdivisionSurface section end
@ -231,40 +202,6 @@ class Mesh:
w.write("}\n") w.write("}\n")
class BicubicPatch:
""" Holds data for a patch to be exported.
"""
def __init__(self, render_engine, ob, name):
self.ob = ob
self.name = name
self.needs_mb = needs_def_mb(self.ob)
self.time_patches = []
def take_sample(self, render_engine, scene, time):
if len(self.time_patches) == 0 or self.needs_mb:
render_engine.update_stats("", "Psychopath: Collecting '{}' at time {}".format(self.ob.name, time))
self.time_patches += [self.ob.data.copy()]
def cleanup(self):
for patch in self.time_patches:
bpy.data.curves.remove(patch)
def export(self, render_engine, w):
render_engine.update_stats("", "Psychopath: Exporting %s" % self.ob.name)
# Write patch
w.write("BicubicPatch $%s {\n" % self.name)
w.indent()
for patch in self.time_patches:
verts = patch.splines[0].points
vstr = ""
for v in verts:
vstr += "{:.6} {:.6} {:.6} ".format(v.co[0], v.co[1], v.co[2])
w.write("Vertices [{}]\n".format(vstr[:-1]))
w.unindent()
w.write("}\n")
class SphereLamp: class SphereLamp:
""" Holds data for a sphere light to be exported. """ Holds data for a sphere light to be exported.
""" """
@ -295,9 +232,14 @@ class SphereLamp:
w.write("SphereLight $%s {\n" % self.name) w.write("SphereLight $%s {\n" % self.name)
w.indent() w.indent()
for col in self.time_col: for col in self.time_col:
w.write(color2str(col[0], col[1]) + "\n") if col[0] == 'Rec709':
w.write("Color [rec709, %f %f %f]\n" % (col[1][0], col[1][1], col[1][2]))
elif col[0] == 'Blackbody':
w.write("Color [blackbody, %f %f]\n" % (col[1], col[2]))
elif col[0] == 'ColorTemperature':
w.write("Color [color_temperature, %f %f]\n" % (col[1], col[2]))
for rad in self.time_rad: for rad in self.time_rad:
w.write("Radius [{:.6}]\n".format(rad)) w.write("Radius [%f]\n" % rad)
w.unindent() w.unindent()
w.write("}\n") w.write("}\n")
@ -336,9 +278,14 @@ class RectLamp:
w.write("RectangleLight $%s {\n" % self.name) w.write("RectangleLight $%s {\n" % self.name)
w.indent() w.indent()
for col in self.time_col: for col in self.time_col:
w.write(color2str(col[0], col[1]) + "\n") if col[0] == 'Rec709':
w.write("Color [rec709, %f %f %f]\n" % (col[1][0], col[1][1], col[1][2]))
elif col[0] == 'Blackbody':
w.write("Color [blackbody, %f %f]\n" % (col[1], col[2]))
elif col[0] == 'ColorTemperature':
w.write("Color [color_temperature, %f %f]\n" % (col[1], col[2]))
for dim in self.time_dim: for dim in self.time_dim:
w.write("Dimensions [{:.6} {:.6}]\n".format(dim[0], dim[1])) w.write("Dimensions [%f %f]\n" % dim)
w.unindent() w.unindent()
w.write("}\n") w.write("}\n")
@ -391,15 +338,57 @@ class Material:
w.indent() w.indent()
if self.mat.psychopath.surface_shader_type == 'Emit': if self.mat.psychopath.surface_shader_type == 'Emit':
w.write("Type [Emit]\n") w.write("Type [Emit]\n")
w.write(psycolor2str(self.mat.psychopath) + "\n") if self.mat.psychopath.color_type == 'Rec709':
col = self.mat.psychopath.color
w.write("Color [rec709, %f %f %f]\n" % (
col[0], col[1], col[2],
))
elif self.mat.psychopath.color_type == 'Blackbody':
w.write("Color [blackbody, %f %f]\n" % (
self.mat.psychopath.color_blackbody_temp,
1.0,
))
elif self.mat.psychopath.color_type == 'ColorTemperature':
w.write("Color [color_temperature, %f %f]\n" % (
self.mat.psychopath.color_blackbody_temp,
1.0,
))
elif self.mat.psychopath.surface_shader_type == 'Lambert': elif self.mat.psychopath.surface_shader_type == 'Lambert':
w.write("Type [Lambert]\n") w.write("Type [Lambert]\n")
w.write(psycolor2str(self.mat.psychopath) + "\n") if self.mat.psychopath.color_type == 'Rec709':
col = self.mat.psychopath.color
w.write("Color [rec709, %f %f %f]\n" % (
col[0], col[1], col[2],
))
elif self.mat.psychopath.color_type == 'Blackbody':
w.write("Color [blackbody, %f %f]\n" % (
self.mat.psychopath.color_blackbody_temp,
1.0,
))
elif self.mat.psychopath.color_type == 'ColorTemperature':
w.write("Color [color_temperature, %f %f]\n" % (
self.mat.psychopath.color_blackbody_temp,
1.0,
))
elif self.mat.psychopath.surface_shader_type == 'GGX': elif self.mat.psychopath.surface_shader_type == 'GGX':
w.write("Type [GGX]\n") w.write("Type [GGX]\n")
w.write(psycolor2str(self.mat.psychopath) + "\n") if self.mat.psychopath.color_type == 'Rec709':
w.write("Roughness [{:.6}]\n".format(self.mat.psychopath.roughness)) col = self.mat.psychopath.color
w.write("Fresnel [{:.6}]\n".format(self.mat.psychopath.fresnel)) w.write("Color [rec709, %f %f %f]\n" % (
col[0], col[1], col[2],
))
elif self.mat.psychopath.color_type == 'Blackbody':
w.write("Color [blackbody, %f %f]\n" % (
self.mat.psychopath.color_blackbody_temp,
1.0,
))
elif self.mat.psychopath.color_type == 'ColorTemperature':
w.write("Color [color_temperature, %f %f]\n" % (
self.mat.psychopath.color_blackbody_temp,
1.0,
))
w.write("Roughness [%f]\n" % self.mat.psychopath.roughness)
w.write("Fresnel [%f]\n" % self.mat.psychopath.fresnel)
else: else:
raise "Unsupported surface shader type '%s'" % self.mat.psychopath.surface_shader_type raise "Unsupported surface shader type '%s'" % self.mat.psychopath.surface_shader_type
w.unindent() w.unindent()

View File

@ -96,7 +96,7 @@ class PsychoExporter:
res_y = int(self.scene.render.resolution_y * (self.scene.render.resolution_percentage / 100)) res_y = int(self.scene.render.resolution_y * (self.scene.render.resolution_percentage / 100))
self.w.write('Resolution [%d %d]\n' % (res_x, res_y)) self.w.write('Resolution [%d %d]\n' % (res_x, res_y))
self.w.write("SamplesPerPixel [%d]\n" % self.scene.psychopath.spp) self.w.write("SamplesPerPixel [%d]\n" % self.scene.psychopath.spp)
self.w.write("DicingRate [{:.6}]\n".format(self.scene.psychopath.dicing_rate)) self.w.write("DicingRate [%f]\n" % self.scene.psychopath.dicing_rate)
self.w.write('Seed [%d]\n' % self.fr) self.w.write('Seed [%d]\n' % self.fr)
# RenderSettings section end # RenderSettings section end

View File

@ -11,41 +11,10 @@ def mat2str(m):
s = "" s = ""
for j in range(4): for j in range(4):
for i in range(4): for i in range(4):
s += " {:.6}".format(m[i][j]) s += (" %f" % m[i][j])
return s[1:] return s[1:]
def color2str(color_type, color_data):
if color_type == 'Rec709':
return "Color [rec709, {:.6} {:.6} {:.6}]".format(
color_data[0],
color_data[1],
color_data[2],
)
elif color_type == 'Blackbody':
return "Color [blackbody, {:.6} {:.6}]".format(
color_data[0],
color_data[1],
)
elif color_type == 'ColorTemperature':
return "Color [color_temperature, {:.6} {:.6}]".format(
color_data[0],
color_data[1],
)
def psycolor2str(psymat):
color_type = psymat.color_type
color_data = psymat.color
if color_type == 'Blackbody' or color_type == 'ColorTemperature':
# TODO: add the brightness multiplier to the Psychoblend material
# settings. Here we're just defaulting it to 1.0.
color_data = [psymat.color_blackbody_temp, 1.0]
return color2str(color_type, color_data)
def needs_def_mb(ob): def needs_def_mb(ob):
""" Determines if the given object needs to be exported with """ Determines if the given object needs to be exported with
deformation motion blur or not. deformation motion blur or not.

View File

@ -3,7 +3,7 @@ import bpy
from math import degrees, tan, atan from math import degrees, tan, atan
from mathutils import Vector, Matrix from mathutils import Vector, Matrix
from .util import escape_name, color2str, mat2str, ExportCancelled from .util import escape_name, mat2str, ExportCancelled
class World: class World:
def __init__(self, render_engine, scene, visible_layers, aspect_ratio): def __init__(self, render_engine, scene, visible_layers, aspect_ratio):
@ -90,16 +90,16 @@ class Camera:
w.indent() w.indent()
for fov in self.fovs: for fov in self.fovs:
w.write("Fov [{:.6}]\n".format(fov)) w.write("Fov [%f]\n" % fov)
for rad in self.aperture_radii: for rad in self.aperture_radii:
w.write("ApertureRadius [{:.6}]\n".format(rad)) w.write("ApertureRadius [%f]\n" % rad)
for dist in self.focal_distances: for dist in self.focal_distances:
w.write("FocalDistance [{:.6}]\n".format(dist)) w.write("FocalDistance [%f]\n" % dist)
for mat in self.xforms: for mat in self.xforms:
w.write("Transform [{}]\n".format(mat2str(mat))) w.write("Transform [%s]\n" % mat2str(mat))
w.unindent() w.unindent()
w.write("}\n") w.write("}\n")
@ -116,7 +116,7 @@ class BackgroundShader:
w.write("BackgroundShader {\n") w.write("BackgroundShader {\n")
w.indent(); w.indent();
w.write("Type [Color]\n") w.write("Type [Color]\n")
w.write("Color [rec709, {:.6} {:.6} {:.6}]\n".format(self.color[0], self.color[1], self.color[2])) w.write("Color [rec709, %f %f %f]\n" % self.color)
w.unindent() w.unindent()
w.write("}\n") w.write("}\n")
@ -147,11 +147,16 @@ class DistantDiskLamp:
w.write("DistantDiskLight $%s {\n" % self.name) w.write("DistantDiskLight $%s {\n" % self.name)
w.indent() w.indent()
for direc in self.time_dir: for direc in self.time_dir:
w.write("Direction [{:.6} {:.6} {:.6}]\n".format(direc[0], direc[1], direc[2])) w.write("Direction [%f %f %f]\n" % (direc[0], direc[1], direc[2]))
for col in self.time_col: for col in self.time_col:
w.write(color2str(col[0], col[1]) + "\n") if col[0] == 'Rec709':
w.write("Color [rec709, %f %f %f]\n" % (col[1][0], col[1][1], col[1][2]))
elif col[0] == 'Blackbody':
w.write("Color [blackbody, %f %f]\n" % (col[1], col[2]))
elif col[0] == 'ColorTemperature':
w.write("Color [color_temperature, %f %f]\n" % (col[1], col[2]))
for rad in self.time_rad: for rad in self.time_rad:
w.write("Radius [{:.6}]\n".format(rad)) w.write("Radius [%f]\n" % rad)
w.unindent() w.unindent()
w.write("}\n") w.write("}\n")

View File

@ -7,7 +7,7 @@ use std::{
use crate::{ use crate::{
lerp::{lerp, lerp_slice, Lerp}, lerp::{lerp, lerp_slice, Lerp},
math::{fast_minf32, Point, Transform, Vector}, math::{Point, Transform, Vector},
}; };
const BBOX_MAXT_ADJUST: f32 = 1.000_000_24; const BBOX_MAXT_ADJUST: f32 = 1.000_000_24;
@ -47,7 +47,7 @@ impl BBox {
// Find the far and near intersection // Find the far and near intersection
let far_t = t1.max(t2).extend(std::f32::INFINITY); let far_t = t1.max(t2).extend(std::f32::INFINITY);
let near_t = t1.min(t2).extend(0.0); let near_t = t1.min(t2).extend(0.0);
let far_hit_t = fast_minf32(far_t.min_element() * BBOX_MAXT_ADJUST, max_t); let far_hit_t = (far_t.min_element() * BBOX_MAXT_ADJUST).min(max_t);
let near_hit_t = near_t.max_element(); let near_hit_t = near_t.max_element();
// Did we hit? // Did we hit?

View File

@ -1,3 +1,5 @@
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign};
pub use color::{ pub use color::{
rec709_e_to_xyz, rec709_to_xyz, xyz_to_aces_ap0, xyz_to_aces_ap0_e, xyz_to_rec709, rec709_e_to_xyz, rec709_to_xyz, xyz_to_aces_ap0, xyz_to_aces_ap0_e, xyz_to_rec709,
xyz_to_rec709_e, xyz_to_rec709_e,
@ -42,7 +44,7 @@ fn wavelengths(hero_wavelength: f32) -> Vec4 {
//---------------------------------------------------------------- //----------------------------------------------------------------
#[derive(Debug, Copy, Clone, PartialEq)] #[derive(Debug, Copy, Clone)]
pub enum Color { pub enum Color {
XYZ(f32, f32, f32), XYZ(f32, f32, f32),
Blackbody { Blackbody {
@ -245,77 +247,9 @@ impl Color {
_ => unreachable!(), _ => unreachable!(),
} }
} }
pub fn scale_brightness(self, rhs: f32) -> Self {
match self {
Color::XYZ(x, y, z) => Color::XYZ(x * rhs, y * rhs, z * rhs),
Color::Blackbody {
temperature,
factor,
} => Color::Blackbody {
temperature: temperature,
factor: factor * rhs,
},
Color::Temperature {
temperature,
factor,
} => Color::Temperature {
temperature: temperature,
factor: factor * rhs,
},
}
}
} }
// Implemented for interpolation operations, not for any otherwise meaningful impl Mul<f32> for Color {
// notion of addition.
impl std::ops::Add<Color> for Color {
type Output = Self;
fn add(self, rhs: Self) -> Self {
match (self, rhs) {
(Color::XYZ(x1, y1, z1), Color::XYZ(x2, y2, z2)) => {
Color::XYZ(x1 + x2, y1 + y2, z1 + z2)
}
(
Color::Blackbody {
temperature: tmp1,
factor: fac1,
},
Color::Blackbody {
temperature: tmp2,
factor: fac2,
},
) => Color::Blackbody {
temperature: tmp1 + tmp2,
factor: fac1 + fac2,
},
(
Color::Temperature {
temperature: tmp1,
factor: fac1,
},
Color::Temperature {
temperature: tmp2,
factor: fac2,
},
) => Color::Temperature {
temperature: tmp1 + tmp2,
factor: fac1 + fac2,
},
_ => panic!("Cannot add colors with different representations."),
}
}
}
// Implemented for interpolation operations, not for any otherwise meaningful
// notion of multiplication.
impl std::ops::Mul<f32> for Color {
type Output = Self; type Output = Self;
fn mul(self, rhs: f32) -> Self { fn mul(self, rhs: f32) -> Self {
@ -326,7 +260,7 @@ impl std::ops::Mul<f32> for Color {
temperature, temperature,
factor, factor,
} => Color::Blackbody { } => Color::Blackbody {
temperature: temperature * rhs, temperature: temperature,
factor: factor * rhs, factor: factor * rhs,
}, },
@ -334,13 +268,19 @@ impl std::ops::Mul<f32> for Color {
temperature, temperature,
factor, factor,
} => Color::Temperature { } => Color::Temperature {
temperature: temperature * rhs, temperature: temperature,
factor: factor * rhs, factor: factor * rhs,
}, },
} }
} }
} }
impl MulAssign<f32> for Color {
fn mul_assign(&mut self, rhs: f32) {
*self = *self * rhs;
}
}
impl Lerp for Color { impl Lerp for Color {
/// Note that this isn't a proper lerp in spectral space. However, /// Note that this isn't a proper lerp in spectral space. However,
/// for our purposes that should be fine: all we care about is that /// for our purposes that should be fine: all we care about is that
@ -487,7 +427,7 @@ impl SpectralSample {
} }
} }
impl std::ops::Add for SpectralSample { impl Add for SpectralSample {
type Output = SpectralSample; type Output = SpectralSample;
fn add(self, rhs: SpectralSample) -> Self::Output { fn add(self, rhs: SpectralSample) -> Self::Output {
assert_eq!(self.hero_wavelength, rhs.hero_wavelength); assert_eq!(self.hero_wavelength, rhs.hero_wavelength);
@ -498,14 +438,14 @@ impl std::ops::Add for SpectralSample {
} }
} }
impl std::ops::AddAssign for SpectralSample { impl AddAssign for SpectralSample {
fn add_assign(&mut self, rhs: SpectralSample) { fn add_assign(&mut self, rhs: SpectralSample) {
assert_eq!(self.hero_wavelength, rhs.hero_wavelength); assert_eq!(self.hero_wavelength, rhs.hero_wavelength);
self.e = self.e + rhs.e; self.e = self.e + rhs.e;
} }
} }
impl std::ops::Mul for SpectralSample { impl Mul for SpectralSample {
type Output = SpectralSample; type Output = SpectralSample;
fn mul(self, rhs: SpectralSample) -> Self::Output { fn mul(self, rhs: SpectralSample) -> Self::Output {
assert_eq!(self.hero_wavelength, rhs.hero_wavelength); assert_eq!(self.hero_wavelength, rhs.hero_wavelength);
@ -516,14 +456,14 @@ impl std::ops::Mul for SpectralSample {
} }
} }
impl std::ops::MulAssign for SpectralSample { impl MulAssign for SpectralSample {
fn mul_assign(&mut self, rhs: SpectralSample) { fn mul_assign(&mut self, rhs: SpectralSample) {
assert_eq!(self.hero_wavelength, rhs.hero_wavelength); assert_eq!(self.hero_wavelength, rhs.hero_wavelength);
self.e = self.e * rhs.e; self.e = self.e * rhs.e;
} }
} }
impl std::ops::Mul<f32> for SpectralSample { impl Mul<f32> for SpectralSample {
type Output = SpectralSample; type Output = SpectralSample;
fn mul(self, rhs: f32) -> Self::Output { fn mul(self, rhs: f32) -> Self::Output {
SpectralSample { SpectralSample {
@ -533,13 +473,13 @@ impl std::ops::Mul<f32> for SpectralSample {
} }
} }
impl std::ops::MulAssign<f32> for SpectralSample { impl MulAssign<f32> for SpectralSample {
fn mul_assign(&mut self, rhs: f32) { fn mul_assign(&mut self, rhs: f32) {
self.e = self.e * rhs; self.e = self.e * rhs;
} }
} }
impl std::ops::Div<f32> for SpectralSample { impl Div<f32> for SpectralSample {
type Output = SpectralSample; type Output = SpectralSample;
fn div(self, rhs: f32) -> Self::Output { fn div(self, rhs: f32) -> Self::Output {
SpectralSample { SpectralSample {
@ -549,7 +489,7 @@ impl std::ops::Div<f32> for SpectralSample {
} }
} }
impl std::ops::DivAssign<f32> for SpectralSample { impl DivAssign<f32> for SpectralSample {
fn div_assign(&mut self, rhs: f32) { fn div_assign(&mut self, rhs: f32) {
self.e = self.e / rhs; self.e = self.e / rhs;
} }
@ -596,7 +536,7 @@ impl Lerp for XYZ {
} }
} }
impl std::ops::Add for XYZ { impl Add for XYZ {
type Output = XYZ; type Output = XYZ;
fn add(self, rhs: XYZ) -> Self::Output { fn add(self, rhs: XYZ) -> Self::Output {
XYZ { XYZ {
@ -607,7 +547,7 @@ impl std::ops::Add for XYZ {
} }
} }
impl std::ops::AddAssign for XYZ { impl AddAssign for XYZ {
fn add_assign(&mut self, rhs: XYZ) { fn add_assign(&mut self, rhs: XYZ) {
self.x += rhs.x; self.x += rhs.x;
self.y += rhs.y; self.y += rhs.y;
@ -615,7 +555,7 @@ impl std::ops::AddAssign for XYZ {
} }
} }
impl std::ops::Mul<f32> for XYZ { impl Mul<f32> for XYZ {
type Output = XYZ; type Output = XYZ;
fn mul(self, rhs: f32) -> Self::Output { fn mul(self, rhs: f32) -> Self::Output {
XYZ { XYZ {
@ -626,7 +566,7 @@ impl std::ops::Mul<f32> for XYZ {
} }
} }
impl std::ops::MulAssign<f32> for XYZ { impl MulAssign<f32> for XYZ {
fn mul_assign(&mut self, rhs: f32) { fn mul_assign(&mut self, rhs: f32) {
self.x *= rhs; self.x *= rhs;
self.y *= rhs; self.y *= rhs;
@ -634,7 +574,7 @@ impl std::ops::MulAssign<f32> for XYZ {
} }
} }
impl std::ops::Div<f32> for XYZ { impl Div<f32> for XYZ {
type Output = XYZ; type Output = XYZ;
fn div(self, rhs: f32) -> Self::Output { fn div(self, rhs: f32) -> Self::Output {
XYZ { XYZ {
@ -645,7 +585,7 @@ impl std::ops::Div<f32> for XYZ {
} }
} }
impl std::ops::DivAssign<f32> for XYZ { impl DivAssign<f32> for XYZ {
fn div_assign(&mut self, rhs: f32) { fn div_assign(&mut self, rhs: f32) {
self.x /= rhs; self.x /= rhs;
self.y /= rhs; self.y /= rhs;

View File

@ -315,8 +315,7 @@ impl<'a> Surface for RectangleLight<'a> {
let closure = { let closure = {
let inv_surface_area = (1.0 / (dim.0 as f64 * dim.1 as f64)) as f32; let inv_surface_area = (1.0 / (dim.0 as f64 * dim.1 as f64)) as f32;
let color = lerp_slice(self.colors, time) let color = lerp_slice(self.colors, time) * inv_surface_area;
.scale_brightness(inv_surface_area);
SurfaceClosure::Emit(color) SurfaceClosure::Emit(color)
}; };

View File

@ -321,7 +321,7 @@ impl<'a> Surface for SphereLight<'a> {
let closure = { let closure = {
let inv_surface_area = let inv_surface_area =
(1.0 / (4.0 * PI_64 * radius as f64 * radius as f64)) as f32; (1.0 / (4.0 * PI_64 * radius as f64 * radius as f64)) as f32;
let color = lerp_slice(self.colors, time).scale_brightness(inv_surface_area); let color = lerp_slice(self.colors, time) * inv_surface_area;
SurfaceClosure::Emit(color) SurfaceClosure::Emit(color)
}; };

View File

@ -31,36 +31,27 @@ mod math;
mod mis; mod mis;
mod parse; mod parse;
mod ray; mod ray;
// mod renderer; mod renderer;
mod sampling; mod sampling;
mod scene; mod scene;
mod shading; mod shading;
mod surface; mod surface;
mod timer; mod timer;
// mod tracer; mod tracer;
mod transform_stack; mod transform_stack;
use std::{ use std::{fs::File, io, io::Read, mem, path::Path, str::FromStr};
fs::File,
io,
io::{Read, Seek},
mem,
path::Path,
str::FromStr,
};
use clap::{App, Arg}; use clap::{App, Arg};
use nom::bytes::complete::take_until; use nom::bytes::complete::take_until;
use kioku::Arena; use kioku::Arena;
use data_tree::{DataTreeReader, Event};
use crate::{ use crate::{
accel::BVH4Node, accel::BVH4Node,
bbox::BBox, bbox::BBox,
parse::parse_scene, parse::{parse_scene, DataTree},
// renderer::LightPath, renderer::LightPath,
surface::SurfaceIntersection, surface::SurfaceIntersection,
timer::Timer, timer::Timer,
}; };
@ -172,7 +163,7 @@ fn main() {
"SurfaceIntersection size: {} bytes", "SurfaceIntersection size: {} bytes",
mem::size_of::<SurfaceIntersection>() mem::size_of::<SurfaceIntersection>()
); );
// println!("LightPath size: {} bytes", mem::size_of::<LightPath>()); println!("LightPath size: {} bytes", mem::size_of::<LightPath>());
println!("BBox size: {} bytes", mem::size_of::<BBox>()); println!("BBox size: {} bytes", mem::size_of::<BBox>());
// println!("BVHNode size: {} bytes", mem::size_of::<BVHNode>()); // println!("BVHNode size: {} bytes", mem::size_of::<BVHNode>());
println!("BVH4Node size: {} bytes", mem::size_of::<BVH4Node>()); println!("BVH4Node size: {} bytes", mem::size_of::<BVH4Node>());
@ -200,76 +191,64 @@ fn main() {
println!("Parsing scene file...",); println!("Parsing scene file...",);
} }
t.tick(); t.tick();
// let psy_contents = if args.is_present("use_stdin") { let psy_contents = if args.is_present("use_stdin") {
// // Read from stdin // Read from stdin
// let mut input = Vec::new(); let mut input = Vec::new();
// let tmp = std::io::stdin(); let tmp = std::io::stdin();
// let mut stdin = tmp.lock(); let mut stdin = tmp.lock();
// let mut buf = vec![0u8; 4096]; let mut buf = vec![0u8; 4096];
// loop { loop {
// let count = stdin let count = stdin
// .read(&mut buf) .read(&mut buf)
// .expect("Unexpected end of scene input."); .expect("Unexpected end of scene input.");
// let start = if input.len() < 11 { let start = if input.len() < 11 {
// 0 0
// } else { } else {
// input.len() - 11 input.len() - 11
// }; };
// let end = input.len() + count; let end = input.len() + count;
// input.extend(&buf[..count]); input.extend(&buf[..count]);
// let mut done = false; let mut done = false;
// let mut trunc_len = 0; let mut trunc_len = 0;
// if let nom::IResult::Ok((remaining, _)) = if let nom::IResult::Ok((remaining, _)) =
// take_until::<&str, &[u8], ()>("__PSY_EOF__")(&input[start..end]) take_until::<&str, &[u8], ()>("__PSY_EOF__")(&input[start..end])
// { {
// done = true; done = true;
// trunc_len = input.len() - remaining.len(); trunc_len = input.len() - remaining.len();
// } }
// if done { if done {
// input.truncate(trunc_len); input.truncate(trunc_len);
// break; break;
// } }
// } }
// String::from_utf8(input).unwrap() String::from_utf8(input).unwrap()
// } else { } else {
// // Read from file // Read from file
// let mut input = String::new(); let mut input = String::new();
// let fp = args.value_of("input").unwrap(); let fp = args.value_of("input").unwrap();
// let mut f = io::BufReader::new(File::open(fp).unwrap()); let mut f = io::BufReader::new(File::open(fp).unwrap());
// let _ = f.read_to_string(&mut input); let _ = f.read_to_string(&mut input);
// input input
// }; };
// let dt = DataTree::from_str(&psy_contents).unwrap(); let dt = DataTree::from_str(&psy_contents).unwrap();
// if !args.is_present("serialized_output") { if !args.is_present("serialized_output") {
// println!("\tParsed scene file in {:.3}s", t.tick()); println!("\tParsed scene file in {:.3}s", t.tick());
// } }
let file_path = args.value_of("input").unwrap();
let mut psy_file = io::BufReader::new(File::open(file_path).unwrap());
let mut events = DataTreeReader::new(&mut psy_file);
// Iterate through scenes and render them // Iterate through scenes and render them
loop { if let DataTree::Internal { ref children, .. } = dt {
for child in children {
t.tick(); t.tick();
match events.next_event() { if child.type_name() == "Scene" {
// Parse a scene and render it.
Ok(Event::InnerOpen {
type_name: "Scene",
ident,
..
}) => {
if !args.is_present("serialized_output") { if !args.is_present("serialized_output") {
println!("Building scene..."); println!("Building scene...");
} }
let arena = Arena::new().with_block_size((1 << 20) * 4); let arena = Arena::new().with_block_size((1 << 20) * 4);
let ident = ident.map(|v| v.to_string()); let mut r = parse_scene(&arena, child).unwrap_or_else(|e| {
let scene = e.print(&psy_contents);
parse_scene(&arena, &mut events, ident.as_deref()).unwrap_or_else(|e| {
print!("Parse error: ");
e.print(&mut io::BufReader::new(File::open(file_path).unwrap()));
panic!("Parse error."); panic!("Parse error.");
}); });
@ -277,7 +256,7 @@ fn main() {
if !args.is_present("serialized_output") { if !args.is_present("serialized_output") {
println!("\tOverriding scene spp: {}", spp); println!("\tOverriding scene spp: {}", spp);
} }
// r.spp = usize::from_str(spp).unwrap(); r.spp = usize::from_str(spp).unwrap();
} }
let max_samples_per_bucket = let max_samples_per_bucket =
@ -300,62 +279,58 @@ fn main() {
if !args.is_present("serialized_output") { if !args.is_present("serialized_output") {
println!("Rendering scene with {} threads...", thread_count); println!("Rendering scene with {} threads...", thread_count);
} }
let (mut image, rstats) = r.render(
max_samples_per_bucket,
crop,
thread_count,
args.is_present("serialized_output"),
);
// Print render stats
if !args.is_present("serialized_output") {
let rtime = t.tick();
let ntime = rtime as f64 / rstats.total_time;
println!("\tRendered scene in {:.3}s", rtime);
println!(
"\t\tTrace: {:.3}s",
ntime * rstats.trace_time
);
println!("\t\t\tRays traced: {}", rstats.ray_count);
println!(
"\t\t\tRays/sec: {}",
(rstats.ray_count as f64 / (ntime * rstats.trace_time) as f64) as u64
);
println!("\t\t\tRay/node tests: {}", rstats.accel_node_visits);
println!(
"\t\tInitial ray generation: {:.3}s",
ntime * rstats.initial_ray_generation_time
);
println!(
"\t\tRay generation: {:.3}s",
ntime * rstats.ray_generation_time
);
println!(
"\t\tSample writing: {:.3}s",
ntime * rstats.sample_writing_time
);
}
println!("{:#?}", scene); // Write to disk
if !args.is_present("serialized_output") {
println!("Writing image to disk into '{}'...", r.output_file);
if r.output_file.ends_with(".png") {
image
.write_png(Path::new(&r.output_file))
.expect("Failed to write png...");
} else if r.output_file.ends_with(".exr") {
image.write_exr(Path::new(&r.output_file));
} else {
panic!("Unknown output file extension.");
}
println!("\tWrote image in {:.3}s", t.tick());
}
println!("Didn't really render, because all that code is disabled! Done!"); // Print memory stats if stats are wanted.
// let (mut image, rstats) = r.render( if args.is_present("stats") {
// max_samples_per_bucket,
// crop,
// thread_count,
// args.is_present("serialized_output"),
// );
// // Print render stats
// if !args.is_present("serialized_output") {
// let rtime = t.tick();
// let ntime = rtime as f64 / rstats.total_time;
// println!("\tRendered scene in {:.3}s", rtime);
// println!(
// "\t\tTrace: {:.3}s",
// ntime * rstats.trace_time
// );
// println!("\t\t\tRays traced: {}", rstats.ray_count);
// println!(
// "\t\t\tRays/sec: {}",
// (rstats.ray_count as f64 / (ntime * rstats.trace_time) as f64) as u64
// );
// println!("\t\t\tRay/node tests: {}", rstats.accel_node_visits);
// println!(
// "\t\tInitial ray generation: {:.3}s",
// ntime * rstats.initial_ray_generation_time
// );
// println!(
// "\t\tRay generation: {:.3}s",
// ntime * rstats.ray_generation_time
// );
// println!(
// "\t\tSample writing: {:.3}s",
// ntime * rstats.sample_writing_time
// );
// }
// // Write to disk
// if !args.is_present("serialized_output") {
// println!("Writing image to disk into '{}'...", r.output_file);
// if r.output_file.ends_with(".png") {
// image
// .write_png(Path::new(&r.output_file))
// .expect("Failed to write png...");
// } else if r.output_file.ends_with(".exr") {
// image.write_exr(Path::new(&r.output_file));
// } else {
// panic!("Unknown output file extension.");
// }
// println!("\tWrote image in {:.3}s", t.tick());
// }
// // Print memory stats if stats are wanted.
// if args.is_present("stats") {
// let arena_stats = arena.stats(); // let arena_stats = arena.stats();
// let mib_occupied = arena_stats.0 as f64 / 1_048_576.0; // let mib_occupied = arena_stats.0 as f64 / 1_048_576.0;
// let mib_allocated = arena_stats.1 as f64 / 1_048_576.0; // let mib_allocated = arena_stats.1 as f64 / 1_048_576.0;
@ -375,21 +350,7 @@ fn main() {
// } // }
// println!("\tTotal blocks: {}", arena_stats.2); // println!("\tTotal blocks: {}", arena_stats.2);
// }
} }
Ok(Event::EOF) => {
break;
}
Err(e) => {
println!("Error: {:?}", e);
break;
}
Ok(_) => {
println!("Error: invalid scene in psy file.");
break;
} }
} }
} }

View File

@ -4,73 +4,16 @@ use std::f32;
pub use math3d::{cross, dot, CrossProduct, DotProduct, Normal, Point, Transform, Vector}; pub use math3d::{cross, dot, CrossProduct, DotProduct, Normal, Point, Transform, Vector};
/// Clamps a value between a min and max.
pub fn clamp<T: PartialOrd>(v: T, lower: T, upper: T) -> T {
if v < lower {
lower
} else if v > upper {
upper
} else {
v
}
}
// The stdlib min function is slower than a simple if statement for some reason.
pub fn fast_minf32(a: f32, b: f32) -> f32 {
if a < b {
a
} else {
b
}
}
// The stdlib max function is slower than a simple if statement for some reason.
pub fn fast_maxf32(a: f32, b: f32) -> f32 {
if a > b {
a
} else {
b
}
}
/// Rounds an integer up to the next power of two.
pub fn upper_power_of_two(mut v: u32) -> u32 {
v -= 1;
v |= v >> 1;
v |= v >> 2;
v |= v >> 4;
v |= v >> 8;
v |= v >> 16;
v + 1
}
/// Gets the log base 2 of the given integer /// Gets the log base 2 of the given integer
pub fn log2_64(mut value: u64) -> u64 { pub fn log2_64(n: u64) -> u64 {
// This works by doing a binary search for the largest non-zero binary // This works by finding the largest non-zero binary digit in the
// digit in the number. Its bit position is then the log2 of the integer. // number. Its bit position is then the log2 of the integer.
let mut log = 0; if n == 0 {
0
const POWERS: [(u64, u64); 6] = [
(32, (1 << 32) - 1),
(16, (1 << 16) - 1),
(8, (1 << 8) - 1),
(4, (1 << 4) - 1),
(2, (1 << 2) - 1),
(1, (1 << 1) - 1),
];
for &(i, j) in &POWERS {
let tmp = value >> i;
if tmp != 0 {
log += i;
value = tmp;
} else { } else {
value &= j; (63 - n.leading_zeros()) as u64
} }
}
log
} }
/// Creates a coordinate system from a single vector. /// Creates a coordinate system from a single vector.

116
src/parse/basics.rs Normal file
View File

@ -0,0 +1,116 @@
//! Some basic nom parsers
#![allow(dead_code)]
use std::str::{self, FromStr};
use nom::{
character::complete::{digit1, multispace0, one_of},
combinator::{map_res, opt, recognize},
number::complete::float,
sequence::{delimited, tuple},
IResult,
};
// ========================================================
/// Parses an `f32`, ignoring any whitespace surrounding it.
pub fn ws_f32(input: &str) -> IResult<&str, f32, ()> {
    delimited(multispace0, float, multispace0)(input)
}
/// Parses a `u32` (decimal digits only), ignoring any whitespace surrounding it.
pub fn ws_u32(input: &str) -> IResult<&str, u32, ()> {
    map_res(delimited(multispace0, digit1, multispace0), u32::from_str)(input)
}
/// Parses a `usize` (decimal digits only), ignoring any whitespace surrounding it.
pub fn ws_usize(input: &str) -> IResult<&str, usize, ()> {
    map_res(delimited(multispace0, digit1, multispace0), usize::from_str)(input)
}
/// Parses an `i32` (an optional leading `-` followed by decimal digits),
/// ignoring any whitespace surrounding it.
pub fn ws_i32(input: &str) -> IResult<&str, i32, ()> {
    map_res(
        delimited(
            multispace0,
            // `recognize` hands the whole matched span (sign + digits)
            // to `i32::from_str` as one string.
            recognize(tuple((opt(one_of("-")), digit1))),
            multispace0,
        ),
        i32::from_str,
    )(input)
}
// ========================================================
#[cfg(test)]
mod test {
    use super::*;
    use nom::{combinator::all_consuming, sequence::tuple};

    // Each parser is exercised with no whitespace, leading whitespace,
    // trailing whitespace, and trailing unconsumed input.

    #[test]
    fn ws_u32_1() {
        assert_eq!(ws_u32("42"), Ok((&""[..], 42)));
        assert_eq!(ws_u32(" 42"), Ok((&""[..], 42)));
        assert_eq!(ws_u32("42 "), Ok((&""[..], 42)));
        assert_eq!(ws_u32(" 42"), Ok((&""[..], 42)));
        assert_eq!(ws_u32(" 42 53"), Ok((&"53"[..], 42)));
    }

    #[test]
    fn ws_usize_1() {
        assert_eq!(ws_usize("42"), Ok((&""[..], 42)));
        assert_eq!(ws_usize(" 42"), Ok((&""[..], 42)));
        assert_eq!(ws_usize("42 "), Ok((&""[..], 42)));
        assert_eq!(ws_usize(" 42"), Ok((&""[..], 42)));
        assert_eq!(ws_usize(" 42 53"), Ok((&"53"[..], 42)));
    }

    #[test]
    fn ws_i32_1() {
        assert_eq!(ws_i32("42"), Ok((&""[..], 42)));
        assert_eq!(ws_i32(" 42"), Ok((&""[..], 42)));
        assert_eq!(ws_i32("42 "), Ok((&""[..], 42)));
        assert_eq!(ws_i32(" 42"), Ok((&""[..], 42)));
        assert_eq!(ws_i32(" 42 53"), Ok((&"53"[..], 42)));
    }

    #[test]
    fn ws_i32_2() {
        // Negative numbers, plus a malformed double minus sign.
        assert_eq!(ws_i32("-42"), Ok((&""[..], -42)));
        assert_eq!(ws_i32(" -42"), Ok((&""[..], -42)));
        assert_eq!(ws_i32("-42 "), Ok((&""[..], -42)));
        assert_eq!(ws_i32(" -42"), Ok((&""[..], -42)));
        assert_eq!(ws_i32(" -42 53"), Ok((&"53"[..], -42)));
        assert_eq!(ws_i32("--42").is_err(), true);
    }

    #[test]
    fn ws_f32_1() {
        assert_eq!(ws_f32("42"), Ok((&""[..], 42.0)));
        assert_eq!(ws_f32(" 42"), Ok((&""[..], 42.0)));
        assert_eq!(ws_f32("42 "), Ok((&""[..], 42.0)));
        assert_eq!(ws_f32(" 42"), Ok((&""[..], 42.0)));
        assert_eq!(ws_f32(" 42 53"), Ok((&"53"[..], 42.0)));
    }

    #[test]
    fn ws_f32_2() {
        assert_eq!(ws_f32("42.5"), Ok((&""[..], 42.5)));
        assert_eq!(ws_f32(" 42.5"), Ok((&""[..], 42.5)));
        assert_eq!(ws_f32("42.5 "), Ok((&""[..], 42.5)));
        assert_eq!(ws_f32(" 42.5"), Ok((&""[..], 42.5)));
        assert_eq!(ws_f32(" 42.5 53"), Ok((&"53"[..], 42.5)));
    }

    #[test]
    fn ws_f32_3() {
        assert_eq!(ws_f32("-42.5"), Ok((&""[..], -42.5)));
        assert_eq!(ws_f32(" -42.5"), Ok((&""[..], -42.5)));
        assert_eq!(ws_f32("-42.5 "), Ok((&""[..], -42.5)));
        assert_eq!(ws_f32(" -42.5"), Ok((&""[..], -42.5)));
        assert_eq!(ws_f32(" -42.5 53"), Ok((&"53"[..], -42.5)));
    }

    #[test]
    fn ws_f32_4() {
        // Inputs that must fail, or fail once trailing junk must be consumed.
        assert_eq!(ws_f32("a1.0").is_err(), true);
        assert_eq!(all_consuming(ws_f32)("0abc").is_err(), true);
        assert_eq!(tuple((ws_f32, ws_f32))("0.abc 1.2").is_err(), true);
    }
}

742
src/parse/data_tree.rs Normal file
View File

@ -0,0 +1,742 @@
#![allow(dead_code)]
use std::{iter::Iterator, result::Result, slice};
/// A node of a parsed scene-description tree.
///
/// All text (type names, identifiers, leaf contents) is borrowed from the
/// source string the tree was parsed from.
#[derive(Debug, Eq, PartialEq)]
pub enum DataTree<'a> {
    /// A typed container node holding zero or more children.
    Internal {
        type_name: &'a str,
        // Optional `$`-prefixed identifier following the type name.
        ident: Option<&'a str>,
        children: Vec<DataTree<'a>>,
        // Byte position of this node in the source text, for error reporting.
        byte_offset: usize,
    },
    /// A typed node holding raw, unparsed text contents.
    Leaf {
        type_name: &'a str,
        contents: &'a str,
        byte_offset: usize,
    },
}
impl<'a> DataTree<'a> {
    /// Parses `source_text` into a tree, wrapping all top-level nodes in a
    /// synthetic "ROOT" internal node.
    ///
    /// Returns an error if any node is malformed or if trailing non-comment,
    /// non-whitespace text remains after the last node.
    pub fn from_str(source_text: &'a str) -> Result<DataTree<'a>, ParseError> {
        let mut items = Vec::new();
        let mut remaining_text = (0, source_text);

        while let Some((item, text)) = parse_node(remaining_text)? {
            remaining_text = text;
            items.push(item);
        }

        remaining_text = skip_ws_and_comments(remaining_text);

        if remaining_text.1.is_empty() {
            Ok(DataTree::Internal {
                type_name: "ROOT",
                ident: None,
                children: items,
                byte_offset: 0,
            })
        } else {
            // If the whole text wasn't parsed, something went wrong.
            Err(ParseError::Other((0, "Failed to parse the entire string.")))
        }
    }

    /// The type name of this node (present on both variants).
    pub fn type_name(&'a self) -> &'a str {
        match *self {
            DataTree::Internal { type_name, .. } | DataTree::Leaf { type_name, .. } => type_name,
        }
    }

    /// Byte position of this node in the source text, for error reporting.
    pub fn byte_offset(&'a self) -> usize {
        match *self {
            DataTree::Internal { byte_offset, .. } | DataTree::Leaf { byte_offset, .. } => {
                byte_offset
            }
        }
    }

    pub fn is_internal(&self) -> bool {
        matches!(*self, DataTree::Internal { .. })
    }

    pub fn is_leaf(&self) -> bool {
        matches!(*self, DataTree::Leaf { .. })
    }

    /// The leaf's raw contents, or `None` for internal nodes.
    pub fn leaf_contents(&'a self) -> Option<&'a str> {
        match *self {
            DataTree::Internal { .. } => None,
            DataTree::Leaf { contents, .. } => Some(contents),
        }
    }

    /// Iterates over this node's children; empty for leaves.
    pub fn iter_children(&'a self) -> slice::Iter<'a, DataTree<'a>> {
        if let DataTree::Internal { ref children, .. } = *self {
            children.iter()
        } else {
            [].iter()
        }
    }

    /// Iterates over children whose type name equals `type_name`.
    pub fn iter_children_with_type(&'a self, type_name: &'static str) -> DataTreeFilterIter<'a> {
        // `iter_children` already yields an empty iterator for leaves, so
        // the internal/leaf branching doesn't need to be repeated here.
        DataTreeFilterIter {
            type_name,
            iter: self.iter_children(),
        }
    }

    /// Iterates over internal children whose type name equals `type_name`,
    /// yielding their destructured fields.
    pub fn iter_internal_children_with_type(
        &'a self,
        type_name: &'static str,
    ) -> DataTreeFilterInternalIter<'a> {
        DataTreeFilterInternalIter {
            type_name,
            iter: self.iter_children(),
        }
    }

    /// Iterates over leaf children whose type name equals `type_name`,
    /// yielding their destructured fields.
    pub fn iter_leaf_children_with_type(
        &'a self,
        type_name: &'static str,
    ) -> DataTreeFilterLeafIter<'a> {
        DataTreeFilterLeafIter {
            type_name,
            iter: self.iter_children(),
        }
    }

    // For unit tests
    fn internal_data_or_panic(&'a self) -> (&'a str, Option<&'a str>, &'a Vec<DataTree<'a>>) {
        if let DataTree::Internal {
            type_name,
            ident,
            ref children,
            ..
        } = *self
        {
            (type_name, ident, children)
        } else {
            panic!("Expected DataTree::Internal, found DataTree::Leaf")
        }
    }

    // For unit tests
    fn leaf_data_or_panic(&'a self) -> (&'a str, &'a str) {
        if let DataTree::Leaf {
            type_name,
            contents,
            ..
        } = *self
        {
            (type_name, contents)
        } else {
            panic!("Expected DataTree::Leaf, found DataTree::Internal")
        }
    }
}
/// An iterator over the children of a `DataTree` node that filters out the
/// children not matching a specified type name.
pub struct DataTreeFilterIter<'a> {
    type_name: &'a str,
    iter: slice::Iter<'a, DataTree<'a>>,
}

impl<'a> Iterator for DataTreeFilterIter<'a> {
    type Item = &'a DataTree<'a>;

    fn next(&mut self) -> Option<&'a DataTree<'a>> {
        // Scan forward to the next child with the wanted type name,
        // exhausting the underlying iterator if there is none.
        let want = self.type_name;
        self.iter.find(|dt| dt.type_name() == want)
    }
}
/// An iterator over the children of a `DataTree` node that filters out the
/// children that aren't internal nodes and that don't match a specified
/// type name, yielding the internal nodes' destructured fields.
pub struct DataTreeFilterInternalIter<'a> {
    type_name: &'a str,
    iter: slice::Iter<'a, DataTree<'a>>,
}

impl<'a> Iterator for DataTreeFilterInternalIter<'a> {
    type Item = (&'a str, Option<&'a str>, &'a Vec<DataTree<'a>>, usize);

    fn next(&mut self) -> Option<(&'a str, Option<&'a str>, &'a Vec<DataTree<'a>>, usize)> {
        let want = self.type_name;
        // Leaves and internal nodes of other types are skipped over.
        self.iter.find_map(|dt| match *dt {
            DataTree::Internal {
                type_name,
                ident,
                ref children,
                byte_offset,
            } if type_name == want => Some((type_name, ident, children, byte_offset)),
            _ => None,
        })
    }
}
/// An iterator over the children of a `DataTree` node that filters out the
/// children that aren't leaf nodes and that don't match a specified
/// type name, yielding the leaf nodes' destructured fields.
pub struct DataTreeFilterLeafIter<'a> {
    type_name: &'a str,
    iter: slice::Iter<'a, DataTree<'a>>,
}

impl<'a> Iterator for DataTreeFilterLeafIter<'a> {
    type Item = (&'a str, &'a str, usize);

    fn next(&mut self) -> Option<(&'a str, &'a str, usize)> {
        let want = self.type_name;
        // Internal nodes and leaves of other types are skipped over.
        self.iter.find_map(|dt| match *dt {
            DataTree::Leaf {
                type_name,
                contents,
                byte_offset,
            } if type_name == want => Some((type_name, contents, byte_offset)),
            _ => None,
        })
    }
}
/// Errors that can occur while parsing a `DataTree`.
///
/// Each variant carries the byte offset in the source text where the
/// problem was detected, for error reporting.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ParseError {
    MissingOpener(usize),
    MissingOpenInternal(usize),
    MissingCloseInternal(usize),
    MissingOpenLeaf(usize),
    MissingCloseLeaf(usize),
    MissingTypeName(usize),
    UnexpectedIdent(usize),
    UnknownToken(usize),
    // Catch-all: byte offset plus a static description of what went wrong.
    Other((usize, &'static str)),
}
// ================================================================
/// Tokens produced by `next_token` while scanning the source text.
#[derive(Debug, PartialEq, Eq)]
enum Token<'a> {
    OpenInner,  // '{'
    CloseInner, // '}'
    OpenLeaf,   // '['
    CloseLeaf,  // ']'
    TypeName(&'a str),
    Ident(&'a str), // `$`-prefixed name; the slice includes the '$'
    End,     // end of input
    Unknown, // unrecognized character
}
// On success: the parsed node plus the remaining (byte-offset, text) input,
// or `None` when no node begins at the front of the input.
type ParseResult<'a> = Result<Option<(DataTree<'a>, (usize, &'a str))>, ParseError>;

/// Recursively parses a single node (and all of its children) from the
/// front of `source_text`, a (byte-offset, text) pair.
///
/// Returns `Ok(None)` when the input does not begin with a type name
/// (i.e. there is no node to parse), and `Err` on malformed input.
fn parse_node<'a>(source_text: (usize, &'a str)) -> ParseResult<'a> {
    let (token, text1) = next_token(source_text);
    if let Token::TypeName(type_name) = token {
        match next_token(text1) {
            // Internal with name: `Type $name { children... }`
            (Token::Ident(n), text2) => {
                if let (Token::OpenInner, text3) = next_token(text2) {
                    // Accumulate children until no further node parses.
                    let mut children = Vec::new();
                    let mut text_remaining = text3;
                    while let Some((node, text4)) = parse_node(text_remaining)? {
                        text_remaining = text4;
                        children.push(node);
                    }

                    if let (Token::CloseInner, text4) = next_token(text_remaining) {
                        return Ok(Some((
                            DataTree::Internal {
                                type_name: type_name,
                                ident: Some(n),
                                children: children,
                                byte_offset: text1.0,
                            },
                            text4,
                        )));
                    } else {
                        return Err(ParseError::MissingCloseInternal(text_remaining.0));
                    }
                } else {
                    return Err(ParseError::MissingOpenInternal(text2.0));
                }
            }

            // Internal without name: `Type { children... }`
            (Token::OpenInner, text2) => {
                let mut children = Vec::new();
                let mut text_remaining = text2;
                while let Some((node, text3)) = parse_node(text_remaining)? {
                    text_remaining = text3;
                    children.push(node);
                }

                if let (Token::CloseInner, text3) = next_token(text_remaining) {
                    return Ok(Some((
                        DataTree::Internal {
                            type_name: type_name,
                            ident: None,
                            children: children,
                            byte_offset: text1.0,
                        },
                        text3,
                    )));
                } else {
                    return Err(ParseError::MissingCloseInternal(text_remaining.0));
                }
            }

            // Leaf: `Type [raw contents]`
            (Token::OpenLeaf, text2) => {
                let (contents, text3) = parse_leaf_content(text2);
                if let (Token::CloseLeaf, text4) = next_token(text3) {
                    return Ok(Some((
                        DataTree::Leaf {
                            type_name: type_name,
                            contents: contents,
                            byte_offset: text1.0,
                        },
                        text4,
                    )));
                } else {
                    return Err(ParseError::MissingCloseLeaf(text3.0));
                }
            }

            // Other: a type name must be followed by `$ident`, `{`, or `[`.
            _ => {
                return Err(ParseError::MissingOpener(text1.0));
            }
        }
    } else {
        return Ok(None);
    }
}
/// Extracts the raw contents of a leaf node: everything up to (but not
/// including) the first unescaped ']', where a backslash escapes the
/// character that follows it.
///
/// Returns the contents slice and the remaining (byte-offset, text) pair;
/// the remaining text still starts at the closing ']' if one was found.
fn parse_leaf_content(source_text: (usize, &str)) -> (&str, (usize, &str)) {
    // Byte index of the first unescaped ']', or the full length of the
    // text when no such bracket exists.
    let mut escaped = false;
    let split = source_text
        .1
        .char_indices()
        .find_map(|(i, c)| {
            if escaped {
                escaped = false;
                None
            } else if c == '\\' {
                escaped = true;
                None
            } else if c == ']' {
                Some(i)
            } else {
                None
            }
        })
        .unwrap_or_else(|| source_text.1.len());

    (
        &source_text.1[0..split],
        (source_text.0 + split, &source_text.1[split..]),
    )
}
/// Scans the next token from the front of `source_text` (a (byte-offset,
/// text) pair), returning the token and the remaining input after it.
///
/// Leading whitespace and `#` comments are skipped first. Returns
/// `Token::End` at end of input and `Token::Unknown` for any character
/// that doesn't start a valid token.
fn next_token<'a>(source_text: (usize, &'a str)) -> (Token<'a>, (usize, &'a str)) {
    let text1 = skip_ws_and_comments(source_text);

    if let Some(c) = text1.1.chars().nth(0) {
        // Input advanced past the single character `c`.
        let text2 = (text1.0 + c.len_utf8(), &text1.1[c.len_utf8()..]);
        match c {
            // Single-character structural tokens.
            '{' => {
                return (Token::OpenInner, text2);
            }
            '}' => {
                return (Token::CloseInner, text2);
            }
            '[' => {
                return (Token::OpenLeaf, text2);
            }
            ']' => {
                return (Token::CloseLeaf, text2);
            }
            '$' => {
                // Parse name
                // Scan past the '$' until the first unescaped
                // non-identifier character; backslash escapes the
                // character after it.
                let mut si = 1;
                let mut escaped = false;
                let mut reached_end = true;
                for (i, c) in text1.1.char_indices().skip(1) {
                    si = i;
                    if escaped {
                        escaped = false;
                    } else if c == '\\' {
                        escaped = true;
                    } else if !is_ident_char(c) {
                        reached_end = false;
                        break;
                    }
                }
                // The whole remaining text is the identifier.
                if reached_end {
                    si = text1.1.len();
                }
                // The returned slice includes the leading '$'.
                return (
                    Token::Ident(&text1.1[0..si]),
                    (text1.0 + si, &text1.1[si..]),
                );
            }
            _ => {
                if is_ident_char(c) {
                    // Parse type
                    // Scan to the first non-identifier character (no
                    // escape handling for type names).
                    let mut si = 0;
                    let mut reached_end = true;
                    for (i, c) in text1.1.char_indices() {
                        si = i;
                        if !is_ident_char(c) {
                            reached_end = false;
                            break;
                        }
                    }
                    if reached_end {
                        si = text1.1.len();
                    }
                    return (
                        Token::TypeName(&text1.1[0..si]),
                        (text1.0 + si, &text1.1[si..]),
                    );
                }
            }
        }
    } else {
        return (Token::End, text1);
    }

    return (Token::Unknown, text1);
}
/// Whitespace characters recognized by the tokenizer.
fn is_ws(c: char) -> bool {
    matches!(c, '\n' | '\r' | '\t' | ' ')
}
/// Newline characters; used to find the end of a `#` comment.
fn is_nl(c: char) -> bool {
    matches!(c, '\n' | '\r')
}
/// Characters with structural meaning in the data-tree syntax; these can
/// never appear unescaped inside an identifier or type name.
fn is_reserved_char(c: char) -> bool {
    matches!(c, '{' | '}' | '[' | ']' | '$' | '#' | '\\')
}
/// Anything that is neither whitespace nor a reserved character can be
/// part of an identifier or type name.
fn is_ident_char(c: char) -> bool {
    !(is_ws(c) || is_reserved_char(c))
}
/// Slices off any leading whitespace, returning "" if the whole text
/// is whitespace.
fn skip_ws(text: &str) -> &str {
    match text.find(|c: char| !is_ws(c)) {
        Some(start) => &text[start..],
        None => "",
    }
}
fn skip_comment(text: &str) -> &str {
let mut si = 0;
if Some('#') == text.chars().nth(0) {
let mut reached_end = true;
for (i, c) in text.char_indices() {
si = i;
if is_nl(c) {
reached_end = false;
break;
}
}
if reached_end {
si = text.len();
}
}
return &text[si..];
}
/// Repeatedly strips leading whitespace and `#` comments, keeping the
/// byte-offset component of the pair in sync with how much was consumed.
fn skip_ws_and_comments(text: (usize, &str)) -> (usize, &str) {
    let mut rest = text.1;

    // Alternate whitespace/comment stripping until a pass removes nothing.
    loop {
        let stripped = skip_comment(skip_ws(rest));
        if stripped.len() == rest.len() {
            break;
        }
        rest = stripped;
    }

    let consumed = text.1.len() - rest.len();
    (text.0 + consumed, rest)
}
// ================================================================
#[cfg(test)]
mod tests {
    // `use super::*` already imports every item of the parent module
    // (including its private items, which are visible to this child
    // module), so the previous explicit `use super::{next_token, Token};`
    // was redundant and has been removed.
    use super::*;

    // A bare type name tokenizes as TypeName with the offset advanced
    // past it.
    #[test]
    fn tokenize_1() {
        let input = (0, "Thing");
        assert_eq!(next_token(input), (Token::TypeName("Thing"), (5, "")));
    }

    // Leading whitespace and a `#` comment are skipped before the token.
    #[test]
    fn tokenize_2() {
        let input = (0, " \n# gdfgdf gfdg dggdf\\sg dfgsd \n Thing");
        assert_eq!(next_token(input), (Token::TypeName("Thing"), (41, "")));
    }

    // TypeName followed by inner-node open/close braces.
    #[test]
    fn tokenize_3() {
        let input1 = (0, " Thing { }");
        let (token1, input2) = next_token(input1);
        let (token2, input3) = next_token(input2);
        let (token3, input4) = next_token(input3);
        assert_eq!((token1, input2.1), (Token::TypeName("Thing"), " { }"));
        assert_eq!((token2, input3.1), (Token::OpenInner, " }"));
        assert_eq!((token3, input4.1), (Token::CloseInner, ""));
    }

    // A `$`-prefixed word tokenizes as an Ident.
    #[test]
    fn tokenize_4() {
        let input = (0, " $hi_there ");
        assert_eq!(next_token(input), (Token::Ident("$hi_there"), (10, " ")));
    }

    // Backslash escapes keep reserved characters inside an Ident.
    #[test]
    fn tokenize_5() {
        let input = (0, " $hi\\ t\\#he\\[re ");
        assert_eq!(
            next_token(input),
            (Token::Ident("$hi\\ t\\#he\\[re"), (15, " "),)
        );
    }

    // Mixed idents, type names, and leaf brackets; End is returned once
    // the input is exhausted.
    #[test]
    fn tokenize_6() {
        let input1 = (0, " $hi the[re");
        let (token1, input2) = next_token(input1);
        let (token2, input3) = next_token(input2);
        let (token3, input4) = next_token(input3);
        let (token4, input5) = next_token(input4);
        let (token5, input6) = next_token(input5);
        assert_eq!((token1, input2), (Token::Ident("$hi"), (4, " the[re")));
        assert_eq!((token2, input3), (Token::TypeName("the"), (8, "[re")));
        assert_eq!((token3, input4), (Token::OpenLeaf, (9, "re")));
        assert_eq!((token4, input5), (Token::TypeName("re"), (11, "")));
        assert_eq!((token5, input6), (Token::End, (11, "")));
    }

    // A full node with ident, comment, and a nested leaf; byte offsets
    // are tracked across every token.
    #[test]
    fn tokenize_7() {
        let input1 = (0, "Thing $yar { # A comment\n\tThing2 []\n}");
        let (token1, input2) = next_token(input1);
        let (token2, input3) = next_token(input2);
        let (token3, input4) = next_token(input3);
        let (token4, input5) = next_token(input4);
        let (token5, input6) = next_token(input5);
        let (token6, input7) = next_token(input6);
        let (token7, input8) = next_token(input7);
        let (token8, input9) = next_token(input8);
        assert_eq!(
            (token1, input2),
            (
                Token::TypeName("Thing"),
                (5, " $yar { # A comment\n\tThing2 []\n}",)
            )
        );
        assert_eq!(
            (token2, input3),
            (
                Token::Ident("$yar"),
                (10, " { # A comment\n\tThing2 []\n}",)
            )
        );
        assert_eq!(
            (token3, input4),
            (Token::OpenInner, (12, " # A comment\n\tThing2 []\n}",))
        );
        assert_eq!(
            (token4, input5),
            (Token::TypeName("Thing2"), (32, " []\n}"))
        );
        assert_eq!((token5, input6), (Token::OpenLeaf, (34, "]\n}")));
        assert_eq!((token6, input7), (Token::CloseLeaf, (35, "\n}")));
        assert_eq!((token7, input8), (Token::CloseInner, (37, "")));
        assert_eq!((token8, input9), (Token::End, (37, "")));
    }

    // Parsing produces an implicit "ROOT" node with the document's
    // top-level nodes as its children.
    #[test]
    fn parse_1() {
        let input = r#"
Thing {}
"#;
        let dt = DataTree::from_str(input).unwrap();

        // Root
        let (t, i, c) = dt.internal_data_or_panic();
        assert_eq!(t, "ROOT");
        assert_eq!(i, None);
        assert_eq!(c.len(), 1);

        // First (and only) child
        let (t, i, c) = c[0].internal_data_or_panic();
        assert_eq!(t, "Thing");
        assert_eq!(i, None);
        assert_eq!(c.len(), 0);
    }

    // iter_children_with_type() matches both internal and leaf nodes.
    #[test]
    fn iter_1() {
        let dt = DataTree::from_str(
            r#"
A {}
B {}
A []
A {}
B {}
"#,
        )
        .unwrap();
        let i = dt.iter_children_with_type("A");
        assert_eq!(i.count(), 3);
    }

    // iter_internal_children_with_type() matches only `{}` nodes.
    #[test]
    fn iter_2() {
        let dt = DataTree::from_str(
            r#"
A {}
B {}
A []
A {}
B {}
"#,
        )
        .unwrap();
        let i = dt.iter_internal_children_with_type("A");
        assert_eq!(i.count(), 2);
    }

    // iter_leaf_children_with_type() matches only `[]` nodes.
    #[test]
    fn iter_3() {
        let dt = DataTree::from_str(
            r#"
A []
B {}
A {}
A []
B {}
"#,
        )
        .unwrap();
        let i = dt.iter_leaf_children_with_type("A");
        assert_eq!(i.count(), 2);
    }
}

View File

@ -1,8 +1,9 @@
mod parse_utils; pub mod basics;
mod data_tree;
mod psy; mod psy;
mod psy_assembly; mod psy_assembly;
mod psy_light; mod psy_light;
mod psy_mesh_surface; mod psy_mesh_surface;
mod psy_surface_shader; mod psy_surface_shader;
pub use self::psy::parse_scene; pub use self::{data_tree::DataTree, psy::parse_scene};

View File

@ -1,353 +0,0 @@
//! Some basic nom parsers
#![allow(dead_code)]
use std::{
io::BufRead,
str::{self, FromStr},
};
use nom::{
character::complete::{digit1, multispace0, one_of},
combinator::{map_res, opt, recognize},
number::complete::float,
sequence::{delimited, tuple},
IResult,
};
use data_tree::{DataTreeReader, Event};
use super::psy::{PsyError, PsyResult};
// ========================================================
pub fn ws_f32(input: &str) -> IResult<&str, f32, ()> {
delimited(multispace0, float, multispace0)(input)
}
pub fn ws_u32(input: &str) -> IResult<&str, u32, ()> {
map_res(delimited(multispace0, digit1, multispace0), u32::from_str)(input)
}
pub fn ws_usize(input: &str) -> IResult<&str, usize, ()> {
map_res(delimited(multispace0, digit1, multispace0), usize::from_str)(input)
}
pub fn ws_i32(input: &str) -> IResult<&str, i32, ()> {
map_res(
delimited(
multispace0,
recognize(tuple((opt(one_of("-")), digit1))),
multispace0,
),
i32::from_str,
)(input)
}
//---------------------------------------------------------
/// Ensures that we encounter a InnerClose event, and returns a useful
/// error if we don't.
pub fn ensure_close(events: &mut DataTreeReader<impl BufRead>) -> PsyResult<()> {
match events.next_event()? {
Event::InnerClose { .. } => Ok(()),
Event::InnerOpen {
type_name,
byte_offset,
..
} => Err(PsyError::ExpectedInternalNodeClose(
byte_offset,
format!(
"Expected the node to be closed, but instead found a '{}'.",
type_name
),
)),
Event::Leaf {
type_name,
byte_offset,
..
} => Err(PsyError::ExpectedInternalNodeClose(
byte_offset,
format!(
"Expected the node to be closed, but instead found a '{}'.",
type_name
),
)),
_ => Err(PsyError::UnknownError(events.byte_offset())),
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Range {
Full,
From(usize),
To(usize),
Range(usize, usize),
}
impl Range {
pub fn contains(self, n: usize) -> bool {
match self {
Range::Full => true,
Range::From(start) => n >= start,
Range::To(end) => n < end,
Range::Range(start, end) => n >= start && n < end,
}
}
/// Checks if the value is within the upper bound of the range.
/// Ignores any lower bound.
pub fn contains_upper(self, n: usize) -> bool {
match self {
Range::Full | Range::From(_) => true,
Range::To(end) => n < end,
Range::Range(_, end) => n < end,
}
}
pub fn lower(self) -> usize {
match self {
Range::Full => 0,
Range::From(start) => start,
Range::To(_) => 0,
Range::Range(start, _) => start,
}
}
pub fn upper(self) -> usize {
match self {
Range::Full => std::usize::MAX,
Range::From(_) => std::usize::MAX,
Range::To(end) => end,
Range::Range(_, end) => end,
}
}
}
impl std::convert::From<std::ops::RangeFull> for Range {
fn from(_r: std::ops::RangeFull) -> Self {
Range::Full
}
}
impl std::convert::From<std::ops::RangeFrom<usize>> for Range {
fn from(r: std::ops::RangeFrom<usize>) -> Self {
Range::From(r.start)
}
}
impl std::convert::From<std::ops::RangeTo<usize>> for Range {
fn from(r: std::ops::RangeTo<usize>) -> Self {
Range::To(r.end)
}
}
impl std::convert::From<std::ops::Range<usize>> for Range {
fn from(r: std::ops::Range<usize>) -> Self {
Range::Range(r.start, r.end)
}
}
impl std::convert::From<usize> for Range {
fn from(r: usize) -> Self {
Range::Range(r, r + 1)
}
}
/// Acts as an intermediary for parsing, ensuring that the right number of the
/// right subsections are encountered. It loops over subsections, passing
/// through the `events` object untouched, so the passed closure needs to call
/// `next_event`.
///
/// Tracks a maximum of 64 different subsections.
pub fn ensure_subsections<F, DTR: BufRead>(
events: &mut DataTreeReader<DTR>,
subsections: &[(&str, bool, Range)], // (type name, is leaf, valid count range)
mut f: F,
) -> PsyResult<()>
where
F: FnMut(&mut DataTreeReader<DTR>) -> PsyResult<()>,
{
let mut counts = [0usize; 64];
// Loop through our events!
loop {
match events.peek_event()? {
Event::Leaf {
type_name,
byte_offset,
..
} => {
if let Some(idx) = subsections
.iter()
.position(|(n, l, _)| *l == true && n == &type_name)
{
// Increment count and make sure we're within the valid count
// range for this sub-sections.
counts[idx] += 1;
if !subsections[idx].2.contains_upper(counts[idx]) {
return Err(PsyError::WrongNodeCount(
byte_offset,
format!(
"Expected at most {} '{}' leaf nodes but found \
at least {}.",
subsections[idx].2.upper() - 1,
subsections[idx].0,
counts[idx],
),
));
}
// Call handler.
f(events)?
} else {
break;
}
}
Event::InnerOpen {
type_name,
byte_offset,
..
} => {
if let Some(idx) = subsections
.iter()
.position(|(n, l, _)| *l == false && n == &type_name)
{
// Increment count and make sure we're within the valid count
// range for this sub-sections.
counts[idx] += 1;
if !subsections[idx].2.contains_upper(counts[idx]) {
return Err(PsyError::WrongNodeCount(
byte_offset,
format!(
"Expected at most {} internal '{}' node(s) but \
found at least {}.",
subsections[idx].2.upper() - 1,
subsections[idx].0,
counts[idx],
),
));
}
// Call handler.
f(events)?
} else {
break;
}
}
Event::InnerClose { .. } => {
break;
}
Event::EOF => {
break;
}
}
}
// Validation.
for i in 0..subsections.len() {
if !subsections[i].2.contains(counts[i]) {
if subsections[i].1 {
return Err(PsyError::WrongNodeCount(
events.byte_offset(),
format!(
"Expected at least {} '{}' leaf node(s) but only found {}.",
subsections[i].2.lower(),
subsections[i].0,
counts[i],
),
));
} else {
return Err(PsyError::WrongNodeCount(
events.byte_offset(),
format!(
"Expected at least {} internal '{}' node(s) but only found {}.",
subsections[i].2.lower(),
subsections[i].0,
counts[i],
),
));
}
}
}
Ok(())
}
// ========================================================
#[cfg(test)]
mod test {
use super::*;
use nom::{combinator::all_consuming, sequence::tuple};
#[test]
fn ws_u32_1() {
assert_eq!(ws_u32("42"), Ok((&""[..], 42)));
assert_eq!(ws_u32(" 42"), Ok((&""[..], 42)));
assert_eq!(ws_u32("42 "), Ok((&""[..], 42)));
assert_eq!(ws_u32(" 42"), Ok((&""[..], 42)));
assert_eq!(ws_u32(" 42 53"), Ok((&"53"[..], 42)));
}
#[test]
fn ws_usize_1() {
assert_eq!(ws_usize("42"), Ok((&""[..], 42)));
assert_eq!(ws_usize(" 42"), Ok((&""[..], 42)));
assert_eq!(ws_usize("42 "), Ok((&""[..], 42)));
assert_eq!(ws_usize(" 42"), Ok((&""[..], 42)));
assert_eq!(ws_usize(" 42 53"), Ok((&"53"[..], 42)));
}
#[test]
fn ws_i32_1() {
assert_eq!(ws_i32("42"), Ok((&""[..], 42)));
assert_eq!(ws_i32(" 42"), Ok((&""[..], 42)));
assert_eq!(ws_i32("42 "), Ok((&""[..], 42)));
assert_eq!(ws_i32(" 42"), Ok((&""[..], 42)));
assert_eq!(ws_i32(" 42 53"), Ok((&"53"[..], 42)));
}
#[test]
fn ws_i32_2() {
assert_eq!(ws_i32("-42"), Ok((&""[..], -42)));
assert_eq!(ws_i32(" -42"), Ok((&""[..], -42)));
assert_eq!(ws_i32("-42 "), Ok((&""[..], -42)));
assert_eq!(ws_i32(" -42"), Ok((&""[..], -42)));
assert_eq!(ws_i32(" -42 53"), Ok((&"53"[..], -42)));
assert_eq!(ws_i32("--42").is_err(), true);
}
#[test]
fn ws_f32_1() {
assert_eq!(ws_f32("42"), Ok((&""[..], 42.0)));
assert_eq!(ws_f32(" 42"), Ok((&""[..], 42.0)));
assert_eq!(ws_f32("42 "), Ok((&""[..], 42.0)));
assert_eq!(ws_f32(" 42"), Ok((&""[..], 42.0)));
assert_eq!(ws_f32(" 42 53"), Ok((&"53"[..], 42.0)));
}
#[test]
fn ws_f32_2() {
assert_eq!(ws_f32("42.5"), Ok((&""[..], 42.5)));
assert_eq!(ws_f32(" 42.5"), Ok((&""[..], 42.5)));
assert_eq!(ws_f32("42.5 "), Ok((&""[..], 42.5)));
assert_eq!(ws_f32(" 42.5"), Ok((&""[..], 42.5)));
assert_eq!(ws_f32(" 42.5 53"), Ok((&"53"[..], 42.5)));
}
#[test]
fn ws_f32_3() {
assert_eq!(ws_f32("-42.5"), Ok((&""[..], -42.5)));
assert_eq!(ws_f32(" -42.5"), Ok((&""[..], -42.5)));
assert_eq!(ws_f32("-42.5 "), Ok((&""[..], -42.5)));
assert_eq!(ws_f32(" -42.5"), Ok((&""[..], -42.5)));
assert_eq!(ws_f32(" -42.5 53"), Ok((&"53"[..], -42.5)));
}
#[test]
fn ws_f32_4() {
assert_eq!(ws_f32("a1.0").is_err(), true);
assert_eq!(all_consuming(ws_f32)("0abc").is_err(), true);
assert_eq!(tuple((ws_f32, ws_f32))("0.abc 1.2").is_err(), true);
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,151 +1,188 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::{io::BufRead, result::Result}; use std::result::Result;
use kioku::Arena; use kioku::Arena;
use data_tree::{DataTreeReader, Event}; use crate::scene::{Assembly, AssemblyBuilder, Object};
use crate::scene::{Assembly, Object, ObjectData};
use super::{ use super::{
parse_utils::{ensure_close, ensure_subsections}, psy::{parse_matrix, PsyParseError},
psy::{parse_matrix, PsyError, PsyResult},
psy_light::{parse_rectangle_light, parse_sphere_light}, psy_light::{parse_rectangle_light, parse_sphere_light},
psy_mesh_surface::parse_mesh_surface, psy_mesh_surface::parse_mesh_surface,
psy_surface_shader::parse_surface_shader,
DataTree,
}; };
pub fn parse_assembly<'a>( pub fn parse_assembly<'a>(
arena: &'a Arena, arena: &'a Arena,
events: &mut DataTreeReader<impl BufRead>, tree: &'a DataTree,
) -> PsyResult<Assembly<'a>> { ) -> Result<Assembly<'a>, PsyParseError> {
let mut assembly = Assembly::new(); let mut builder = AssemblyBuilder::new(arena);
ensure_subsections(events, &[("Object", false, (1..).into())], |events| {
if let Event::InnerOpen { if tree.is_internal() {
type_name: "Object", for child in tree.iter_children() {
ident, match child.type_name() {
byte_offset, // Sub-Assembly
} = events.next_event()? "Assembly" => {
if let DataTree::Internal {
ident: Some(ident), ..
} = *child
{ {
// Get object identifier. builder.add_assembly(ident, parse_assembly(arena, child)?);
let object_ident = if let Some(id) = ident {
id.to_string()
} else { } else {
return Err(PsyError::ExpectedIdent( return Err(PsyParseError::UnknownError(child.byte_offset()));
byte_offset, }
"\'Object\' types must have an identifier, but the identifier is missing." }
.into(),
)); // Instance
"Instance" => {
// Pre-conditions
if !child.is_internal() {
return Err(PsyParseError::UnknownError(child.byte_offset()));
}
// Get data name
let name = {
if child.iter_leaf_children_with_type("Data").count() != 1 {
return Err(PsyParseError::UnknownError(child.byte_offset()));
}
child.iter_leaf_children_with_type("Data").nth(0).unwrap().1
}; };
// Collect instances. // Get surface shader binding, if any.
let mut instance_xform_idxs = Vec::new(); let surface_shader_name = if child
while let Event::InnerOpen { .iter_leaf_children_with_type("SurfaceShaderBind")
type_name: "Instance", .count()
.. > 0
} = events.peek_event()?
{ {
events.next_event()?; Some(
let xforms_start_idx = assembly.xforms.len(); child
loop { .iter_leaf_children_with_type("SurfaceShaderBind")
match events.next_event()? { .nth(0)
Event::Leaf { .unwrap()
type_name: "Transform", .1,
contents, )
.. } else {
} => { None
assembly.xforms.push(parse_matrix(contents)?);
}
Event::InnerClose { .. } => {
break;
}
_ => {
todo!("Instances can only contain Transforms.");
// Return an error.
}
}
}
instance_xform_idxs.push(xforms_start_idx..assembly.xforms.len());
}
// Get object data.
let object_data = match events.next_event()? {
Event::InnerOpen {
type_name: "MeshSurface",
..
} => ObjectData::Surface(Box::new(parse_mesh_surface(
arena,
&instance_xform_idxs[..],
&assembly.xforms[..],
events,
)?)),
Event::InnerOpen {
type_name: "SphereLight",
..
} => ObjectData::Light(Box::new(parse_sphere_light(arena, events)?)),
Event::InnerOpen {
type_name: "RectangleLight",
..
} => ObjectData::Light(Box::new(parse_rectangle_light(arena, events)?)),
Event::InnerClose { byte_offset } => {
return Err(PsyError::MissingNode(
byte_offset,
"Object contains no object data.".into(),
));
}
Event::InnerOpen {
type_name,
byte_offset,
..
} => {
return Err(PsyError::UnknownVariant(
byte_offset,
format!("Unknown data type '{}' for Object.", type_name),
));
}
_ => {
return Err(PsyError::UnknownError(byte_offset));
}
}; };
// Close object node. // Get xforms
ensure_close(events)?; let mut xforms = Vec::new();
for (_, contents, _) in child.iter_leaf_children_with_type("Transform") {
xforms.push(parse_matrix(contents)?);
}
assembly.objects.insert( // Add instance
object_ident, if builder.name_exists(name) {
Object { builder.add_instance(name, surface_shader_name, Some(&xforms));
data: object_data, } else {
instance_xform_idxs: instance_xform_idxs, return Err(PsyParseError::InstancedMissingData(
}, child.iter_leaf_children_with_type("Data").nth(0).unwrap().2,
"Attempted to add \
instance for data with \
a name that doesn't \
exist.",
name.to_string(),
));
}
}
// SurfaceShader
"SurfaceShader" => {
if let DataTree::Internal {
ident: Some(ident), ..
} = *child
{
builder.add_surface_shader(ident, parse_surface_shader(arena, child)?);
} else {
// TODO: error condition of some kind, because no ident
panic!(
"SurfaceShader encountered that was a leaf, but SurfaceShaders cannot \
be a leaf: {}",
child.byte_offset()
);
}
}
// MeshSurface
"MeshSurface" => {
if let DataTree::Internal {
ident: Some(ident), ..
} = *child
{
builder.add_object(
ident,
Object::Surface(arena.alloc(parse_mesh_surface(arena, child)?)),
); );
} else { } else {
unreachable!() // TODO: error condition of some kind, because no ident
panic!(
"MeshSurface encountered that was a leaf, but MeshSurfaces cannot \
be a leaf: {}",
child.byte_offset()
);
}
} }
Ok(()) // Sphere Light
})?; "SphereLight" => {
if let DataTree::Internal {
ident: Some(ident), ..
} = *child
{
builder.add_object(
ident,
Object::SurfaceLight(arena.alloc(parse_sphere_light(arena, child)?)),
);
} else {
// No ident
return Err(PsyParseError::UnknownError(child.byte_offset()));
}
}
ensure_close(events)?; // Rectangle Light
"RectangleLight" => {
if let DataTree::Internal {
ident: Some(ident), ..
} = *child
{
builder.add_object(
ident,
Object::SurfaceLight(arena.alloc(parse_rectangle_light(arena, child)?)),
);
} else {
// No ident
return Err(PsyParseError::UnknownError(child.byte_offset()));
}
}
// if !tree.is_internal() { _ => {
// return Err(PsyError::UnknownError(tree.byte_offset())); // TODO: some kind of error, because not a known type name
} // // Bilinear Patch
// "BilinearPatch" => {
// assembly->add_object(child.name, parse_bilinear_patch(child));
// } // }
//
// for object in tree.iter_children() { // // Bicubic Patch
// if object.type_name() == "Object" { // else if (child.type == "BicubicPatch") {
// assembly->add_object(child.name, parse_bicubic_patch(child));
// } else {
// // TODO: error.
// } // }
//
// // Subdivision surface
// else if (child.type == "SubdivisionSurface") {
// assembly->add_object(child.name, parse_subdivision_surface(child));
// } // }
//
// // Sphere
// else if (child.type == "Sphere") {
// assembly->add_object(child.name, parse_sphere(child));
// }
}
}
} else {
return Err(PsyParseError::UnknownError(tree.byte_offset()));
}
return Ok(assembly); return Ok(builder.build());
} }

View File

@ -1,198 +1,184 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::{io::BufRead, result::Result}; use std::result::Result;
use nom::{combinator::all_consuming, sequence::tuple, IResult}; use nom::{combinator::all_consuming, sequence::tuple, IResult};
use kioku::Arena; use kioku::Arena;
use data_tree::{DataTreeReader, Event};
use crate::{ use crate::{
light::{DistantDiskLight, RectangleLight, SphereLight}, light::{DistantDiskLight, RectangleLight, SphereLight},
math::Vector, math::Vector,
}; };
use super::{ use super::{
parse_utils::{ensure_close, ensure_subsections, ws_f32}, basics::ws_f32,
psy::{parse_color, PsyError, PsyResult}, psy::{parse_color, PsyParseError},
DataTree,
}; };
pub fn parse_distant_disk_light<'a>( pub fn parse_distant_disk_light<'a>(
arena: &'a Arena, arena: &'a Arena,
events: &mut DataTreeReader<impl BufRead>, tree: &'a DataTree,
_ident: Option<&str>, ) -> Result<DistantDiskLight<'a>, PsyParseError> {
) -> PsyResult<DistantDiskLight<'a>> { if let DataTree::Internal { ref children, .. } = *tree {
let mut radii = Vec::new(); let mut radii = Vec::new();
let mut directions = Vec::new(); let mut directions = Vec::new();
let mut colors = Vec::new(); let mut colors = Vec::new();
// Parse // Parse
let valid_subsections = &[ for child in children.iter() {
("Radius", true, (1..).into()), match *child {
("Direction", true, (1..).into()), // Radius
("Color", true, (1..).into()), DataTree::Leaf {
]; type_name,
ensure_subsections(events, valid_subsections, |events| {
match events.next_event()? {
Event::Leaf {
type_name: "Radius",
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Radius" => {
if let IResult::Ok((_, radius)) = all_consuming(ws_f32)(&contents) { if let IResult::Ok((_, radius)) = all_consuming(ws_f32)(contents) {
radii.push(radius); radii.push(radius);
} else { } else {
// Found radius, but its contents is not in the right format // Found radius, but its contents is not in the right format
return Err(PsyError::IncorrectLeafData( return Err(PsyParseError::UnknownError(byte_offset));
byte_offset,
"Radius data isn't in the right format. It should \
contain a single floating point value."
.into(),
));
} }
} }
// Direction // Direction
Event::Leaf { DataTree::Leaf {
type_name: "Direction", type_name,
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Direction" => {
if let IResult::Ok((_, direction)) = if let IResult::Ok((_, direction)) =
all_consuming(tuple((ws_f32, ws_f32, ws_f32)))(&contents) all_consuming(tuple((ws_f32, ws_f32, ws_f32)))(contents)
{ {
directions.push(Vector::new(direction.0, direction.1, direction.2)); directions.push(Vector::new(direction.0, direction.1, direction.2));
} else { } else {
// Found direction, but its contents is not in the right format // Found direction, but its contents is not in the right format
return Err(PsyError::IncorrectLeafData( return Err(PsyParseError::UnknownError(byte_offset));
byte_offset,
"Direction data isn't in the right format. It should \
contain a single floating point value."
.into(),
));
} }
} }
// Color // Color
Event::Leaf { DataTree::Leaf {
type_name: "Color", type_name,
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Color" => {
colors.push(parse_color(byte_offset, &contents)?); if let Ok(color) = parse_color(contents) {
colors.push(color);
} else {
// Found color, but its contents is not in the right format
return Err(PsyParseError::UnknownError(byte_offset));
}
} }
_ => unreachable!(), _ => {}
}
} }
Ok(())
})?;
ensure_close(events)?;
return Ok(DistantDiskLight::new(arena, &radii, &directions, &colors)); return Ok(DistantDiskLight::new(arena, &radii, &directions, &colors));
} else {
return Err(PsyParseError::UnknownError(tree.byte_offset()));
}
} }
pub fn parse_sphere_light<'a>( pub fn parse_sphere_light<'a>(
arena: &'a Arena, arena: &'a Arena,
events: &mut DataTreeReader<impl BufRead>, tree: &'a DataTree,
) -> Result<SphereLight<'a>, PsyError> { ) -> Result<SphereLight<'a>, PsyParseError> {
if let DataTree::Internal { ref children, .. } = *tree {
let mut radii = Vec::new(); let mut radii = Vec::new();
let mut colors = Vec::new(); let mut colors = Vec::new();
// Parse // Parse
let valid_subsections = &[ for child in children.iter() {
("Radius", true, (1..).into()), match *child {
("Color", true, (1..).into()), // Radius
]; DataTree::Leaf {
ensure_subsections(events, valid_subsections, |events| { type_name,
match events.next_event()? {
Event::Leaf {
type_name: "Radius",
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Radius" => {
if let IResult::Ok((_, radius)) = all_consuming(ws_f32)(&contents) { if let IResult::Ok((_, radius)) = all_consuming(ws_f32)(contents) {
radii.push(radius); radii.push(radius);
} else { } else {
// Found radius, but its contents is not in the right format // Found radius, but its contents is not in the right format
return Err(PsyError::IncorrectLeafData( return Err(PsyParseError::UnknownError(byte_offset));
byte_offset,
"Radius data isn't in the right format. It should \
contain a single floating point value."
.into(),
));
} }
} }
// Color // Color
Event::Leaf { DataTree::Leaf {
type_name: "Color", type_name,
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Color" => {
colors.push(parse_color(byte_offset, &contents)?); if let Ok(color) = parse_color(contents) {
colors.push(color);
} else {
// Found color, but its contents is not in the right format
return Err(PsyParseError::UnknownError(byte_offset));
}
} }
_ => unreachable!(), _ => {}
}
} }
Ok(())
})?;
ensure_close(events)?;
return Ok(SphereLight::new(arena, &radii, &colors)); return Ok(SphereLight::new(arena, &radii, &colors));
} else {
return Err(PsyParseError::UnknownError(tree.byte_offset()));
}
} }
pub fn parse_rectangle_light<'a>( pub fn parse_rectangle_light<'a>(
arena: &'a Arena, arena: &'a Arena,
events: &mut DataTreeReader<impl BufRead>, tree: &'a DataTree,
) -> Result<RectangleLight<'a>, PsyError> { ) -> Result<RectangleLight<'a>, PsyParseError> {
if let DataTree::Internal { ref children, .. } = *tree {
let mut dimensions = Vec::new(); let mut dimensions = Vec::new();
let mut colors = Vec::new(); let mut colors = Vec::new();
// Parse // Parse
let valid_subsections = &[ for child in children.iter() {
("Dimensions", true, (1..).into()), match *child {
("Color", true, (1..).into()),
];
ensure_subsections(events, valid_subsections, |events| {
match events.next_event()? {
// Dimensions // Dimensions
Event::Leaf { DataTree::Leaf {
type_name: "Dimensions", type_name,
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Dimensions" => {
if let IResult::Ok((_, radius)) = all_consuming(tuple((ws_f32, ws_f32)))(&contents) if let IResult::Ok((_, radius)) =
all_consuming(tuple((ws_f32, ws_f32)))(contents)
{ {
dimensions.push(radius); dimensions.push(radius);
} else { } else {
// Found dimensions, but its contents is not in the right format // Found dimensions, but its contents is not in the right format
return Err(PsyError::IncorrectLeafData( return Err(PsyParseError::UnknownError(byte_offset));
byte_offset,
"Dimensions data isn't in the right format. It should \
contain two space-separated floating point values."
.into(),
));
} }
} }
// Color // Color
Event::Leaf { DataTree::Leaf {
type_name: "Color", type_name,
contents, contents,
byte_offset, byte_offset,
} => { } if type_name == "Color" => {
colors.push(parse_color(byte_offset, &contents)?); if let Ok(color) = parse_color(contents) {
colors.push(color);
} else {
// Found color, but its contents is not in the right format
return Err(PsyParseError::UnknownError(byte_offset));
}
} }
_ => unreachable!(), _ => {}
}
} }
Ok(())
})?;
ensure_close(events)?;
return Ok(RectangleLight::new(arena, &dimensions, &colors)); return Ok(RectangleLight::new(arena, &dimensions, &colors));
} else {
return Err(PsyParseError::UnknownError(tree.byte_offset()));
}
} }

View File

@ -1,153 +1,72 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::{io::BufRead, result::Result}; use std::result::Result;
use nom::{sequence::tuple, IResult}; use nom::{sequence::tuple, IResult};
use kioku::Arena; use kioku::Arena;
use data_tree::{DataTreeReader, Event};
use crate::{ use crate::{
math::{Normal, Point, Transform}, math::{Normal, Point},
surface::triangle_mesh::TriangleMesh, surface::triangle_mesh::TriangleMesh,
}; };
use super::{ use super::{
parse_utils::{ensure_close, ensure_subsections, ws_f32, ws_usize}, basics::{ws_f32, ws_usize},
psy::{PsyError, PsyResult}, psy::PsyParseError,
DataTree,
}; };
// pub struct TriangleMesh {
// time_samples: usize,
// geo: Vec<(Point, Point, Point)>,
// indices: Vec<usize>,
// accel: BVH,
// }
pub fn parse_mesh_surface<'a>( pub fn parse_mesh_surface<'a>(
arena: &'a Arena, arena: &'a Arena,
instance_xform_idxs: &[std::ops::Range<usize>], tree: &'a DataTree,
xforms: &[Transform], ) -> Result<TriangleMesh<'a>, PsyParseError> {
events: &mut DataTreeReader<impl BufRead>,
) -> PsyResult<TriangleMesh<'a>> {
let mut verts = Vec::new(); // Vec of vecs, one for each time sample let mut verts = Vec::new(); // Vec of vecs, one for each time sample
let mut normals = Vec::new(); // Vec of vecs, on for each time sample let mut normals = Vec::new(); // Vec of vecs, on for each time sample
let mut face_vert_counts = Vec::new(); let mut face_vert_counts = Vec::new();
let mut face_vert_indices = Vec::new(); let mut face_vert_indices = Vec::new();
let valid_subsections = &[ // TODO: make sure there are the right number of various children,
("SurfaceShaderBind", true, (1).into()), // and other validation.
("Vertices", true, (1..).into()),
("Normals", true, (..).into()),
("FaceVertCounts", true, (1).into()),
("FaceVertIndices", true, (1).into()),
];
ensure_subsections(events, valid_subsections, |events| {
match events.next_event()? {
Event::Leaf {
type_name: "SurfaceShaderBind",
..
} => {
// TODO
}
Event::Leaf { // Get verts
type_name: "Vertices", for (_, mut text, _) in tree.iter_leaf_children_with_type("Vertices") {
contents,
byte_offset,
} => {
// Collect verts for this time sample // Collect verts for this time sample
let mut text = contents;
let mut tverts = Vec::new(); let mut tverts = Vec::new();
while let IResult::Ok((remaining, vert)) = tuple((ws_f32, ws_f32, ws_f32))(text) { while let IResult::Ok((remaining, vert)) = tuple((ws_f32, ws_f32, ws_f32))(text) {
text = remaining; text = remaining;
tverts.push(Point::new(vert.0, vert.1, vert.2)); tverts.push(Point::new(vert.0, vert.1, vert.2));
} }
if !text.is_empty() {
return Err(PsyError::IncorrectLeafData(
byte_offset,
"Vertices are not in the right format. Each vertex \
must be specified by three decimal values."
.into(),
));
}
verts.push(tverts); verts.push(tverts);
} }
Event::Leaf { // Make sure all time samples have same vert count
type_name: "Normals", let vert_count = verts[0].len();
contents, for vs in &verts {
byte_offset, assert_eq!(vert_count, vs.len());
} => { }
// Get normals, if they exist
for (_, mut text, _) in tree.iter_leaf_children_with_type("Normals") {
// Collect normals for this time sample // Collect normals for this time sample
let mut text = contents;
let mut tnormals = Vec::new(); let mut tnormals = Vec::new();
while let IResult::Ok((remaining, nor)) = tuple((ws_f32, ws_f32, ws_f32))(text) { while let IResult::Ok((remaining, nor)) = tuple((ws_f32, ws_f32, ws_f32))(text) {
text = remaining; text = remaining;
tnormals.push(Normal::new(nor.0, nor.1, nor.2).normalized()); tnormals.push(Normal::new(nor.0, nor.1, nor.2).normalized());
} }
if !text.is_empty() {
return Err(PsyError::IncorrectLeafData(
byte_offset,
"Normals are not in the right format. Each normal \
must be specified by three decimal values."
.into(),
));
}
normals.push(tnormals); normals.push(tnormals);
} }
Event::Leaf { // Make sure normal's time samples and vert count match the vertices
type_name: "FaceVertCounts",
contents,
byte_offset,
} => {
let mut text = contents;
while let IResult::Ok((remaining, count)) = ws_usize(text) {
text = remaining;
face_vert_counts.push(count);
}
if !text.is_empty() {
return Err(PsyError::IncorrectLeafData(
byte_offset,
"FaceVertCounts are not in the right format. Should be \
a simple list of space-separated integers."
.into(),
));
}
}
Event::Leaf {
type_name: "FaceVertIndices",
contents,
byte_offset,
} => {
let mut text = contents;
while let IResult::Ok((remaining, index)) = ws_usize(text) {
text = remaining;
face_vert_indices.push(index);
}
if !text.is_empty() {
return Err(PsyError::IncorrectLeafData(
byte_offset,
"FaceVertCounts are not in the right format. Should be \
a simple list of space-separated integers."
.into(),
));
}
}
_ => unreachable!(),
}
Ok(())
})?;
ensure_close(events)?;
// Validation: make sure all time samples have same vert count.
let vert_count = verts[0].len();
for vs in &verts {
assert_eq!(vert_count, vs.len());
}
// Validation: make sure normal's time samples and vert count match
// the vertices.
if !normals.is_empty() { if !normals.is_empty() {
assert_eq!(normals.len(), verts.len()); assert_eq!(normals.len(), verts.len());
for ns in &normals { for ns in &normals {
@ -155,6 +74,24 @@ pub fn parse_mesh_surface<'a>(
} }
} }
// Get face vert counts
if let Some((_, mut text, _)) = tree.iter_leaf_children_with_type("FaceVertCounts").nth(0) {
while let IResult::Ok((remaining, count)) = ws_usize(text) {
text = remaining;
face_vert_counts.push(count);
}
}
// Get face vert indices
if let Some((_, mut text, _)) = tree.iter_leaf_children_with_type("FaceVertIndices").nth(0) {
while let IResult::Ok((remaining, index)) = ws_usize(text) {
text = remaining;
face_vert_indices.push(index);
}
}
// Build triangle mesh // Build triangle mesh
let mut tri_vert_indices = Vec::new(); let mut tri_vert_indices = Vec::new();
let mut ii = 0; let mut ii = 0;

View File

@ -1,18 +1,17 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::{io::BufRead, result::Result}; use std::result::Result;
use nom::{combinator::all_consuming, IResult}; use nom::{combinator::all_consuming, IResult};
use kioku::Arena; use kioku::Arena;
use data_tree::{DataTreeReader, Event};
use crate::shading::{SimpleSurfaceShader, SurfaceShader}; use crate::shading::{SimpleSurfaceShader, SurfaceShader};
use super::{ use super::{
parse_utils::{ensure_close, ensure_subsections, ws_f32}, basics::ws_f32,
psy::{parse_color, PsyError, PsyResult}, psy::{parse_color, PsyParseError},
DataTree,
}; };
// pub struct TriangleMesh { // pub struct TriangleMesh {
@ -22,151 +21,118 @@ use super::{
// accel: BVH, // accel: BVH,
// } // }
pub fn parse_surface_shader( pub fn parse_surface_shader<'a>(
_arena: &Arena, arena: &'a Arena,
events: &mut DataTreeReader<impl BufRead>, tree: &'a DataTree,
_ident: Option<&str>, ) -> Result<&'a dyn SurfaceShader, PsyParseError> {
) -> PsyResult<Box<dyn SurfaceShader>> { let type_name = if let Some((_, text, _)) = tree.iter_leaf_children_with_type("Type").nth(0) {
// Get shader type. text.trim()
let shader = match events.next_event()? {
Event::Leaf {
type_name: "Type",
contents: "Lambert",
..
} => {
let color = if let Event::Leaf {
type_name: "Color",
contents,
byte_offset,
} = events.next_event()?
{
parse_color(byte_offset, contents)?
} else { } else {
return Err(PsyError::MissingNode( return Err(PsyParseError::MissingNode(
events.byte_offset(), tree.byte_offset(),
"Expected a Color field in Lambert SurfaceShader.".into(), "Expected a Type field in SurfaceShader.",
)); ));
}; };
// Close shader node. let shader = match type_name {
ensure_close(events)?; "Lambert" => {
let color = if let Some((_, contents, byte_offset)) =
tree.iter_leaf_children_with_type("Color").nth(0)
{
if let Ok(color) = parse_color(contents) {
color
} else {
// Found color, but its contents is not in the right format
return Err(PsyParseError::UnknownError(byte_offset));
}
} else {
return Err(PsyParseError::MissingNode(
tree.byte_offset(),
"Expected a Color field in Lambert SurfaceShader.",
));
};
Box::new(SimpleSurfaceShader::Lambert { color: color }) arena.alloc(SimpleSurfaceShader::Lambert { color: color })
} }
Event::Leaf { "GGX" => {
type_name: "Type",
contents: "GGX",
..
} => {
let mut color = None;
let mut roughness = None;
let mut fresnel = None;
let valid_subsections = &[
("Color", true, (1).into()),
("Roughness", true, (1).into()),
("Fresnel", true, (1).into()),
];
ensure_subsections(events, valid_subsections, |events| {
match events.next_event()? {
// Color // Color
Event::Leaf { let color = if let Some((_, contents, byte_offset)) =
type_name: "Color", tree.iter_leaf_children_with_type("Color").nth(0)
contents, {
byte_offset, if let Ok(color) = parse_color(contents) {
} => { color
color = Some(parse_color(byte_offset, contents)?); } else {
// Found color, but its contents is not in the right format
return Err(PsyParseError::UnknownError(byte_offset));
} }
} else {
return Err(PsyParseError::MissingNode(
tree.byte_offset(),
"Expected a Color field in GTR SurfaceShader.",
));
};
// Roughness // Roughness
Event::Leaf { let roughness = if let Some((_, contents, byte_offset)) =
type_name: "Roughness", tree.iter_leaf_children_with_type("Roughness").nth(0)
contents, {
byte_offset, if let IResult::Ok((_, roughness)) = all_consuming(ws_f32)(contents) {
} => { roughness
if let IResult::Ok((_, rgh)) = all_consuming(ws_f32)(contents) {
roughness = Some(rgh);
} else { } else {
return Err(PsyError::IncorrectLeafData( return Err(PsyParseError::UnknownError(byte_offset));
byte_offset, }
"Roughness data isn't in the right format. It \ } else {
should contain a single floating point value." return Err(PsyParseError::MissingNode(
.into(), tree.byte_offset(),
"Expected a Roughness field in GTR SurfaceShader.",
)); ));
} };
}
// Fresnel // Fresnel
Event::Leaf { let fresnel = if let Some((_, contents, byte_offset)) =
type_name: "Fresnel", tree.iter_leaf_children_with_type("Fresnel").nth(0)
contents, {
byte_offset, if let IResult::Ok((_, fresnel)) = all_consuming(ws_f32)(contents) {
} => { fresnel
if let IResult::Ok((_, frs)) = all_consuming(ws_f32)(contents) {
fresnel = Some(frs);
} else { } else {
return Err(PsyError::IncorrectLeafData( return Err(PsyParseError::UnknownError(byte_offset));
byte_offset, }
"Fresnel data isn't in the right format. It \ } else {
should contain a single floating point value." return Err(PsyParseError::MissingNode(
.into(), tree.byte_offset(),
"Expected a Fresnel field in GTR SurfaceShader.",
)); ));
} };
}
_ => unreachable!(), arena.alloc(SimpleSurfaceShader::GGX {
} color: color,
Ok(()) roughness: roughness,
})?; fresnel: fresnel,
ensure_close(events)?;
Box::new(SimpleSurfaceShader::GGX {
color: color.unwrap(),
roughness: roughness.unwrap(),
fresnel: fresnel.unwrap(),
}) })
} }
Event::Leaf { "Emit" => {
type_name: "Type", let color = if let Some((_, contents, byte_offset)) =
contents: "Emit", tree.iter_leaf_children_with_type("Color").nth(0)
..
} => {
let color = if let Event::Leaf {
type_name: "Color",
contents,
byte_offset,
} = events.next_event()?
{ {
parse_color(byte_offset, contents)? if let Ok(color) = parse_color(contents) {
color
} else { } else {
return Err(PsyError::MissingNode( // Found color, but its contents is not in the right format
events.byte_offset(), return Err(PsyParseError::UnknownError(byte_offset));
"Expected a Color field in Emit SurfaceShader.".into(), }
} else {
return Err(PsyParseError::MissingNode(
tree.byte_offset(),
"Expected a Color field in Emit SurfaceShader.",
)); ));
}; };
// Close shader node. arena.alloc(SimpleSurfaceShader::Emit { color: color })
ensure_close(events)?;
Box::new(SimpleSurfaceShader::Emit { color: color })
} }
Event::Leaf { _ => unimplemented!(),
type_name: "Type",
byte_offset,
..
} => {
return Err(PsyError::MissingNode(
byte_offset,
"Unknown SurfaceShader type.".into(),
));
}
_ => {
todo!(); // Return error.
}
}; };
Ok(shader) Ok(shader)

View File

@ -18,7 +18,7 @@ use crate::{
hash::hash_u32, hash::hash_u32,
hilbert, hilbert,
image::Image, image::Image,
math::{probit, upper_power_of_two}, math::probit,
mis::power_heuristic, mis::power_heuristic,
ray::{Ray, RayBatch}, ray::{Ray, RayBatch},
scene::{Scene, SceneLightSample}, scene::{Scene, SceneLightSample},
@ -151,7 +151,7 @@ impl<'a> Renderer<'a> {
let bucket_count_x = ((width / bucket_w) + 1) as u32; let bucket_count_x = ((width / bucket_w) + 1) as u32;
let bucket_count_y = ((height / bucket_h) + 1) as u32; let bucket_count_y = ((height / bucket_h) + 1) as u32;
let larger = cmp::max(bucket_count_x, bucket_count_y); let larger = cmp::max(bucket_count_x, bucket_count_y);
let pow2 = upper_power_of_two(larger); let pow2 = larger.next_power_of_two();
pow2 * pow2 pow2 * pow2
}; };
for hilbert_d in 0..bucket_n { for hilbert_d in 0..bucket_n {

View File

@ -1,35 +1,417 @@
use std::{collections::HashMap, ops::Range}; use std::collections::HashMap;
use crate::{light::SurfaceLight, math::Transform, surface::Surface}; use kioku::Arena;
/// Stores the objects of a scene and its acceleration structures. use crate::{
#[derive(Debug)] accel::BVH4,
accel::{LightAccel, LightTree},
bbox::{transform_bbox_slice_from, BBox},
boundable::Boundable,
color::SpectralSample,
lerp::lerp_slice,
light::SurfaceLight,
math::{Normal, Point, Transform},
shading::SurfaceShader,
surface::{Surface, SurfaceIntersection},
transform_stack::TransformStack,
};
#[derive(Copy, Clone, Debug)]
pub struct Assembly<'a> { pub struct Assembly<'a> {
pub objects: HashMap<String, Object<'a>>, // Name, Object. // Instance list
pub xforms: Vec<Transform>, pub instances: &'a [Instance],
pub light_instances: &'a [Instance],
pub xforms: &'a [Transform],
// Surface shader list
pub surface_shaders: &'a [&'a dyn SurfaceShader],
// Object list
pub objects: &'a [Object<'a>],
// Assembly list
pub assemblies: &'a [Assembly<'a>],
// Object accel
pub object_accel: BVH4<'a>,
// Light accel
pub light_accel: LightTree<'a>,
} }
// TODO: actually fix this clippy warning, rather than `allow`ing it.
#[allow(clippy::type_complexity)]
impl<'a> Assembly<'a> { impl<'a> Assembly<'a> {
pub fn new() -> Assembly<'a> { // Returns (light_color, (sample_point, normal, point_err), pdf, selection_pdf)
Assembly { pub fn sample_lights(
objects: HashMap::new(), &self,
xform_stack: &mut TransformStack,
n: f32,
uvw: (f32, f32, f32),
wavelength: f32,
time: f32,
intr: &SurfaceIntersection,
) -> Option<(SpectralSample, (Point, Normal, f32), f32, f32)> {
if let SurfaceIntersection::Hit {
intersection_data: idata,
closure,
} = *intr
{
let sel_xform = if !xform_stack.top().is_empty() {
lerp_slice(xform_stack.top(), time)
} else {
Transform::new()
};
if let Some((light_i, sel_pdf, whittled_n)) = self.light_accel.select(
idata.incoming * sel_xform,
idata.pos * sel_xform,
idata.nor * sel_xform,
idata.nor_g * sel_xform,
&closure,
time,
n,
) {
let inst = self.light_instances[light_i];
match inst.instance_type {
InstanceType::Object => {
match self.objects[inst.data_index] {
Object::SurfaceLight(light) => {
// Get the world-to-object space transform of the light
let xform = if let Some((a, b)) = inst.transform_indices {
let pxforms = xform_stack.top();
let xform = lerp_slice(&self.xforms[a..b], time);
if !pxforms.is_empty() {
lerp_slice(pxforms, time) * xform
} else {
xform
}
} else {
let pxforms = xform_stack.top();
if !pxforms.is_empty() {
lerp_slice(pxforms, time)
} else {
Transform::new()
}
};
// Sample the light
let (color, sample_geo, pdf) = light.sample_from_point(
&xform, idata.pos, uvw.0, uvw.1, wavelength, time,
);
return Some((color, sample_geo, pdf, sel_pdf));
}
_ => unimplemented!(),
}
}
InstanceType::Assembly => {
// Push the world-to-object space transforms of the assembly onto
// the transform stack.
if let Some((a, b)) = inst.transform_indices {
xform_stack.push(&self.xforms[a..b]);
}
// Sample sub-assembly lights
let sample = self.assemblies[inst.data_index].sample_lights(
xform_stack,
whittled_n,
uvw,
wavelength,
time,
intr,
);
// Pop the assembly's transforms off the transform stack.
if inst.transform_indices.is_some() {
xform_stack.pop();
}
// Return sample
return sample.map(|(ss, v, pdf, spdf)| (ss, v, pdf, spdf * sel_pdf));
}
}
} else {
None
}
} else {
None
}
}
}
impl<'a> Boundable for Assembly<'a> {
fn bounds(&self) -> &[BBox] {
self.object_accel.bounds()
}
}
#[derive(Debug)]
pub struct AssemblyBuilder<'a> {
arena: &'a Arena,
// Instance list
instances: Vec<Instance>,
xforms: Vec<Transform>,
// Shader list
surface_shaders: Vec<&'a dyn SurfaceShader>,
surface_shader_map: HashMap<String, usize>, // map Name -> Index
// Object list
objects: Vec<Object<'a>>,
object_map: HashMap<String, usize>, // map Name -> Index
// Assembly list
assemblies: Vec<Assembly<'a>>,
assembly_map: HashMap<String, usize>, // map Name -> Index
}
impl<'a> AssemblyBuilder<'a> {
pub fn new(arena: &'a Arena) -> AssemblyBuilder<'a> {
AssemblyBuilder {
arena: arena,
instances: Vec::new(),
xforms: Vec::new(), xforms: Vec::new(),
surface_shaders: Vec::new(),
surface_shader_map: HashMap::new(),
objects: Vec::new(),
object_map: HashMap::new(),
assemblies: Vec::new(),
assembly_map: HashMap::new(),
} }
} }
/// Registers a surface shader under `name` so later instances can
/// reference it by name.
///
/// Panics if a shader with the same name was already added.
pub fn add_surface_shader(&mut self, name: &str, shader: &'a dyn SurfaceShader) {
    // Reject duplicate names up front.
    assert!(
        !self.surface_shader_map.contains_key(name),
        "Attempted to add surface shader to assembly with a name that already exists."
    );

    // Record the shader and remember its index under `name`.
    let index = self.surface_shaders.len();
    self.surface_shader_map.insert(name.to_string(), index);
    self.surface_shaders.push(shader);
}
/// Adds an object to the assembly under `name`.
///
/// Panics if `name` is already taken by an object or sub-assembly.
pub fn add_object(&mut self, name: &str, obj: Object<'a>) {
    // Names must be unique across both objects and assemblies.
    assert!(
        !self.name_exists(name),
        "Attempted to add object to assembly with a name that already exists."
    );

    // Record the object and remember its index under `name`.
    let index = self.objects.len();
    self.object_map.insert(name.to_string(), index);
    self.objects.push(obj);
}
/// Adds a sub-assembly to this assembly under `name`.
///
/// Panics if `name` is already taken by an object or sub-assembly.
pub fn add_assembly(&mut self, name: &str, asmb: Assembly<'a>) {
    // Names must be unique across both objects and assemblies.
    assert!(
        !self.name_exists(name),
        "Attempted to add assembly to another assembly with a name that already exists."
    );

    // Record the sub-assembly and remember its index under `name`.
    let index = self.assemblies.len();
    self.assembly_map.insert(name.to_string(), index);
    self.assemblies.push(asmb);
}
/// Creates an instance of a previously-added object or sub-assembly.
///
/// `name` must refer to an existing object or assembly (panics otherwise).
/// `surface_shader_name`, if given, must refer to a shader added via
/// `add_surface_shader` (panics otherwise).  `xforms` are the instance's
/// time-sampled world-to-object transforms; `None` or an empty slice means
/// no transform.
pub fn add_instance(
    &mut self,
    name: &str,
    surface_shader_name: Option<&str>,
    xforms: Option<&[Transform]>,
) {
    // Make sure name exists.
    if !self.name_exists(name) {
        panic!("Attempted to add instance with a name that doesn't exist.");
    }

    // Map zero-length transforms to None so downstream code only has
    // one "no transform" representation to deal with.
    let xforms = xforms.filter(|xf| !xf.is_empty());

    // Resolve the shader name to its index, if any.
    let surface_shader_index = surface_shader_name.map(|name| {
        *self
            .surface_shader_map
            .get(name)
            .unwrap_or_else(|| panic!("Unknown surface shader '{}'.", name))
    });

    // The instance's transforms will be appended to `self.xforms` below,
    // so its index range starts at the current end of that list.
    let transform_indices = xforms.map(|xf| (self.xforms.len(), self.xforms.len() + xf.len()));

    // An instance refers either to an object or to a sub-assembly;
    // everything else about the Instance record is identical.
    let (instance_type, data_index) = if self.object_map.contains_key(name) {
        (InstanceType::Object, self.object_map[name])
    } else {
        (InstanceType::Assembly, self.assembly_map[name])
    };
    self.instances.push(Instance {
        instance_type,
        data_index,
        surface_shader_index,
        id: self.instances.len(),
        transform_indices,
    });

    // Store transforms.
    if let Some(xf) = xforms {
        self.xforms.extend(xf);
    }
}
/// Reports whether `name` is already used by an object or a sub-assembly.
pub fn name_exists(&self, name: &str) -> bool {
    // A name is taken if either map knows it.
    let is_object = self.object_map.contains_key(name);
    let is_assembly = self.assembly_map.contains_key(name);
    is_object || is_assembly
}
/// Consumes the builder and produces the final `Assembly`, building its
/// object and light acceleration structures and copying all collected
/// data into the arena.
pub fn build(mut self) -> Assembly<'a> {
    // Calculate instance bounds, used for building object accel and light accel.
    let (bis, bbs) = self.instance_bounds();

    // Build object accel.
    let object_accel = BVH4::from_objects(self.arena, &mut self.instances[..], 1, |inst| {
        &bbs[bis[inst.id]..bis[inst.id + 1]]
    });

    // Get list of instances that are for light sources or assemblies that
    // contain light sources (approximate_energy > 0 implies the
    // sub-assembly's light tree holds at least one emitter).
    let mut light_instances: Vec<_> = self
        .instances
        .iter()
        .filter(|inst| match inst.instance_type {
            InstanceType::Object => {
                matches!(self.objects[inst.data_index], Object::SurfaceLight(_))
            }
            InstanceType::Assembly => {
                self.assemblies[inst.data_index]
                    .light_accel
                    .approximate_energy()
                    > 0.0
            }
        })
        .cloned()
        .collect();

    // Build light accel, keyed on each light instance's bounds and
    // approximate emitted energy.
    let light_accel = LightTree::from_objects(self.arena, &mut light_instances[..], |inst| {
        let bounds = &bbs[bis[inst.id]..bis[inst.id + 1]];
        let energy = match inst.instance_type {
            InstanceType::Object => {
                if let Object::SurfaceLight(light) = self.objects[inst.data_index] {
                    light.approximate_energy()
                } else {
                    0.0
                }
            }
            InstanceType::Assembly => self.assemblies[inst.data_index]
                .light_accel
                .approximate_energy(),
        };
        (bounds, energy)
    });

    Assembly {
        instances: self.arena.copy_slice(&self.instances),
        light_instances: self.arena.copy_slice(&light_instances),
        xforms: self.arena.copy_slice(&self.xforms),
        surface_shaders: self.arena.copy_slice(&self.surface_shaders),
        objects: self.arena.copy_slice(&self.objects),
        assemblies: self.arena.copy_slice(&self.assemblies),
        object_accel,
        light_accel,
    }
}
/// Returns a pair of vectors with the bounds of all instances.
/// This is used for building the assembly's BVH4.
///
/// The first vector is an index list: instance `i`'s (possibly
/// time-sampled) bounding boxes live at `bounds[indices[i]..indices[i+1]]`
/// in the second vector.
fn instance_bounds(&self) -> (Vec<usize>, Vec<BBox>) {
    // Seeded with 0 so `indices[i]..indices[i+1]` is valid for the first
    // instance too.
    let mut indices = vec![0];
    let mut bounds = Vec::new();

    for inst in &self.instances {
        // `bbs` holds the untransformed boxes, `bbs2` the transformed ones.
        let mut bbs = Vec::new();
        let mut bbs2 = Vec::new();

        // Get bounding boxes
        match inst.instance_type {
            InstanceType::Object => {
                // Push bounds onto bbs
                let obj = &self.objects[inst.data_index];
                match *obj {
                    Object::Surface(s) => bbs.extend(s.bounds()),
                    Object::SurfaceLight(l) => bbs.extend(l.bounds()),
                }
            }

            InstanceType::Assembly => {
                // Push bounds onto bbs
                let asmb = &self.assemblies[inst.data_index];
                bbs.extend(asmb.bounds());
            }
        }

        // Transform the bounding boxes, if necessary
        if let Some((xstart, xend)) = inst.transform_indices {
            let xf = &self.xforms[xstart..xend];
            transform_bbox_slice_from(&bbs, xf, &mut bbs2);
        } else {
            // NOTE(review): `bbs2` is freshly created above, so this
            // `clear()` is redundant but harmless.
            bbs2.clear();
            bbs2.extend(bbs);
        }

        // Push transformed bounds onto vec
        bounds.extend(bbs2);
        indices.push(bounds.len());
    }

    (indices, bounds)
}
} }
#[derive(Debug)] #[derive(Copy, Clone, Debug)]
pub struct Object<'a> { pub enum Object<'a> {
pub data: ObjectData<'a>, Surface(&'a dyn Surface),
SurfaceLight(&'a dyn SurfaceLight),
// One range per instance, indexing into the assembly's xforms
// array. An empty Vec means a single instance with no transforms.
pub instance_xform_idxs: Vec<Range<usize>>,
} }
#[derive(Debug)] #[derive(Debug, Copy, Clone)]
pub enum ObjectData<'a> { pub struct Instance {
Empty, pub instance_type: InstanceType,
Surface(Box<dyn Surface + 'a>), pub data_index: usize,
Light(Box<dyn SurfaceLight + 'a>), pub surface_shader_index: Option<usize>,
pub id: usize,
pub transform_indices: Option<(usize, usize)>,
}
#[derive(Debug, Copy, Clone)]
pub enum InstanceType {
Object,
Assembly,
} }

View File

@ -1,19 +1,151 @@
mod assembly; mod assembly;
mod world; mod world;
use std::collections::HashMap; use crate::{
accel::LightAccel,
use crate::{camera::Camera, shading::SurfaceShader}; algorithm::weighted_choice,
camera::Camera,
color::SpectralSample,
math::{Normal, Point, Vector},
surface::SurfaceIntersection,
transform_stack::TransformStack,
};
pub use self::{ pub use self::{
assembly::{Assembly, Object, ObjectData}, assembly::{Assembly, AssemblyBuilder, InstanceType, Object},
world::World, world::World,
}; };
#[derive(Debug)] #[derive(Debug)]
pub struct Scene<'a> { pub struct Scene<'a> {
pub name: Option<String>,
pub camera: Camera<'a>, pub camera: Camera<'a>,
pub world: World<'a>, pub world: World<'a>,
pub shaders: HashMap<String, Box<dyn SurfaceShader>>, // Name, Shader pub root: Assembly<'a>,
pub root_assembly: Assembly<'a>, }
impl<'a> Scene<'a> {
/// Selects and samples one light in the scene for the shading point in
/// `intr`, choosing between the world's distant lights and the root
/// assembly's local lights.
///
/// - `n`: a random number in [0, 1) used for light selection; it is
///   rescaled after each selection decision so the remaining bits can be
///   reused further down the hierarchy.
/// - `uvw`: random numbers for sampling a point on the chosen light.
/// - `wavelength`/`time`: the wavelength and time of the sample.
///
/// Returns `SceneLightSample::None` when there are no lights with energy
/// (or when local-light sampling fails); otherwise a `Distant` or
/// `Surface` sample whose `selection_pdf` includes the probability of the
/// world-vs-local choice made here.
pub fn sample_lights(
    &self,
    xform_stack: &mut TransformStack,
    n: f32,
    uvw: (f32, f32, f32),
    wavelength: f32,
    time: f32,
    intr: &SurfaceIntersection,
) -> SceneLightSample {
    // TODO: this just selects between world lights and local lights
    // with a 50/50 chance.  We should do something more sophisticated
    // than this, accounting for the estimated impact of the lights
    // on the point being lit.

    // Calculate relative probabilities of traversing into world lights
    // or local lights.  Each group's "energy" is collapsed to 0.0 or 1.0:
    // only presence/absence of emitters matters for the 50/50 split.
    let wl_energy = if self
        .world
        .lights
        .iter()
        .fold(0.0, |energy, light| energy + light.approximate_energy())
        <= 0.0
    {
        0.0
    } else {
        1.0
    };
    let ll_energy = if self.root.light_accel.approximate_energy() <= 0.0 {
        0.0
    } else {
        1.0
    };
    let tot_energy = wl_energy + ll_energy;

    // Decide either world or local lights, and select and sample a light.
    if tot_energy <= 0.0 {
        // No lights at all.
        return SceneLightSample::None;
    } else {
        let wl_prob = wl_energy / tot_energy;

        if n < wl_prob {
            // World lights.  Rescale `n` back to [0, 1) before reusing it
            // for the weighted choice among world lights.
            let n = n / wl_prob;
            let (i, p) = weighted_choice(self.world.lights, n, |l| l.approximate_energy());
            let (ss, sv, pdf) =
                self.world.lights[i].sample_from_point(uvw.0, uvw.1, wavelength, time);
            return SceneLightSample::Distant {
                color: ss,
                direction: sv,
                pdf: pdf,
                selection_pdf: p * wl_prob,
            };
        } else {
            // Local lights.  Rescale `n` to [0, 1) for the assembly's
            // light-tree traversal.
            let n = (n - wl_prob) / (1.0 - wl_prob);
            if let Some((ss, sgeo, pdf, spdf)) =
                self.root
                    .sample_lights(xform_stack, n, uvw, wavelength, time, intr)
            {
                return SceneLightSample::Surface {
                    color: ss,
                    sample_geo: sgeo,
                    pdf: pdf,
                    selection_pdf: spdf * (1.0 - wl_prob),
                };
            } else {
                return SceneLightSample::None;
            }
        }
    }
}
}
/// The outcome of sampling a light in the scene.
#[derive(Debug, Copy, Clone)]
pub enum SceneLightSample {
    /// No light could be sampled (e.g. the scene has no emitters).
    None,

    /// A sample on a distant (world/infinite) light.
    Distant {
        color: SpectralSample,
        /// Direction toward the light.
        direction: Vector,
        /// Pdf of sampling this direction on the chosen light.
        pdf: f32,
        /// Probability of having selected this particular light.
        selection_pdf: f32,
    },

    /// A sample on a local surface light.
    Surface {
        color: SpectralSample,
        /// (sample point, normal, point error) on the light's surface.
        sample_geo: (Point, Normal, f32),
        /// Pdf of sampling this point on the chosen light.
        pdf: f32,
        /// Probability of having selected this particular light.
        selection_pdf: f32,
    },
}
impl SceneLightSample {
/// Returns true if this is the `None` variant.
pub fn is_none(&self) -> bool {
    // `matches!` replaces the verbose `if let ... { true } else { false }`.
    matches!(self, SceneLightSample::None)
}
/// The sampled light color.
///
/// Panics on the `None` variant.
pub fn color(&self) -> SpectralSample {
    match self {
        SceneLightSample::Distant { color, .. }
        | SceneLightSample::Surface { color, .. } => *color,
        SceneLightSample::None => panic!(),
    }
}
/// The pdf of the sampled point/direction on the chosen light.
///
/// Panics on the `None` variant.
pub fn pdf(&self) -> f32 {
    match self {
        SceneLightSample::Distant { pdf, .. } | SceneLightSample::Surface { pdf, .. } => *pdf,
        SceneLightSample::None => panic!(),
    }
}
/// The probability of having selected this particular light.
///
/// Panics on the `None` variant.
pub fn selection_pdf(&self) -> f32 {
    match self {
        SceneLightSample::Distant { selection_pdf, .. }
        | SceneLightSample::Surface { selection_pdf, .. } => *selection_pdf,
        SceneLightSample::None => panic!(),
    }
}
} }

View File

@ -7,7 +7,7 @@ use glam::Vec4;
use crate::{ use crate::{
color::{Color, SpectralSample}, color::{Color, SpectralSample},
lerp::{lerp, Lerp}, lerp::{lerp, Lerp},
math::{clamp, dot, zup_to_vec, Normal, Vector}, math::{dot, zup_to_vec, Normal, Vector},
sampling::cosine_sample_hemisphere, sampling::cosine_sample_hemisphere,
}; };
@ -250,59 +250,6 @@ impl SurfaceClosure {
} }
} }
// Implemented for interpolation operations, not for any otherwise meaningful
// notion of addition.
impl std::ops::Add<SurfaceClosure> for SurfaceClosure {
type Output = Self;
fn add(self, rhs: Self) -> Self {
match (self, rhs) {
(Lambert(col1), Lambert(col2)) => Lambert(col1 + col2),
(
GGX {
color: col1,
roughness: rgh1,
fresnel: frs1,
},
GGX {
color: col2,
roughness: rgh2,
fresnel: frs2,
},
) => GGX {
color: col1 + col2,
roughness: rgh1 + rgh2,
fresnel: frs1 + frs2,
},
(Emit(col1), Emit(col2)) => Emit(col1 + col2),
_ => panic!("Cannot add two different surface closure types."),
}
}
}
// Implemented for interpolation operations, not for any otherwise meaningful
// notion of multiplication.
impl std::ops::Mul<f32> for SurfaceClosure {
type Output = Self;
fn mul(self, rhs: f32) -> Self {
match self {
Lambert(col) => Lambert(col * rhs),
GGX {
color: col,
roughness: rgh,
fresnel: frs,
} => GGX {
color: col * rhs,
roughness: rgh * rhs,
fresnel: frs * rhs,
},
Emit(col) => Emit(col * rhs),
}
}
}
impl Lerp for SurfaceClosure { impl Lerp for SurfaceClosure {
fn lerp(self, other: SurfaceClosure, alpha: f32) -> SurfaceClosure { fn lerp(self, other: SurfaceClosure, alpha: f32) -> SurfaceClosure {
match (self, other) { match (self, other) {
@ -534,11 +481,11 @@ mod ggx_closure {
} }
// Calculate needed dot products // Calculate needed dot products
let na = clamp(dot(nn, aa), -1.0, 1.0); let na = dot(nn, aa).clamp(-1.0, 1.0);
let nb = clamp(dot(nn, bb), -1.0, 1.0); let nb = dot(nn, bb).clamp(-1.0, 1.0);
let ha = clamp(dot(hh, aa), -1.0, 1.0); let ha = dot(hh, aa).clamp(-1.0, 1.0);
let hb = clamp(dot(hh, bb), -1.0, 1.0); let hb = dot(hh, bb).clamp(-1.0, 1.0);
let nh = clamp(dot(nn, hh), -1.0, 1.0); let nh = dot(nn, hh).clamp(-1.0, 1.0);
// Calculate F - Fresnel // Calculate F - Fresnel
let col_f = { let col_f = {
@ -637,7 +584,7 @@ mod ggx_closure {
// Approximate method // Approximate method
let theta = cos_theta_max.acos(); let theta = cos_theta_max.acos();
let hh = (aa + bb).normalized(); let hh = (aa + bb).normalized();
let nh = clamp(dot(nn, hh), -1.0, 1.0); let nh = dot(nn, hh).clamp(-1.0, 1.0);
let fac = ggx_d(nh, (1.0f32).min(roughness.sqrt() + (2.0 * theta / PI_32))); let fac = ggx_d(nh, (1.0f32).min(roughness.sqrt() + (2.0 * theta / PI_32)));
fac * (1.0f32).min(1.0 - cos_theta_max) * INV_PI fac * (1.0f32).min(1.0 - cos_theta_max) * INV_PI

View File

@ -33,8 +33,8 @@ pub struct MicropolyBatch<'a> {
normals: &'a [Normal], normals: &'a [Normal],
// Per-vertex shading data. // Per-vertex shading data.
// TODO: time samples.
compressed_vertex_closure_size: usize, // Size in bites of a single compressed closure compressed_vertex_closure_size: usize, // Size in bites of a single compressed closure
vertex_closure_time_sample_count: usize,
compressed_vertex_closures: &'a [u8], // Packed compressed closures compressed_vertex_closures: &'a [u8], // Packed compressed closures
// Micro-triangle indices. Each element of the tuple specifies the index // Micro-triangle indices. Each element of the tuple specifies the index
@ -130,6 +130,7 @@ impl<'a> MicropolyBatch<'a> {
vertices: vertices, vertices: vertices,
normals: normals, normals: normals,
compressed_vertex_closure_size: 0, compressed_vertex_closure_size: 0,
vertex_closure_time_sample_count: 1,
compressed_vertex_closures: &[], compressed_vertex_closures: &[],
indices: indices, indices: indices,
accel: accel, accel: accel,
@ -319,19 +320,16 @@ impl<'a> MicropolyBatch<'a> {
}; };
// Calculate interpolated surface closure. // Calculate interpolated surface closure.
// TODO: time sampling. // TODO: actually interpolate.
let closure = { let closure = {
let get_closure = |index| { let start_byte = hit_tri_indices.0 as usize
let start_byte = index * self.compressed_vertex_closure_size; * self.compressed_vertex_closure_size
SurfaceClosure::from_compressed( * self.vertex_closure_time_sample_count;
&self.compressed_vertex_closures[start_byte..], let end_byte = start_byte + self.compressed_vertex_closure_size;
) let (closure, _) = SurfaceClosure::from_compressed(
.0 &self.compressed_vertex_closures[start_byte..end_byte],
}; );
let c0 = get_closure(hit_tri_indices.0 as usize); closure
let c1 = get_closure(hit_tri_indices.1 as usize);
let c2 = get_closure(hit_tri_indices.2 as usize);
(c0 * b0) + (c1 * b1) + (c2 * b2)
}; };
let intersection_data = SurfaceIntersectionData { let intersection_data = SurfaceIntersectionData {

View File

@ -1,10 +0,0 @@
[package]
name = "data_tree"
version = "0.1.0"
authors = ["Nathan Vegdahl <cessen@cessen.com>"]
edition = "2018"
license = "MIT"
[lib]
name = "data_tree"
path = "src/lib.rs"

View File

@ -1,207 +0,0 @@
mod parse;
use parse::{ParseError, ParseEvent, Parser};
/// A structural event produced by `DataTreeReader` while streaming
/// through a data-tree document.
///
/// Borrowed string fields point into the reader's internal buffer, so an
/// `Event` is only valid until the next call on the reader.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Event<'a> {
    /// Start of an inner (container) node.
    InnerOpen {
        type_name: &'a str,
        /// Optional identifier attached to the node.
        ident: Option<&'a str>,
        /// Offset of the node in the input, in bytes.
        byte_offset: usize,
    },
    /// End of the most recently opened inner node.
    InnerClose {
        byte_offset: usize,
    },
    /// A leaf node: a type name plus raw, unparsed contents.
    Leaf {
        type_name: &'a str,
        contents: &'a str,
        byte_offset: usize,
    },
    /// The input is exhausted at a valid stopping point.
    EOF,
}
//----------------------------------------------------------------------------
/// Errors produced by `DataTreeReader`.
///
/// The `usize` payload on parse-error variants is the byte offset in the
/// input where the problem was found.
#[derive(Debug)]
pub enum Error {
    ExpectedTypeNameOrClose(usize),
    ExpectedOpenOrIdent(usize),
    ExpectedOpen(usize),
    UnexpectedClose(usize),
    UnexpectedIdent(usize),
    /// Input ended while at least one inner node was still open.
    UnexpectedEOF,
    /// An I/O error from the underlying reader.
    IO(std::io::Error),
}
impl std::error::Error for Error {}

// Display just reuses the Debug representation; errors are developer-facing.
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        write!(f, "{:?}", self)
    }
}
impl From<ParseError> for Error {
fn from(e: ParseError) -> Self {
match e {
ParseError::ExpectedTypeNameOrClose(byte_offset) => {
Error::ExpectedTypeNameOrClose(byte_offset)
}
ParseError::ExpectedOpenOrIdent(byte_offset) => Error::ExpectedOpenOrIdent(byte_offset),
ParseError::ExpectedOpen(byte_offset) => Error::ExpectedOpen(byte_offset),
ParseError::UnexpectedClose(byte_offset) => Error::UnexpectedClose(byte_offset),
ParseError::UnexpectedIdent(byte_offset) => Error::UnexpectedIdent(byte_offset),
}
}
}
impl From<std::io::Error> for Error {
fn from(e: std::io::Error) -> Self {
Error::IO(e)
}
}
//-------------------------------------------------------------
/// A streaming reader that pulls lines from `reader` on demand, feeds
/// them to the incremental `Parser`, and yields `Event`s.
#[derive(Debug)]
pub struct DataTreeReader<R: std::io::BufRead> {
    // Incremental parser that owns the unconsumed text.
    parser: Parser,
    // Source of input lines.
    reader: R,
    // Scratch buffer reused for each `read_line` call.
    buf: String,
    // True once `read_line` has returned 0 bytes (end of input).
    eof: bool,
}
impl<R: std::io::BufRead> DataTreeReader<R> {
pub fn new(reader: R) -> Self {
Self {
parser: Parser::new(),
reader: reader,
buf: String::new(),
eof: false,
}
}
/// Returns the next event, consuming it.
///
/// Reads more lines from the underlying reader as needed.  Returns
/// `Event::EOF` when input is exhausted at a valid stopping point, or
/// `Error::UnexpectedEOF` if input ends with nodes still open.
pub fn next_event<'a>(&'a mut self) -> Result<Event<'a>, Error> {
    loop {
        let valid_end = match self.parser.next_event()? {
            ParseEvent::ValidEnd => true,
            ParseEvent::NeedMoreInput => false,

            // The transmutes below are because the borrow checker is
            // over-conservative about this. It thinks
            // the liftime isn't valid, but since we aren't
            // mutating self after returning (and in fact
            // can't because of the borrow) there's no way for
            // the references in this to become invalid.
            //
            // SAFETY (review note): the transmute only re-labels the
            // lifetime of the borrowed strings; the underlying buffer is
            // not mutated while the returned `Event` is alive because the
            // `&'a mut self` borrow is still held by the caller.
            ParseEvent::InnerOpen {
                type_name,
                ident,
                byte_offset,
            } => {
                return Ok(unsafe {
                    std::mem::transmute::<Event, Event>(Event::InnerOpen {
                        type_name,
                        ident,
                        byte_offset,
                    })
                });
            }
            ParseEvent::InnerClose { byte_offset } => {
                return Ok(unsafe {
                    std::mem::transmute::<Event, Event>(Event::InnerClose { byte_offset })
                });
            }
            ParseEvent::Leaf {
                type_name,
                contents,
                byte_offset,
            } => {
                return Ok(unsafe {
                    std::mem::transmute::<Event, Event>(Event::Leaf {
                        type_name,
                        contents,
                        byte_offset,
                    })
                });
            }
        };

        // The parser needs more input (or reported a valid end):
        // pull another line, or finish.
        if !self.eof {
            self.buf.clear();
            let read = self.reader.read_line(&mut self.buf)?;
            self.parser.push_data(&self.buf);
            if read == 0 {
                // `read_line` returning 0 bytes means end of input.
                self.eof = true;
            }
        } else if !valid_end {
            return Err(Error::UnexpectedEOF);
        } else {
            return Ok(Event::EOF);
        }
    }
}
/// Returns the next event without consuming it.
///
/// Identical to `next_event` except that the underlying parser's
/// `peek_event` leaves the event in place, so a subsequent call yields
/// the same event again.  May still read lines from the underlying
/// reader to have enough data to peek at.
pub fn peek_event<'a>(&'a mut self) -> Result<Event<'a>, Error> {
    loop {
        let valid_end = match self.parser.peek_event()? {
            ParseEvent::ValidEnd => true,
            ParseEvent::NeedMoreInput => false,

            // The transmutes below are because the borrow checker is
            // over-conservative about this. It thinks
            // the liftime isn't valid, but since we aren't
            // mutating self after returning (and in fact
            // can't because of the borrow) there's no way for
            // the references in this to become invalid.
            //
            // SAFETY (review note): same lifetime re-label as in
            // `next_event`; see the note there.
            ParseEvent::InnerOpen {
                type_name,
                ident,
                byte_offset,
            } => {
                return Ok(unsafe {
                    std::mem::transmute::<Event, Event>(Event::InnerOpen {
                        type_name,
                        ident,
                        byte_offset,
                    })
                });
            }
            ParseEvent::InnerClose { byte_offset } => {
                return Ok(unsafe {
                    std::mem::transmute::<Event, Event>(Event::InnerClose { byte_offset })
                });
            }
            ParseEvent::Leaf {
                type_name,
                contents,
                byte_offset,
            } => {
                return Ok(unsafe {
                    std::mem::transmute::<Event, Event>(Event::Leaf {
                        type_name,
                        contents,
                        byte_offset,
                    })
                });
            }
        };

        // Need more input: pull another line, or finish.
        if !self.eof {
            self.buf.clear();
            let read = self.reader.read_line(&mut self.buf)?;
            self.parser.push_data(&self.buf);
            if read == 0 {
                self.eof = true;
            }
        } else if !valid_end {
            return Err(Error::UnexpectedEOF);
        } else {
            return Ok(Event::EOF);
        }
    }
}
/// The parser's current byte offset into the input stream.
pub fn byte_offset(&self) -> usize {
    self.parser.byte_offset()
}
}

View File

@ -1,911 +0,0 @@
/// Low-level parse errors.  The `usize` payload is the byte offset in
/// the overall input where the problem was found.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ParseError {
    ExpectedTypeNameOrClose(usize),
    ExpectedOpenOrIdent(usize),
    ExpectedOpen(usize),
    UnexpectedClose(usize),
    UnexpectedIdent(usize),
}
impl std::error::Error for ParseError {}

// Display just reuses the Debug representation; errors are developer-facing.
impl std::fmt::Display for ParseError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        write!(f, "{:?}", self)
    }
}
//---------------------------------------------------------------------
/// An event produced by the incremental `Parser`.
///
/// Mirrors the public `Event` type but adds the two control variants the
/// driving loop needs (`NeedMoreInput`, `ValidEnd`).
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ParseEvent<'a> {
    /// Start of an inner (container) node.
    InnerOpen {
        type_name: &'a str,
        ident: Option<&'a str>,
        byte_offset: usize,
    },
    /// End of the most recently opened inner node.
    InnerClose {
        byte_offset: usize,
    },
    /// A leaf node: a type name plus raw, unparsed contents.
    Leaf {
        type_name: &'a str,
        contents: &'a str,
        byte_offset: usize,
    },
    /// Parsing stalled: more input must be pushed before the next event.
    NeedMoreInput,
    ValidEnd, // All data so far is consumed, and this is a
              // valid place to finish the parse.
}
impl<'a> ParseEvent<'a> {
    /// Returns a copy of this event with `offset` added to its byte
    /// offset.  The control variants (`NeedMoreInput`, `ValidEnd`) carry
    /// no offset and are returned unchanged.
    fn add_to_byte_offset(&self, offset: usize) -> ParseEvent<'a> {
        match *self {
            ParseEvent::InnerOpen {
                type_name,
                ident,
                byte_offset,
            } => ParseEvent::InnerOpen {
                type_name,
                ident,
                byte_offset: byte_offset + offset,
            },

            ParseEvent::InnerClose { byte_offset } => ParseEvent::InnerClose {
                byte_offset: byte_offset + offset,
            },

            ParseEvent::Leaf {
                type_name,
                contents,
                byte_offset,
            } => ParseEvent::Leaf {
                type_name,
                contents,
                byte_offset: byte_offset + offset,
            },

            // NeedMoreInput and ValidEnd: nothing to shift.
            other => other,
        }
    }
}
//---------------------------------------------------------------------
#[derive(Debug)]
pub struct Parser {
buffer: String,
buf_consumed_idx: usize,
total_bytes_processed: usize,
inner_opens: usize,
}
impl Parser {
pub fn new() -> Parser {
Parser {
buffer: String::with_capacity(1024),
buf_consumed_idx: 0,
total_bytes_processed: 0,
inner_opens: 0,
}
}
/// Appends `text` to the parser's input buffer.
pub fn push_data(&mut self, text: &str) {
    // Drop the already-consumed prefix first, so the buffer only ever
    // holds unconsumed input.
    if self.buf_consumed_idx > 0 {
        self.buffer.drain(..self.buf_consumed_idx);
        self.buf_consumed_idx = 0;
    }

    self.buffer.push_str(text);
}
/// Parses and consumes the next event from the buffered input.
///
/// Tracks open/close nesting via `inner_opens` and converts all
/// buffer-relative byte offsets into absolute offsets using
/// `total_bytes_processed`.  Returns `NeedMoreInput` when the buffer
/// holds an incomplete event, and `ValidEnd` when the buffer is fully
/// consumed with no nodes left open.
pub fn next_event<'a>(&'a mut self) -> Result<ParseEvent<'a>, ParseError> {
    // Remove any consumed data.
    if self.buf_consumed_idx > 0 {
        self.buffer.replace_range(..self.buf_consumed_idx, "");
        self.buf_consumed_idx = 0;
    }

    // Try to parse an event from the valid prefix.
    match try_parse_event(&self.buffer) {
        ParseEventParse::Ok(event, bytes_consumed) => {
            // Update internal state.
            if let ParseEvent::InnerOpen { .. } = event {
                self.inner_opens += 1;
            } else if let ParseEvent::InnerClose { byte_offset, .. } = event {
                // A close with no matching open is a hard error.
                if self.inner_opens == 0 {
                    return Err(ParseError::UnexpectedClose(
                        byte_offset + self.total_bytes_processed,
                    ));
                } else {
                    self.inner_opens -= 1;
                }
            }
            self.buf_consumed_idx += bytes_consumed;
            self.total_bytes_processed += bytes_consumed;

            // Shift the event's buffer-relative offset to an absolute one.
            // Note: both counters were just advanced by `bytes_consumed`,
            // so the difference is the absolute offset of the buffer start
            // at the time the event was parsed.
            Ok(event.add_to_byte_offset(self.total_bytes_processed - self.buf_consumed_idx))
        }
        ParseEventParse::ReachedEnd => {
            // If we consumed all data, then if all nodes are properly
            // closed we're done. Otherwise we need more input.
            if self.inner_opens == 0 {
                Ok(ParseEvent::ValidEnd)
            } else {
                Ok(ParseEvent::NeedMoreInput)
            }
        }
        ParseEventParse::IncompleteData => Ok(ParseEvent::NeedMoreInput),

        // Hard errors: convert buffer-relative offsets to absolute ones.
        ParseEventParse::ExpectedTypeNameOrInnerClose(byte_offset) => Err(
            ParseError::ExpectedTypeNameOrClose(byte_offset + self.total_bytes_processed),
        ),
        ParseEventParse::ExpectedOpenOrIdent(byte_offset) => Err(
            ParseError::ExpectedOpenOrIdent(byte_offset + self.total_bytes_processed),
        ),
        ParseEventParse::ExpectedInnerOpen(byte_offset) => Err(ParseError::ExpectedOpen(
            byte_offset + self.total_bytes_processed,
        )),
        ParseEventParse::UnexpectedIdent(byte_offset) => Err(ParseError::UnexpectedIdent(
            byte_offset + self.total_bytes_processed,
        )),
    }
}
pub fn peek_event<'a>(&'a mut self) -> Result<ParseEvent<'a>, ParseError> {
// Remove any consumed data.
if self.buf_consumed_idx > 0 {
self.buffer.replace_range(..self.buf_consumed_idx, "");
self.buf_consumed_idx = 0;
}
// Try to parse an event from the valid prefix.
match try_parse_event(&self.buffer) {
ParseEventParse::Ok(event, _bytes_consumed) => {
if let ParseEvent::InnerClose { byte_offset, .. } = event {
if self.inner_opens == 0 {
return Err(ParseError::UnexpectedClose(
byte_offset + self.total_bytes_processed,
));
}
}
Ok(event.add_to_byte_offset(self.total_bytes_processed))
}
ParseEventParse::ReachedEnd => {
// If we consumed all data, then if all nodes are properly
// closed we're done. Otherwise we need more input.
if self.inner_opens == 0 {
Ok(ParseEvent::ValidEnd)
} else {
Ok(ParseEvent::NeedMoreInput)
}
}
ParseEventParse::IncompleteData => Ok(ParseEvent::NeedMoreInput),
// Hard errors.
ParseEventParse::ExpectedTypeNameOrInnerClose(byte_offset) => Err(
ParseError::ExpectedTypeNameOrClose(byte_offset + self.total_bytes_processed),
),
ParseEventParse::ExpectedOpenOrIdent(byte_offset) => Err(
ParseError::ExpectedOpenOrIdent(byte_offset + self.total_bytes_processed),
),
ParseEventParse::ExpectedInnerOpen(byte_offset) => Err(ParseError::ExpectedOpen(
byte_offset + self.total_bytes_processed,
)),
ParseEventParse::UnexpectedIdent(byte_offset) => Err(ParseError::UnexpectedIdent(
byte_offset + self.total_bytes_processed,
)),
}
}
pub fn byte_offset(&self) -> usize {
self.total_bytes_processed + self.buf_consumed_idx
}
}
//--------------------------------------------------------------------------
/// Internal result of `try_parse_event()`: a parsed event, a benign
/// "no complete event yet" state, or a hard syntax error. All carried
/// offsets are relative to the start of the text that was passed in.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum ParseEventParse<'a> {
    Ok(ParseEvent<'a>, usize), // (event, bytes consumed)
    ReachedEnd, // Reached the end of the buffer in a valid state, with no event.
    IncompleteData, // Need more data to parse.
    // ParseErrors.  Each payload is the byte offset where the error was
    // detected, relative to the start of the parsed text.
    ExpectedTypeNameOrInnerClose(usize),
    ExpectedOpenOrIdent(usize),
    ExpectedInnerOpen(usize),
    UnexpectedIdent(usize),
}
/// Attempts to parse one complete event from the front of `text`.
///
/// On success the returned byte count includes any leading whitespace and
/// comments that were skipped. All byte offsets in the result are relative
/// to the start of `text`; the caller (`Parser`) shifts them to absolute
/// stream positions.
fn try_parse_event<'a>(text: &'a str) -> ParseEventParse<'a> {
    // Remove leading whitespace and comments.
    let mut source_text = skip_ws_and_comments((0, text));
    let start_idx = source_text.0;
    // First token.
    let type_name = match next_token(source_text) {
        // Type name, record and continue.
        (Token::TypeName(tn), tail) => {
            source_text = tail;
            tn
        }
        // Closing tag for inner node. Return.
        (Token::CloseInner, tail) => {
            return ParseEventParse::Ok(
                ParseEvent::InnerClose {
                    byte_offset: start_idx,
                },
                tail.0,
            );
        }
        // We consumed everything as whitespace and/or
        // comments. Return.
        (Token::End, _) => {
            return ParseEventParse::ReachedEnd;
        }
        // Invalid.
        _ => return ParseEventParse::ExpectedTypeNameOrInnerClose(start_idx),
    };
    // Skip whitespace and comments to get the start of the
    // (possible) ident, for use later in error.
    source_text = skip_ws_and_comments(source_text);
    let ident_start_idx = source_text.0;
    // Possible second token: optional ident.
    let ident = if let (Token::Ident(id), tail) = next_token(source_text) {
        source_text = tail;
        Some(id)
    } else {
        None
    };
    // Skip whitespace and comments to get the start of the
    // where there should be an open tag, for use later in error.
    source_text = skip_ws_and_comments(source_text);
    let open_start_idx = source_text.0;
    // Last part of the event.
    match next_token(source_text) {
        // Begining of an inner node.
        (Token::OpenInner, tail) => {
            return ParseEventParse::Ok(
                ParseEvent::InnerOpen {
                    type_name: type_name,
                    ident: ident,
                    byte_offset: start_idx,
                },
                tail.0,
            );
        }
        // Try to parse entire leaf node.
        (Token::OpenLeaf, tail) => {
            // Leaf nodes cannot carry an ident.
            if ident != None {
                return ParseEventParse::UnexpectedIdent(ident_start_idx);
            }
            // Get contents.
            let (contents, tail2) = parse_leaf_content(tail);
            source_text = tail2;
            // Try to get closing tag.
            match next_token(source_text) {
                // If it's a leaf closing tag, we're done!
                // Return the leaf event.
                (Token::CloseLeaf, tail) => {
                    return ParseEventParse::Ok(
                        ParseEvent::Leaf {
                            type_name: type_name,
                            contents: contents,
                            byte_offset: start_idx,
                        },
                        tail.0,
                    );
                }
                // Otherwise...
                _ => {
                    if source_text.1.is_empty() {
                        // If there's no text left, we're just incomplete.
                        return ParseEventParse::IncompleteData;
                    } else {
                        // Otherwise, this would be a parse error...
                        // except that this shouldn't be reachable,
                        // since everything should be consumable for
                        // leaf content up until a close tag.
                        unreachable!("Expected leaf close tag.")
                    }
                }
            }
        }
        // We consumed everything else as whitespace
        // and/or comments, so we're incomplete. Return.
        (Token::End, _) => {
            return ParseEventParse::IncompleteData;
        }
        // Invalid.
        _ => {
            if ident == None {
                return ParseEventParse::ExpectedOpenOrIdent(open_start_idx);
            } else {
                return ParseEventParse::ExpectedInnerOpen(open_start_idx);
            }
        }
    }
}
/// Splits leaf contents from the rest of the input.
///
/// Scans `source_text.1` for the first `]` not preceded by a backslash
/// escape; everything before it (escapes left intact) is the contents.
/// If no closing `]` exists, the entire remaining text is taken.
/// Returns `(contents, (new_offset, remaining_text))`.
fn parse_leaf_content(source_text: (usize, &str)) -> (&str, (usize, &str)) {
    let (offset, text) = source_text;
    let mut escaped = false;
    // Byte index of the terminating `]`, or the text length if none found.
    let end = text
        .char_indices()
        .find(|&(_, c)| {
            if escaped {
                escaped = false;
                false
            } else if c == '\\' {
                escaped = true;
                false
            } else {
                c == ']'
            }
        })
        .map_or(text.len(), |(i, _)| i);
    (&text[..end], (offset + end, &text[end..]))
}
//--------------------------------------------------------------------------
/// Lexical tokens recognized by `next_token()`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum Token<'a> {
    OpenInner,  // `{`
    CloseInner, // `}`
    OpenLeaf,   // `[`
    CloseLeaf,  // `]`
    TypeName(&'a str), // Maximal run of ident characters.
    Ident(&'a str),    // `$`-prefixed name; the `$` and any escapes are kept in the slice.
    End,               // Nothing left but whitespace and/or comments.
    Unknown,           // Unrecognized input (e.g. a stray reserved character).
}
/// Reads the next token from `source_text` (an `(offset, text)` pair),
/// returning the token and the remaining `(offset, text)` after it.
/// Leading whitespace and comments are skipped and counted as consumed.
fn next_token<'a>(source_text: (usize, &'a str)) -> (Token<'a>, (usize, &'a str)) {
    let text1 = skip_ws_and_comments(source_text);
    if let Some(c) = text1.1.chars().nth(0) {
        // `text2` is the input immediately after the single character `c`.
        let text2 = (text1.0 + c.len_utf8(), &text1.1[c.len_utf8()..]);
        match c {
            '{' => {
                return (Token::OpenInner, text2);
            }
            '}' => {
                return (Token::CloseInner, text2);
            }
            '[' => {
                return (Token::OpenLeaf, text2);
            }
            ']' => {
                return (Token::CloseLeaf, text2);
            }
            '$' => {
                // Parse name
                // The ident extends until the first unescaped non-ident
                // character; a backslash escapes the character after it.
                // The `$` itself is kept in the returned slice.
                let mut si = 1;
                let mut escaped = false;
                let mut reached_end = true;
                for (i, c) in text1.1.char_indices().skip(1) {
                    si = i;
                    if escaped {
                        escaped = false;
                    } else if c == '\\' {
                        escaped = true;
                    } else if !is_ident_char(c) {
                        reached_end = false;
                        break;
                    }
                }
                if reached_end {
                    // Ran off the end of the input: take everything.
                    si = text1.1.len();
                }
                return (
                    Token::Ident(&text1.1[0..si]),
                    (text1.0 + si, &text1.1[si..]),
                );
            }
            _ => {
                if is_ident_char(c) {
                    // Parse type
                    // A type name is a maximal run of ident characters
                    // (no escape handling here, unlike idents).
                    let mut si = 0;
                    let mut reached_end = true;
                    for (i, c) in text1.1.char_indices() {
                        si = i;
                        if !is_ident_char(c) {
                            reached_end = false;
                            break;
                        }
                    }
                    if reached_end {
                        si = text1.1.len();
                    }
                    return (
                        Token::TypeName(&text1.1[0..si]),
                        (text1.0 + si, &text1.1[si..]),
                    );
                }
            }
        }
    } else {
        // Nothing left after whitespace/comments.
        return (Token::End, text1);
    }
    // Fell through every case: an unrecognized character. The input is
    // intentionally not advanced past it.
    return (Token::Unknown, text1);
}
/// True for the whitespace characters this format recognizes:
/// newline, carriage return, tab, and space.
fn is_ws(c: char) -> bool {
    matches!(c, '\n' | '\r' | '\t' | ' ')
}
/// True only for the line-feed character (`\r` is not treated as a newline).
fn is_nl(c: char) -> bool {
    c == '\n'
}
/// True for characters with syntactic meaning in the format, which are
/// therefore excluded from type names and idents.
fn is_reserved_char(c: char) -> bool {
    matches!(c, '{' | '}' | '[' | ']' | '$' | '#' | '\\')
}
/// An ident/type-name character is anything that is neither whitespace
/// nor one of the reserved syntax characters.
fn is_ident_char(c: char) -> bool {
    !(is_ws(c) || is_reserved_char(c))
}
/// Returns `text` with its leading run of whitespace characters removed
/// (the same `\n`/`\r`/`\t`/space set that `is_ws` accepts).
fn skip_ws(text: &str) -> &str {
    text.trim_start_matches(|c: char| matches!(c, '\n' | '\r' | '\t' | ' '))
}
/// If `text` begins with `#`, returns the remainder starting at the
/// terminating newline (the newline itself is not consumed); a comment
/// with no trailing newline consumes the rest of the input. Text that is
/// not a comment is returned unchanged.
fn skip_comment(text: &str) -> &str {
    if !text.starts_with('#') {
        return text;
    }
    match text.find('\n') {
        Some(nl) => &text[nl..],
        None => "",
    }
}
/// Strips any alternating run of whitespace and `#` comments from the
/// front of `text`, returning the updated `(byte_offset, remainder)` pair.
/// The offset advances by however many bytes were stripped.
fn skip_ws_and_comments(text: (usize, &str)) -> (usize, &str) {
    let (start_offset, full_text) = text;
    let mut rest = full_text;
    // Alternate ws/comment stripping until a pass makes no progress.
    loop {
        let stripped = skip_comment(skip_ws(rest));
        if stripped.len() == rest.len() {
            break;
        }
        rest = stripped;
    }
    (start_offset + full_text.len() - rest.len(), rest)
}
//--------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    use super::{next_token, Token};
    // ---- Tokenizer tests ----
    #[test]
    fn tokenize_1() {
        let input = (0, "Thing");
        assert_eq!(next_token(input), (Token::TypeName("Thing"), (5, "")));
    }
    #[test]
    fn tokenize_2() {
        // Leading whitespace and a comment are skipped and counted as consumed.
        let input = (0, " \n# gdfgdf gfdg dggdf\\sg dfgsd \n Thing");
        assert_eq!(next_token(input), (Token::TypeName("Thing"), (41, "")));
    }
    #[test]
    fn tokenize_3() {
        let input1 = (0, " Thing { }");
        let (token1, input2) = next_token(input1);
        let (token2, input3) = next_token(input2);
        let (token3, input4) = next_token(input3);
        assert_eq!((token1, input2.1), (Token::TypeName("Thing"), " { }"));
        assert_eq!((token2, input3.1), (Token::OpenInner, " }"));
        assert_eq!((token3, input4.1), (Token::CloseInner, ""));
    }
    #[test]
    fn tokenize_4() {
        // Idents keep their leading `$`.
        let input = (0, " $hi_there ");
        assert_eq!(next_token(input), (Token::Ident("$hi_there"), (10, " ")));
    }
    #[test]
    fn tokenize_5() {
        // Backslash escapes allow reserved characters inside an ident.
        let input = (0, " $hi\\ t\\#he\\[re ");
        assert_eq!(
            next_token(input),
            (Token::Ident("$hi\\ t\\#he\\[re"), (15, " "),)
        );
    }
    #[test]
    fn tokenize_6() {
        let input1 = (0, " $hi the[re");
        let (token1, input2) = next_token(input1);
        let (token2, input3) = next_token(input2);
        let (token3, input4) = next_token(input3);
        let (token4, input5) = next_token(input4);
        let (token5, input6) = next_token(input5);
        assert_eq!((token1, input2), (Token::Ident("$hi"), (4, " the[re")));
        assert_eq!((token2, input3), (Token::TypeName("the"), (8, "[re")));
        assert_eq!((token3, input4), (Token::OpenLeaf, (9, "re")));
        assert_eq!((token4, input5), (Token::TypeName("re"), (11, "")));
        assert_eq!((token5, input6), (Token::End, (11, "")));
    }
    #[test]
    fn tokenize_7() {
        let input1 = (0, "Thing $yar { # A comment\n\tThing2 []\n}");
        let (token1, input2) = next_token(input1);
        let (token2, input3) = next_token(input2);
        let (token3, input4) = next_token(input3);
        let (token4, input5) = next_token(input4);
        let (token5, input6) = next_token(input5);
        let (token6, input7) = next_token(input6);
        let (token7, input8) = next_token(input7);
        let (token8, input9) = next_token(input8);
        assert_eq!(
            (token1, input2),
            (
                Token::TypeName("Thing"),
                (5, " $yar { # A comment\n\tThing2 []\n}",)
            )
        );
        assert_eq!(
            (token2, input3),
            (
                Token::Ident("$yar"),
                (10, " { # A comment\n\tThing2 []\n}",)
            )
        );
        assert_eq!(
            (token3, input4),
            (Token::OpenInner, (12, " # A comment\n\tThing2 []\n}",))
        );
        assert_eq!(
            (token4, input5),
            (Token::TypeName("Thing2"), (32, " []\n}"))
        );
        assert_eq!((token5, input6), (Token::OpenLeaf, (34, "]\n}")));
        assert_eq!((token6, input7), (Token::CloseLeaf, (35, "\n}")));
        assert_eq!((token7, input8), (Token::CloseInner, (37, "")));
        assert_eq!((token8, input9), (Token::End, (37, "")));
    }
    // ---- try_parse_event() tests ----
    #[test]
    fn try_parse_event_01() {
        assert_eq!(try_parse_event("H"), ParseEventParse::IncompleteData,);
    }
    #[test]
    fn try_parse_event_02() {
        assert_eq!(try_parse_event("Hello $"), ParseEventParse::IncompleteData,);
    }
    #[test]
    fn try_parse_event_03() {
        assert_eq!(
            try_parse_event("Hello $id "),
            ParseEventParse::IncompleteData,
        );
    }
    #[test]
    fn try_parse_event_04() {
        assert_eq!(
            try_parse_event("Hello $id {"),
            ParseEventParse::Ok(
                ParseEvent::InnerOpen {
                    type_name: "Hello",
                    ident: Some("$id"),
                    byte_offset: 0,
                },
                11
            ),
        );
    }
    #[test]
    fn try_parse_event_05() {
        // Leading whitespace shifts the event's offset and is counted
        // in the consumed-byte total.
        assert_eq!(
            try_parse_event("  Hello $id {"),
            ParseEventParse::Ok(
                ParseEvent::InnerOpen {
                    type_name: "Hello",
                    ident: Some("$id"),
                    byte_offset: 2,
                },
                13
            ),
        );
    }
    #[test]
    fn try_parse_event_06() {
        assert_eq!(
            try_parse_event("Hello {"),
            ParseEventParse::Ok(
                ParseEvent::InnerOpen {
                    type_name: "Hello",
                    ident: None,
                    byte_offset: 0,
                },
                7
            ),
        );
    }
    #[test]
    fn try_parse_event_07() {
        // Trailing whitespace after the event is not consumed.
        assert_eq!(
            try_parse_event("Hello {  "),
            ParseEventParse::Ok(
                ParseEvent::InnerOpen {
                    type_name: "Hello",
                    ident: None,
                    byte_offset: 0,
                },
                7
            ),
        );
    }
    #[test]
    fn try_parse_event_08() {
        assert_eq!(try_parse_event("Hello ["), ParseEventParse::IncompleteData,);
    }
    #[test]
    fn try_parse_event_09() {
        assert_eq!(
            try_parse_event("Hello [some contents"),
            ParseEventParse::IncompleteData,
        );
    }
    #[test]
    fn try_parse_event_10() {
        assert_eq!(
            try_parse_event("Hello [some contents]"),
            ParseEventParse::Ok(
                ParseEvent::Leaf {
                    type_name: "Hello",
                    contents: "some contents",
                    byte_offset: 0,
                },
                21
            ),
        );
    }
    #[test]
    fn try_parse_event_11() {
        assert_eq!(
            try_parse_event("Hello [some contents]  "),
            ParseEventParse::Ok(
                ParseEvent::Leaf {
                    type_name: "Hello",
                    contents: "some contents",
                    byte_offset: 0,
                },
                21
            ),
        );
    }
    #[test]
    fn try_parse_event_12() {
        // Only whitespace and comments: a valid stopping point.
        assert_eq!(
            try_parse_event("  # A comment\n\n  "),
            ParseEventParse::ReachedEnd,
        );
    }
    // ---- Full Parser tests ----
    #[test]
    fn parser_01() {
        let mut parser = Parser::new();
        parser.push_data("Hello");
        assert_eq!(parser.next_event(), Ok(ParseEvent::NeedMoreInput));
        parser.push_data("{");
        assert_eq!(
            parser.next_event(),
            Ok(ParseEvent::InnerOpen {
                type_name: "Hello",
                ident: None,
                byte_offset: 0,
            })
        );
        assert_eq!(parser.next_event(), Ok(ParseEvent::NeedMoreInput));
        parser.push_data("}");
        assert_eq!(
            parser.next_event(),
            Ok(ParseEvent::InnerClose { byte_offset: 6 })
        );
        assert_eq!(parser.next_event(), Ok(ParseEvent::ValidEnd));
    }
    #[test]
    fn parser_02() {
        // Leaf contents may arrive split across multiple pushes.
        let mut parser = Parser::new();
        parser.push_data("Hello");
        assert_eq!(parser.next_event(), Ok(ParseEvent::NeedMoreInput));
        parser.push_data("[");
        assert_eq!(parser.next_event(), Ok(ParseEvent::NeedMoreInput));
        parser.push_data("1.0 2.0 3.");
        assert_eq!(parser.next_event(), Ok(ParseEvent::NeedMoreInput));
        parser.push_data("0]");
        assert_eq!(
            parser.next_event(),
            Ok(ParseEvent::Leaf {
                type_name: "Hello",
                contents: "1.0 2.0 3.0",
                byte_offset: 0,
            })
        );
        assert_eq!(parser.next_event(), Ok(ParseEvent::ValidEnd));
    }
    #[test]
    fn parser_03() {
        // Byte offsets are absolute stream positions across events.
        let mut parser = Parser::new();
        parser.push_data("Hello $big_boy { World [1.0 2.0 3.0] }");
        assert_eq!(
            parser.next_event(),
            Ok(ParseEvent::InnerOpen {
                type_name: "Hello",
                ident: Some("$big_boy"),
                byte_offset: 0,
            })
        );
        assert_eq!(
            parser.next_event(),
            Ok(ParseEvent::Leaf {
                type_name: "World",
                contents: "1.0 2.0 3.0",
                byte_offset: 17,
            })
        );
        assert_eq!(
            parser.next_event(),
            Ok(ParseEvent::InnerClose { byte_offset: 37 })
        );
        // Make sure repeated calls are stable.
        assert_eq!(parser.next_event(), Ok(ParseEvent::ValidEnd));
        assert_eq!(parser.next_event(), Ok(ParseEvent::ValidEnd));
        assert_eq!(parser.next_event(), Ok(ParseEvent::ValidEnd));
    }
    #[test]
    fn parser_04() {
        let mut parser = Parser::new();
        parser.push_data("$Hello");
        assert_eq!(
            parser.next_event(),
            Err(ParseError::ExpectedTypeNameOrClose(0))
        );
    }
    #[test]
    fn parser_05() {
        let mut parser = Parser::new();
        parser.push_data("Hello]");
        assert_eq!(parser.next_event(), Err(ParseError::ExpectedOpenOrIdent(5)));
    }
    #[test]
    fn parser_06() {
        let mut parser = Parser::new();
        parser.push_data("Hello}");
        assert_eq!(parser.next_event(), Err(ParseError::ExpectedOpenOrIdent(5)));
    }
    #[test]
    fn parser_07() {
        // Leaf nodes cannot carry an ident.
        let mut parser = Parser::new();
        parser.push_data("Hello $yar [");
        assert_eq!(parser.next_event(), Err(ParseError::UnexpectedIdent(6)));
    }
    #[test]
    fn parser_08() {
        // A close with no matching open is rejected.
        let mut parser = Parser::new();
        parser.push_data("}");
        assert_eq!(parser.next_event(), Err(ParseError::UnexpectedClose(0)));
    }
}