Compare commits

No commits in common. "bidirectional" and "master" have entirely different histories.

51 changed files with 339 additions and 2157 deletions

View file

@ -1,2 +0,0 @@
[build]
rustflags = ["-C", "target-cpu=native"]

View file

@ -52,6 +52,7 @@ use_field_init_shorthand = false
force_explicit_abi = true
condense_wildcard_suffixes = false
color = "Auto"
required_version = "1.4.12"
unstable_features = false
disable_all_formatting = false
skip_children = false

View file

@ -4,7 +4,3 @@ members = [
"beevee",
"pathtracer",
]
[profile.release]
lto = true
codegen-units = 1

View file

@ -7,8 +7,5 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Linear algebra basic operations and types
nalgebra = "0.20"
# High performance quicksort/quickselect
pdqselect = "0.1.0"

View file

@ -1,39 +0,0 @@
use super::Intersected;
use crate::aabb::Bounded;
use crate::ray::Ray;
/// The trait for any mesh-like object to be used in the [`BVH`]. If your object is not an
/// aggregate, you should instead implement [`Intersected`] which derives this trait automatically.
///
/// This trait is there to accommodate aggregate objects inside the [`BVH`]: you can implement a
/// faster look-up of information using a [`BVH`] in a mesh for example, returning directly the
/// reference to a hit triangle. This enables us to return this triangle instead of returning a
/// reference to the whole mesh.
///
/// [`BVH`]: struct.BVH.html
/// [`Intersected`]: struct.Intersected.html
pub trait Accelerated: Bounded {
/// The type contained in your [`Accelerated`] structure
///
/// [`Accelerated`]: struct.Accelerated.html
type Output;
/// Return None if no intersection happens with the ray, or a tuple of distance along the ray
/// and a reference to the object that was hit.
fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)>;
}
/// The automatic implementation for any [`Intersected`] object to be used in the [`BVH`].
///
/// [`BVH`]: struct.BVH.html
impl<T> Accelerated for T
where
T: Intersected,
{
type Output = Self;
/// Return a reference to `self` when a distance was found.
fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)> {
self.intersect(ray).map(|t| (t, self))
}
}
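
The doc comment above describes the aggregate use case the removed `Accelerated` trait was designed for: resolving a ray hit down to a contained primitive rather than to the whole aggregate. Below is a minimal, self-contained sketch of that idea; `Ray`, `Triangle`, `Mesh` and the trait stand-in are illustrative only and are not the real beevee types (the `Bounded` supertrait and the inner BVH walk are elided).

struct Ray;
struct Triangle;

// Stand-in for the removed trait: an aggregate reports a hit on its inner
// `Output` type rather than on itself.
trait Accelerated {
    type Output;
    fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)>;
}

struct Mesh {
    triangles: Vec<Triangle>,
}

impl Accelerated for Mesh {
    // Walking the mesh resolves the hit down to one of its triangles.
    type Output = Triangle;

    fn intersect(&self, _ray: &Ray) -> Option<(f32, &Triangle)> {
        // A real implementation would walk an inner BVH; returning the first
        // triangle at a fixed distance keeps the sketch compiling.
        self.triangles.first().map(|t| (1.0, t))
    }
}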

View file

@ -1,11 +1,8 @@
use crate::aabb::Bounded;
use crate::ray::Ray;
/// The trait for any object to be used in the [`BVH`]. Its [`Accelerated`] implementation is
/// automatically derived to return a reference to itself. If this is not the intended semantics, see
/// [`Accelerated`].
/// The trait for any object to be used in the [`BVH`].
///
/// [`Accelerated`]: struct.Accelerated.html
/// [`BVH`]: struct.BVH.html
pub trait Intersected: Bounded {
/// Return None if there is no intersection, or the distance along the ray to the closest

View file

@ -1,8 +1,5 @@
//! The Bounding Volume Hierarchy
mod accelerated;
pub use accelerated::*;
mod intersected;
pub use intersected::*;

View file

@ -1,4 +1,4 @@
use super::Accelerated;
use super::Intersected;
use crate::aabb::AABB;
use crate::ray::Ray;
use crate::Axis;
@ -23,9 +23,9 @@ struct Node {
}
/// The BVH containing all the objects of type O.
/// This type must implement [`Accelerated`].
/// This type must implement [`Intersected`].
///
/// [`Accelerated`]: trait.Accelerated.html
/// [`Intersected`]: trait.Intersected.html
#[derive(Clone, Debug, PartialEq)]
pub struct BVH {
tree: Node,
@ -92,7 +92,7 @@ impl BVH {
/// let spheres: &mut [Sphere] = &mut [Sphere{ center: Point::origin(), radius: 2.5 }];
/// let bvh = BVH::build(spheres);
/// ```
pub fn build<O: Accelerated>(objects: &mut [O]) -> Self {
pub fn build<O: Intersected>(objects: &mut [O]) -> Self {
Self::with_max_capacity(objects, 32)
}
@ -157,7 +157,7 @@ impl BVH {
/// let spheres: &mut [Sphere] = &mut [Sphere{ center: Point::origin(), radius: 2.5 }];
/// let bvh = BVH::with_max_capacity(spheres, 32);
/// ```
pub fn with_max_capacity<O: Accelerated>(objects: &mut [O], max_cap: usize) -> Self {
pub fn with_max_capacity<O: Intersected>(objects: &mut [O], max_cap: usize) -> Self {
let tree = build_node(objects, 0, objects.len(), max_cap);
Self { tree }
}
@ -226,8 +226,8 @@ impl BVH {
/// let bvh = BVH::with_max_capacity(spheres, 32);
/// assert!(bvh.is_sound(spheres));
/// ```
pub fn is_sound<O: Accelerated>(&self, objects: &[O]) -> bool {
fn check_node<O: Accelerated>(objects: &[O], node: &Node) -> bool {
pub fn is_sound<O: Intersected>(&self, objects: &[O]) -> bool {
fn check_node<O: Intersected>(objects: &[O], node: &Node) -> bool {
if node.begin > node.end {
return false;
}
@ -322,21 +322,17 @@ impl BVH {
/// assert_eq!(dist, 0.5);
/// assert_eq!(obj, &spheres[0]);
/// ```
pub fn walk<'o, O: Accelerated>(
&self,
ray: &Ray,
objects: &'o [O],
) -> Option<(f32, &'o O::Output)> {
pub fn walk<'o, O: Intersected>(&self, ray: &Ray, objects: &'o [O]) -> Option<(f32, &'o O)> {
walk_rec_helper(ray, objects, &self.tree, std::f32::INFINITY)
}
}
fn walk_rec_helper<'o, O: Accelerated>(
fn walk_rec_helper<'o, O: Intersected>(
ray: &Ray,
objects: &'o [O],
node: &Node,
min: f32,
) -> Option<(f32, &'o O::Output)> {
) -> Option<(f32, &'o O)> {
use std::cmp::Ordering;
match &node.kind {
@ -344,7 +340,7 @@ fn walk_rec_helper<'o, O: Accelerated>(
NodeEnum::Leaf => objects[node.begin..node.end]
.iter()
// This turns the Option<f32> of an intersection into an Option<(f32, &O)>
.filter_map(|o| o.intersect(ray))
.filter_map(|o| o.intersect(ray).map(|d| (d, o)))
// Discard values that are too far away
.filter(|(dist, _)| dist < &min)
// Only keep the minimum value, if there is one
@ -386,14 +382,14 @@ fn walk_rec_helper<'o, O: Accelerated>(
}
}
fn bounds_from_slice<O: Accelerated>(objects: &[O]) -> AABB {
fn bounds_from_slice<O: Intersected>(objects: &[O]) -> AABB {
objects
.iter()
.map(|o| o.aabb())
.fold(AABB::empty(), |acc, other| acc.union(&other))
}
fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_cap: usize) -> Node {
fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_cap: usize) -> Node {
let aabb = bounds_from_slice(objects);
// Don't split nodes under capacity
if objects.len() <= max_cap {
@ -405,7 +401,7 @@ fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_c
};
}
// Calculate the SAH heuristic for this slice
let (split, axis, cost) = compute_sah(objects, aabb.surface(), max_cap);
let (split, axis, cost) = compute_sah(&mut objects[begin..end], aabb.surface(), max_cap);
// Only split if the heuristic shows that it is worth it
if cost >= objects.len() as f32 {
return Node {
@ -415,11 +411,11 @@ fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_c
kind: NodeEnum::Leaf,
};
}
// Avoid degenerate cases
let split = if split <= 1 || split >= (objects.len() - 1) {
(end - begin) / 2
// Avoid degenerate cases, and recenter the split inside [begin, end)
let split = if split == 0 || split >= (end - begin - 1) {
begin + (end - begin) / 2
} else {
split
begin + split
};
// Project along chosen axis
pdqselect::select_by(objects, split, |lhs, rhs| {
@ -428,18 +424,8 @@ fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_c
.expect("Can't use Nans in the SAH computation")
});
// Construct children recursively on [begin, split) and [split, end)
let left = Box::new(build_node(
&mut objects[0..split],
begin,
begin + split,
max_cap,
));
let right = Box::new(build_node(
&mut objects[split..],
begin + split,
end,
max_cap,
));
let left = Box::new(build_node(objects, begin, split, max_cap));
let right = Box::new(build_node(objects, split, end, max_cap));
// Build the node recursively
Node {
bounds: aabb,
@ -451,7 +437,7 @@ fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_c
/// Returns the index at which to split for SAH, the Axis along which to split, and the calculated
/// cost.
fn compute_sah<O: Accelerated>(
fn compute_sah<O: Intersected>(
objects: &mut [O],
surface: f32,
max_cap: usize,
@ -495,7 +481,7 @@ fn compute_sah<O: Accelerated>(
let cost = 1. / max_cap as f32
+ (left_count as f32 * left_surfaces[left_count - 1]
+ right_count as f32 * right_surfaces[right_count - 1])
+ right_count as f32 * right_surfaces[right_count])
/ surface;
if cost < min {
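
As a reader's aid (not part of either branch): the SAH cost that `build_node` compares against the plain leaf cost `objects.len()` can be written as a small standalone function. Names below are illustrative; `max_cap` is the leaf capacity, the surfaces are the bounding-box areas of each side of a candidate split and of the parent node.

// Expected cost of splitting: one traversal step amortised over the leaf
// capacity, plus the intersection work on each child weighted by the chance
// of hitting it (ratio of child surface area to parent surface area).
fn sah_cost(left_count: usize, left_surface: f32,
            right_count: usize, right_surface: f32,
            parent_surface: f32, max_cap: usize) -> f32 {
    1. / max_cap as f32
        + (left_count as f32 * left_surface + right_count as f32 * right_surface)
            / parent_surface
}

fn main() {
    // Splitting 8 objects into two tight halves is cheaper than a leaf of 8,
    // so build_node would recurse on such a split.
    let cost = sah_cost(4, 2.0, 4, 2.0, 6.0, 32);
    assert!(cost < 8.0);
    println!("SAH cost: {}", cost);
}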

View file

@ -19,44 +19,20 @@ name = "pathtracer"
path = "src/main.rs"
[dependencies]
# Our own BVH implementation
beevee = { path = "../beevee" }
# Macro to implement arithmetic operators automagically
derive_more = "0.99.3"
# Transform interfaces into enums for better performance than dynamic dispatch
enum_dispatch = "0.2.1"
# Save an image to PNG
image = "0.23.0"
# Random implementation, not part of the standard library in Rust
indicatif = "0.14.0"
rand = "0.7"
# Parallelism utility functions
rayon = "1.3.0"
# YAML deserialization
serde_yaml = "0.8"
# Command-line argument parsing utilities
structopt = "0.3"
# OBJ format parser
tobj = "1.0"
# Fancy terminal progress bar
[dependencies.indicatif]
version = "0.14"
features = ["with_rayon"]
# Linear algebra basic operations and types
[dependencies.nalgebra]
version = "0.20.0"
features = ["serde-serialize"]
# YAML deserialization
[dependencies.serde]
version = "1.0"
features = ["derive"]

View file

@ -1,5 +1,5 @@
# Optional field
shot_rays: 10
aliasing_limit: 10
# Optional field
reflection_limit: 5

View file

@ -1,30 +0,0 @@
reflection_limit: 5
shot_rays: 50
camera:
origin: [0.0, 1.0, 0.0]
forward: [ 0.0, 0.0, 1.0]
up: [0.0, 1.0, 0.0]
fov: 60.0
distance_to_image: 1.0
x: 1080
y: 1080
lights:
ambients:
- color: {r: 0.1, g: 0.1, b: 0.1}
points:
- position: [0.0, 1.95, 3.2]
color: {r: 1.0, g: 1.0, b: 1.0}
meshes:
# FIXME: make the path relative to the YAML in some way?
# Easiest solution would be to chdir to the YAML's directory
- obj_file: "pathtracer/examples/objs/cornell-box-no-emission.obj"
translation: [0.0, 0.0, 2.8]
rotation: [0, 180, 0]
steps:
- 10
- 25

View file

@ -1,29 +0,0 @@
reflection_limit: 5
shot_rays: 50
camera:
origin: [0.0, 1.0, 0.0]
forward: [ 0.0, 0.0, 1.0]
up: [0.0, 1.0, 0.0]
fov: 60.0
distance_to_image: 1.0
x: 1080
y: 1080
lights:
ambients:
- color: {r: 0.1, g: 0.1, b: 0.1}
points:
- position: [0.0, 1.95, 3.2]
color: {r: 1.0, g: 1.0, b: 1.0}
meshes:
# FIXME: make the path relative to the YAML in some way?
# Easiest solution would be to chdir to the YAML's directory
- obj_file: "pathtracer/examples/objs/cornell-box.obj"
translation: [0.0, 0.0, 2.8]
rotation: [0, 180, 0]
steps:
- 10
- 25

View file

@ -1,88 +0,0 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
newmtl leftWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.63 0.065 0.05 # Red
Kd 0.63 0.065 0.05
Ks 0 0 0
Ke 0 0 0
newmtl rightWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.14 0.45 0.091 # Green
Kd 0.14 0.45 0.091
Ks 0 0 0
Ke 0 0 0
newmtl floor
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl ceiling
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl backWall
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl shortBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl tallBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl light
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.78 0.78 0.78 # White
Kd 0.78 0.78 0.78
Ks 0 0 0
Ke 0 0 0

View file

@ -1,168 +0,0 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
mtllib cornell-box-no-emission.mtl
## Object floor
v -1.01 0.00 0.99
v 1.00 0.00 0.99
v 1.00 0.00 -1.04
v -0.99 0.00 -1.04
g floor
usemtl floor
f -4 -3 -2 -1
## Object ceiling
v -1.02 1.99 0.99
v -1.02 1.99 -1.04
v 1.00 1.99 -1.04
v 1.00 1.99 0.99
g ceiling
usemtl ceiling
f -4 -3 -2 -1
## Object backwall
v -0.99 0.00 -1.04
v 1.00 0.00 -1.04
v 1.00 1.99 -1.04
v -1.02 1.99 -1.04
g backWall
usemtl backWall
f -4 -3 -2 -1
## Object rightwall
v 1.00 0.00 -1.04
v 1.00 0.00 0.99
v 1.00 1.99 0.99
v 1.00 1.99 -1.04
g rightWall
usemtl rightWall
f -4 -3 -2 -1
## Object leftWall
v -1.01 0.00 0.99
v -0.99 0.00 -1.04
v -1.02 1.99 -1.04
v -1.02 1.99 0.99
g leftWall
usemtl leftWall
f -4 -3 -2 -1
## Object shortBox
usemtl shortBox
# Top Face
v 0.53 0.60 0.75
v 0.70 0.60 0.17
v 0.13 0.60 0.00
v -0.05 0.60 0.57
f -4 -3 -2 -1
# Left Face
v -0.05 0.00 0.57
v -0.05 0.60 0.57
v 0.13 0.60 0.00
v 0.13 0.00 0.00
f -4 -3 -2 -1
# Front Face
v 0.53 0.00 0.75
v 0.53 0.60 0.75
v -0.05 0.60 0.57
v -0.05 0.00 0.57
f -4 -3 -2 -1
# Right Face
v 0.70 0.00 0.17
v 0.70 0.60 0.17
v 0.53 0.60 0.75
v 0.53 0.00 0.75
f -4 -3 -2 -1
# Back Face
v 0.13 0.00 0.00
v 0.13 0.60 0.00
v 0.70 0.60 0.17
v 0.70 0.00 0.17
f -4 -3 -2 -1
# Bottom Face
v 0.53 0.00 0.75
v 0.70 0.00 0.17
v 0.13 0.00 0.00
v -0.05 0.00 0.57
f -12 -11 -10 -9
g shortBox
usemtl shortBox
## Object tallBox
usemtl tallBox
# Top Face
v -0.53 1.20 0.09
v 0.04 1.20 -0.09
v -0.14 1.20 -0.67
v -0.71 1.20 -0.49
f -4 -3 -2 -1
# Left Face
v -0.53 0.00 0.09
v -0.53 1.20 0.09
v -0.71 1.20 -0.49
v -0.71 0.00 -0.49
f -4 -3 -2 -1
# Back Face
v -0.71 0.00 -0.49
v -0.71 1.20 -0.49
v -0.14 1.20 -0.67
v -0.14 0.00 -0.67
f -4 -3 -2 -1
# Right Face
v -0.14 0.00 -0.67
v -0.14 1.20 -0.67
v 0.04 1.20 -0.09
v 0.04 0.00 -0.09
f -4 -3 -2 -1
# Front Face
v 0.04 0.00 -0.09
v 0.04 1.20 -0.09
v -0.53 1.20 0.09
v -0.53 0.00 0.09
f -4 -3 -2 -1
# Bottom Face
v -0.53 0.00 0.09
v 0.04 0.00 -0.09
v -0.14 0.00 -0.67
v -0.71 0.00 -0.49
f -8 -7 -6 -5
g tallBox
usemtl tallBox
## Object light
v -0.24 1.98 0.16
v -0.24 1.98 -0.22
v 0.23 1.98 -0.22
v 0.23 1.98 0.16
g light
usemtl light
f -4 -3 -2 -1

View file

@ -1,88 +0,0 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
newmtl leftWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.63 0.065 0.05 # Red
Kd 0.63 0.065 0.05
Ks 0 0 0
Ke 0 0 0
newmtl rightWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.14 0.45 0.091 # Green
Kd 0.14 0.45 0.091
Ks 0 0 0
Ke 0 0 0
newmtl floor
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl ceiling
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl backWall
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl shortBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl tallBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl light
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.78 0.78 0.78 # White
Kd 0.78 0.78 0.78
Ks 0 0 0
Ke 17 12 4

View file

@ -1,168 +0,0 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
mtllib cornell-box.mtl
## Object floor
v -1.01 0.00 0.99
v 1.00 0.00 0.99
v 1.00 0.00 -1.04
v -0.99 0.00 -1.04
g floor
usemtl floor
f -4 -3 -2 -1
## Object ceiling
v -1.02 1.99 0.99
v -1.02 1.99 -1.04
v 1.00 1.99 -1.04
v 1.00 1.99 0.99
g ceiling
usemtl ceiling
f -4 -3 -2 -1
## Object backwall
v -0.99 0.00 -1.04
v 1.00 0.00 -1.04
v 1.00 1.99 -1.04
v -1.02 1.99 -1.04
g backWall
usemtl backWall
f -4 -3 -2 -1
## Object rightwall
v 1.00 0.00 -1.04
v 1.00 0.00 0.99
v 1.00 1.99 0.99
v 1.00 1.99 -1.04
g rightWall
usemtl rightWall
f -4 -3 -2 -1
## Object leftWall
v -1.01 0.00 0.99
v -0.99 0.00 -1.04
v -1.02 1.99 -1.04
v -1.02 1.99 0.99
g leftWall
usemtl leftWall
f -4 -3 -2 -1
## Object shortBox
usemtl shortBox
# Top Face
v 0.53 0.60 0.75
v 0.70 0.60 0.17
v 0.13 0.60 0.00
v -0.05 0.60 0.57
f -4 -3 -2 -1
# Left Face
v -0.05 0.00 0.57
v -0.05 0.60 0.57
v 0.13 0.60 0.00
v 0.13 0.00 0.00
f -4 -3 -2 -1
# Front Face
v 0.53 0.00 0.75
v 0.53 0.60 0.75
v -0.05 0.60 0.57
v -0.05 0.00 0.57
f -4 -3 -2 -1
# Right Face
v 0.70 0.00 0.17
v 0.70 0.60 0.17
v 0.53 0.60 0.75
v 0.53 0.00 0.75
f -4 -3 -2 -1
# Back Face
v 0.13 0.00 0.00
v 0.13 0.60 0.00
v 0.70 0.60 0.17
v 0.70 0.00 0.17
f -4 -3 -2 -1
# Bottom Face
v 0.53 0.00 0.75
v 0.70 0.00 0.17
v 0.13 0.00 0.00
v -0.05 0.00 0.57
f -12 -11 -10 -9
g shortBox
usemtl shortBox
## Object tallBox
usemtl tallBox
# Top Face
v -0.53 1.20 0.09
v 0.04 1.20 -0.09
v -0.14 1.20 -0.67
v -0.71 1.20 -0.49
f -4 -3 -2 -1
# Left Face
v -0.53 0.00 0.09
v -0.53 1.20 0.09
v -0.71 1.20 -0.49
v -0.71 0.00 -0.49
f -4 -3 -2 -1
# Back Face
v -0.71 0.00 -0.49
v -0.71 1.20 -0.49
v -0.14 1.20 -0.67
v -0.14 0.00 -0.67
f -4 -3 -2 -1
# Right Face
v -0.14 0.00 -0.67
v -0.14 1.20 -0.67
v 0.04 1.20 -0.09
v 0.04 0.00 -0.09
f -4 -3 -2 -1
# Front Face
v 0.04 0.00 -0.09
v 0.04 1.20 -0.09
v -0.53 1.20 0.09
v -0.53 0.00 0.09
f -4 -3 -2 -1
# Bottom Face
v -0.53 0.00 0.09
v 0.04 0.00 -0.09
v -0.14 0.00 -0.67
v -0.71 0.00 -0.49
f -8 -7 -6 -5
g tallBox
usemtl tallBox
## Object light
v -0.24 1.98 0.16
v -0.24 1.98 -0.22
v 0.23 1.98 -0.22
v 0.23 1.98 0.16
g light
usemtl light
f -4 -3 -2 -1

View file

@ -1,4 +1,4 @@
shot_rays: 10
aliasing_limit: 10
reflection_limit: 5
background: {r: 0.5, g: 0.5, b: 0.5}

View file

@ -2,18 +2,13 @@
use super::film::Film;
use crate::{Point, Vector};
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
use serde::{Deserialize, Deserializer};
/// Represent an abstract camera to observe the scene.
#[serde(from = "SerializedCamera")]
#[derive(Debug, PartialEq, Deserialize)]
#[derive(Debug, PartialEq)]
pub struct Camera {
/// Where the camera is set in the scene (i.e. the center of the lens).
/// Where the camera is set in the scene (i.e. its focal point).
origin: Point,
/// How far away is the camera's focal plane.
distance_to_image: f32,
/// The film to represent each pixel in the scene.
film: Film,
}
@ -42,20 +37,15 @@ impl Camera {
forward: Vector,
up: Vector,
fov: f32,
distance_to_image: f32,
dist_to_image: f32,
x: u32,
y: u32,
) -> Self {
let right = forward.cross(&up);
let screen_size = 2. * f32::tan(fov / 2.);
// Construct the film behind the camera, upside down
let center = origin - forward.normalize();
let film = Film::new(x, y, screen_size, center, -up, -right);
Camera {
origin,
distance_to_image,
film,
}
let center = origin + forward.normalize() * dist_to_image;
let screen_size = 2. * f32::tan(fov / 2.) * dist_to_image;
let film = Film::new(x, y, screen_size, center, up, right);
Camera { origin, film }
}
/// Get the `Camera`'s [`Film`].
@ -88,24 +78,6 @@ impl Camera {
pub fn origin(&self) -> &Point {
&self.origin
}
/// Get the Ray coming out of the camera at a given ratio on the image.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::Camera;
/// # use pathtracer::Point;
/// #
/// let cam = Camera::default();
/// let ray_ul = cam.ray_with_ratio(0., 0.); // Ray coming out of the upper-left pixel
/// let ray_ul = cam.ray_with_ratio(1., 1.); // Ray coming out of the lower-right pixel
/// ```
pub fn ray_with_ratio(&self, x: f32, y: f32) -> Ray {
let pixel = self.film().pixel_at_ratio(x, y);
let direction = Unit::new_normalize(self.origin() - pixel);
Ray::new(pixel, direction)
}
}
impl Default for Camera {
@ -168,6 +140,16 @@ impl From<SerializedCamera> for Camera {
}
}
impl<'de> Deserialize<'de> for Camera {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedCamera = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;
@ -175,7 +157,7 @@ mod test {
#[test]
fn new_works() {
let cam = Camera::new(
Point::new(1., 0., 0.),
Point::new(-1., 0., 0.),
Vector::new(1., 0., 0.),
Vector::new(0., 1., 0.),
2. * f32::atan(1.), /* 90° in radian */
@ -186,15 +168,14 @@ mod test {
assert_eq!(
cam,
Camera {
origin: Point::new(1., 0., 0.),
distance_to_image: 1.,
origin: Point::new(-1., 0., 0.),
film: Film::new(
1080,
1080,
2.,
Point::origin(),
-Vector::new(0., 1., 0.),
-Vector::new(0., 0., 1.),
Vector::new(0., 1., 0.),
Vector::new(0., 0., 1.),
)
}
)
@ -203,7 +184,7 @@ mod test {
#[test]
fn deserialization_works() {
let yaml = r#"
origin: [1.0, 0.0, 0.0]
origin: [-1.0, 0.0, 0.0]
forward: [ 1.0, 0.0, 0.0]
up: [0.0, 1.0, 0.0]
fov: 90.0
@ -215,15 +196,14 @@ mod test {
assert_eq!(
cam,
Camera {
origin: Point::new(1., 0., 0.),
distance_to_image: 1.0,
origin: Point::new(-1., 0., 0.),
film: Film::new(
1080,
1080,
2.,
Point::origin(),
-Vector::new(0., 1., 0.),
-Vector::new(0., 0., 1.),
Vector::new(0., 1., 0.),
Vector::new(0., 0., 1.),
)
}
)
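
A quick check (not part of either branch) of the film geometry used by the new `Camera::new`: the film centre sits `dist_to_image` in front of the origin along `forward`, and since the half-angle of the field of view spans half the film, its side length is `2 * tan(fov / 2) * dist_to_image`. The helper below is a standalone illustration.

// Film side length for a square film placed `dist_to_image` away from the
// camera origin, given a field of view in radians.
fn film_screen_size(fov: f32, dist_to_image: f32) -> f32 {
    2. * f32::tan(fov / 2.) * dist_to_image
}

fn main() {
    // 90° (2 * atan(1) radians) at distance 1 gives a film of side 2,
    // matching the `new_works` test above.
    let size = film_screen_size(2. * f32::atan(1.), 1.);
    assert!((size - 2.).abs() < 1e-6);
}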

View file

@ -70,25 +70,6 @@ impl LinearColor {
LinearColor { r, g, b }
}
/// Creates a new `Color` from a slice.
///
/// Panics if slice has less than 3 elements.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::LinearColor;
/// #
/// let color = LinearColor::from_slice(&[1.0, 0.0, 0.0]); // bright red!
/// ```
pub fn from_slice(s: &[f32]) -> Self {
LinearColor {
r: s[0],
g: s[1],
b: s[2],
}
}
#[must_use]
/// Clamps the color's RGB components between 0.0 and 1.0.
///

View file

@ -33,9 +33,6 @@ pub struct LightProperties {
/// The transparency or reflectivity properties.
#[serde(flatten)]
pub refl_trans: Option<ReflTransEnum>,
/// The emitted light from this object, only used for path-tracing rendering techniques
#[serde(default)]
pub emitted: LinearColor,
}
impl LightProperties {
@ -51,20 +48,17 @@ impl LightProperties {
/// LinearColor::new(0.25, 0.5, 1.),
/// LinearColor::new(0.75, 0.375, 0.125),
/// Some(ReflTransEnum::Reflectivity { coef: 0.5 }),
/// LinearColor::new(0., 0., 0.),
/// );
/// ```
pub fn new(
diffuse: LinearColor,
specular: LinearColor,
refl_trans: Option<ReflTransEnum>,
emitted: LinearColor,
) -> Self {
LightProperties {
diffuse,
specular,
refl_trans,
emitted,
}
}
}
@ -78,20 +72,14 @@ mod test {
let diffuse = LinearColor::new(0.25, 0.5, 1.);
let specular = LinearColor::new(0.75, 0.375, 0.125);
let refl_trans = Some(ReflTransEnum::Reflectivity { coef: 0.5 });
let emitted = LinearColor::new(0., 1., 0.);
let properties = LightProperties::new(
diffuse.clone(),
specular.clone(),
refl_trans.clone(),
emitted.clone(),
);
let properties =
LightProperties::new(diffuse.clone(), specular.clone(), refl_trans.clone());
assert_eq!(
properties,
LightProperties {
diffuse,
specular,
refl_trans,
emitted,
}
)
}
@ -108,8 +96,7 @@ mod test {
LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
None,
LinearColor::black(),
None
)
)
}
@ -131,8 +118,7 @@ mod test {
Some(ReflTransEnum::Transparency {
coef: 0.5,
index: 1.5
}),
LinearColor::black(),
})
)
)
}
@ -150,27 +136,7 @@ mod test {
LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
Some(ReflTransEnum::Reflectivity { coef: 0.25 }),
LinearColor::black(),
)
)
}
#[test]
fn deserialization_with_emitted_works() {
let yaml = r#"
diffuse: {r: 1.0, g: 0.5, b: 0.25}
specular: {r: 0.25, g: 0.125, b: 0.75}
emitted: {r: 0.25, g: 0.5, b: 1.0}
"#;
let properties: LightProperties = serde_yaml::from_str(yaml).unwrap();
assert_eq!(
properties,
LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
None,
LinearColor::new(0.25, 0.5, 1.0),
Some(ReflTransEnum::Reflectivity { coef: 0.25 })
)
)
}

View file

@ -12,7 +12,6 @@ pub mod core;
pub mod light;
pub mod material;
pub mod render;
pub mod scene;
pub mod serialize;
pub mod shape;
pub mod texture;

View file

@ -27,10 +27,6 @@ impl AmbientLight {
impl Light for AmbientLight {
fn illumination(&self, _: &Point) -> LinearColor {
self.luminance()
}
fn luminance(&self) -> LinearColor {
self.color.clone()
}
}

View file

@ -34,10 +34,6 @@ impl DirectionalLight {
impl Light for DirectionalLight {
fn illumination(&self, _: &Point) -> LinearColor {
self.luminance()
}
fn luminance(&self) -> LinearColor {
self.color.clone()
}
}

View file

@ -2,16 +2,12 @@
use super::core::LinearColor;
use super::{Point, Vector};
use beevee::ray::Ray;
use nalgebra::Unit;
/// Represent a light in the scene being rendered.
pub trait Light: std::fmt::Debug {
/// Get the illumination of that light on that point.
fn illumination(&self, point: &Point) -> LinearColor;
/// Get the raw luminance of that light
fn luminance(&self) -> LinearColor;
}
/// Represent a light which has an abstract position in the scene being rendered.
@ -20,26 +16,6 @@ pub trait SpatialLight: Light {
fn to_source(&self, origin: &Point) -> (Unit<Vector>, f32);
}
/// Represent a light from which we can sample a random `Ray`.
pub trait SampleLight: Light {
/// Uniformly sample a ray from the point-light in a random direction.
///
/// # Examples
///
///```
/// # use pathtracer::light::{PointLight, SampleLight};
/// # use pathtracer::core::color::LinearColor;
/// # use pathtracer::Point;
/// #
/// let dir_light = PointLight::new(
/// Point::origin(),
/// LinearColor::new(1.0, 0.0, 1.0),
/// );
/// let sampled = dir_light.sample_ray();
/// ```
fn sample_ray(&self) -> Ray;
}
mod ambient_light;
pub use ambient_light::*;

View file

@ -1,9 +1,7 @@
use super::{Light, SampleLight, SpatialLight};
use super::{Light, SpatialLight};
use crate::core::LinearColor;
use crate::{Point, Vector};
use beevee::ray::Ray;
use nalgebra::Unit;
use rand::{distributions::Uniform, Rng};
use serde::Deserialize;
/// Represent a light emanating from a point in space, following the square distance law.
@ -36,11 +34,7 @@ impl PointLight {
impl Light for PointLight {
fn illumination(&self, point: &Point) -> LinearColor {
let dist = (self.position - point).norm();
self.luminance() / dist
}
fn luminance(&self) -> LinearColor {
self.color.clone()
self.color.clone() / dist
}
}
@ -52,23 +46,6 @@ impl SpatialLight for PointLight {
}
}
impl SampleLight for PointLight {
fn sample_ray(&self) -> Ray {
let mut rng = rand::thread_rng();
// Sample sphere uniformly
// See <https://mathworld.wolfram.com/SpherePointPicking.html>
let theta = rng.gen_range(0., std::f32::consts::PI * 2.);
let y = rng.sample(Uniform::new(-1., 1.)); // Inclusive for the poles
let dir = Unit::new_unchecked(Vector::new(
// this vector is already of unit length
f32::sqrt(1. - y * y) * f32::cos(theta),
y,
f32::sqrt(1. - y * y) * f32::sin(theta),
));
Ray::new(self.position, dir)
}
}
#[cfg(test)]
mod test {
use super::*;

View file

@ -1,15 +1,13 @@
use super::{Light, SampleLight, SpatialLight};
use super::{Light, SpatialLight};
use crate::core::LinearColor;
use crate::{Point, Vector};
use beevee::ray::Ray;
use nalgebra::Rotation3;
use nalgebra::Unit;
use rand::{distributions::Uniform, Rng};
use serde::Deserialize;
use serde::{Deserialize, Deserializer};
/// Represent a light emanating from a directed light-source, outputting rays in a cone.
#[serde(from = "SerializedSpotLight")]
#[derive(Debug, PartialEq, Deserialize)]
///
/// The illumination cone cannot have an FOV over 180°.
#[derive(Debug, PartialEq)]
pub struct SpotLight {
position: Point,
direction: Unit<Vector>,
@ -54,15 +52,11 @@ impl Light for SpotLight {
let delt = point - self.position;
let cos = self.direction.dot(&delt.normalize());
if cos >= self.cosine_value {
self.luminance() / delt.norm_squared()
self.color.clone() / delt.norm_squared()
} else {
LinearColor::black()
}
}
fn luminance(&self) -> LinearColor {
self.color.clone()
}
}
impl SpatialLight for SpotLight {
@ -73,36 +67,6 @@ impl SpatialLight for SpotLight {
}
}
impl SampleLight for SpotLight {
fn sample_ray(&self) -> Ray {
let mut rng = rand::thread_rng();
// Sample cap at Z-pole uniformly
// See <https://math.stackexchange.com/questions/56784>
let theta = rng.gen_range(0., std::f32::consts::PI * 2.);
let z = rng.sample(Uniform::new(self.cosine_value, 1.)); // Inclusive for the poles
let dir = Unit::new_unchecked(Vector::new(
// this vector is already of unit length
f32::sqrt(1. - z * z) * f32::cos(theta),
f32::sqrt(1. - z * z) * f32::sin(theta),
z,
));
let dir =
if let Some(rotate) = Rotation3::rotation_between(&Vector::z_axis(), &self.direction) {
// Rotate the direction if needed
rotate * dir
} else if self.direction.dot(&dir) < 0. {
// Special case if the direction is directly opposite, its rotation axis is
// undefined, but we don't care about a special axis to perform the rotation
-dir
} else {
dir
};
// We should now be oriented the right way
debug_assert!(self.direction.dot(&dir) >= self.cosine_value);
Ray::new(self.position, dir)
}
}
#[derive(Debug, Deserialize)]
struct SerializedSpotLight {
position: Point,
@ -118,6 +82,16 @@ impl From<SerializedSpotLight> for SpotLight {
}
}
impl<'de> Deserialize<'de> for SpotLight {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedSpotLight = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;

View file

@ -1,19 +1,7 @@
use pathtracer::render::{BidirectionalPathtracer, Pathtracer, Raytracer};
use pathtracer::scene::Scene;
use pathtracer::render::Scene;
use std::path::PathBuf;
use std::str;
use structopt::clap::arg_enum;
use structopt::StructOpt;
arg_enum! {
#[derive(Debug)]
enum RenderOption {
Raytracer,
Pathtracer,
Bidirectional,
}
}
#[derive(StructOpt, Debug)]
struct Options {
/// Input description for the scene to be rendered.
@ -22,15 +10,6 @@ struct Options {
/// Output image for the rendered scene.
#[structopt(short, long, parse(from_os_str), default_value = "scene.png")]
output: PathBuf,
/// Which renderer should be used on the input scene.
#[structopt(
short,
long,
possible_values = &RenderOption::variants(),
case_insensitive = true,
default_value = "Raytracer"
)]
renderer: RenderOption,
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
@ -38,11 +17,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let f = std::fs::File::open(options.input)?;
let scene: Scene = serde_yaml::from_reader(f)?;
let image = match options.renderer {
RenderOption::Raytracer => Raytracer::new(scene).render(),
RenderOption::Pathtracer => Pathtracer::new(scene).render(),
RenderOption::Bidirectional => BidirectionalPathtracer::new(scene).render(),
};
let image = scene.render();
image.save(options.output)?;
Ok(())

View file

@ -9,11 +9,10 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[derive(Debug, PartialEq, Deserialize)]
pub enum MaterialEnum {
#[serde(rename = "uniform")]
UniformMaterial,
TriangleMaterial,
}
/// Represent the physical light properties of an object in the scene;
@ -23,8 +22,5 @@ pub trait Material: std::fmt::Debug {
fn properties(&self, point: Point2D) -> LightProperties;
}
mod triangle;
pub use triangle::*;
mod uniform;
pub use uniform::*;

View file

@ -1,33 +0,0 @@
use super::Material;
use crate::core::{LightProperties, LinearColor, ReflTransEnum};
use crate::Point2D;
use serde::Deserialize;
/// Represent a material which interpolates between three points.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct TriangleMaterial {
/// The diffuse components.
diffuse: [LinearColor; 3],
/// The specular components.
specular: [LinearColor; 3],
/// The transparency or reflectivity properties, this is not interpolated.
#[serde(flatten)]
refl_trans: Option<ReflTransEnum>,
/// The amount of light emitted by the material, only used during path-tracing rendering.
emitted: [LinearColor; 3],
}
impl Material for TriangleMaterial {
fn properties(&self, point: Point2D) -> LightProperties {
let (u, v) = (point.x, point.y);
let sample = |param: &[LinearColor; 3]| -> LinearColor {
param[0].clone() * (1. - u - v) + param[1].clone() * u + param[2].clone() * v
};
let diffuse = sample(&self.diffuse);
let specular = sample(&self.specular);
let emitted = sample(&self.emitted);
LightProperties::new(diffuse, specular, self.refl_trans.clone(), emitted)
}
}
// FIXME: tests

View file

@ -24,7 +24,6 @@ impl UniformMaterial {
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// LinearColor::black(), // Emitted light
/// ),
/// );
/// ```
@ -51,7 +50,6 @@ mod test {
diffuse: LinearColor::new(0., 0.5, 0.),
specular: LinearColor::new(1., 1., 1.),
refl_trans: None,
emitted: LinearColor::black(),
};
let mat = UniformMaterial::new(properties.clone());
assert_eq!(mat, UniformMaterial { properties })
@ -63,7 +61,6 @@ mod test {
LinearColor::new(0., 0.5, 0.),
LinearColor::new(1., 1., 1.),
None,
LinearColor::black(),
);
let mat = UniformMaterial::new(properties.clone());
assert_eq!(mat.properties(Point2D::origin()), properties)
@ -82,8 +79,7 @@ mod test {
UniformMaterial::new(LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
Some(ReflTransEnum::Reflectivity { coef: 0.25 }),
LinearColor::black(),
Some(ReflTransEnum::Reflectivity { coef: 0.25 })
))
)
}

View file

@ -1,197 +0,0 @@
use super::super::Renderer;
use super::path::*;
use crate::core::LinearColor;
use crate::material::Material;
use crate::render::utils::{buffer_to_image, sample_hemisphere};
use crate::scene::{Object, Scene};
use crate::shape::Shape;
use crate::{Point, Vector};
use beevee::ray::Ray;
use image::RgbImage;
use indicatif::ProgressIterator;
use nalgebra::Unit;
use rayon::prelude::*;
/// Render the [`Scene`] using Bidirectional-Pathtracing
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct BidirectionalPathtracer {
#[allow(unused)]
scene: Scene,
}
impl BidirectionalPathtracer {
/// Create a [`BidirectionalPathtracer`] renderer with the given [`Scene`]
///
/// [`BidirectionalPathtracer`]: struct.BidirectionalPathtracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
BidirectionalPathtracer { scene }
}
/// Render the [`Scene`] using Bidirectional-Pathtracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
let (width, height) = (
self.scene.camera.film().width(),
self.scene.camera.film().height(),
);
let total = width * height;
let p = super::super::progress::get_passes_progressbar(self.scene.shot_rays);
let (img_buf, _) = (0..self.scene.shot_rays.max(1))
.progress_with(p)
.map(|_| {
let mut buffer: Vec<LinearColor> = Vec::new();
buffer.resize_with(total as usize, LinearColor::black);
buffer
.par_chunks_mut(width as usize)
.enumerate()
.for_each(|(y, row)| {
for x in 0..width {
row[x as usize] = self.pixel_ray(x as f32, y as f32);
}
});
buffer
})
.fold(
{
let mut vec = Vec::new();
vec.resize_with(total as usize, LinearColor::black);
let count = 0usize;
(vec, count)
},
|(mut acc, count), buf| {
for (i, pixel) in buf.into_iter().enumerate() {
acc[i] += pixel;
}
let count = count + 1; // Because count is 0-indexed
if self.scene.steps.contains(&count) {
let image = buffer_to_image(&acc, count as u32, width, height);
image
.save(format!("{}_passes.png", count))
.expect("writing image failed!");
}
(acc, count) // Count has been updated previously
},
);
buffer_to_image(&img_buf, self.scene.shot_rays, width, height)
}
fn pixel_ray(&self, x: f32, y: f32) -> LinearColor {
let light_paths = self
.scene
.lights
.sample_lights_iter()
.map(|l| {
let light_ray = l.sample_ray();
self.construct_light_path(light_ray.origin, light_ray.direction, l.luminance())
})
.collect::<Vec<_>>();
let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
let ray = self.scene.camera.ray_with_ratio(x, y);
self.cast_ray(ray).map_or_else(
|| self.scene.background.clone(),
|(t, obj)| self.radiance(ray, t, obj, &light_paths, self.scene.reflection_limit),
)
}
fn radiance(
&self,
ray: Ray,
t: f32,
obj: &Object,
light_paths: &[Path],
limit: u32,
) -> LinearColor {
let hit_pos = ray.origin + ray.direction.as_ref() * t;
let texel = obj.shape.project_texel(&hit_pos);
let properties = obj.material.properties(texel);
let mut light_samples = LinearColor::black();
for path in light_paths {
for point in &path.points {
light_samples += point.luminance.clone() / (hit_pos - point.point).norm();
}
}
if limit == 0 {
return properties.emitted;
}
let brdf = properties.diffuse;
let normal = obj.shape.normal(&hit_pos);
let new_direction = sample_hemisphere(normal);
let new_ray = Ray::new(hit_pos + new_direction.as_ref() * 0.001, new_direction);
let incoming = self
.cast_ray(new_ray)
.map_or_else(LinearColor::black, |(t, obj)| {
self.radiance(new_ray, t, obj, light_paths, limit - 1)
});
light_samples + properties.emitted + (brdf * incoming)
}
#[allow(unused)]
fn construct_light_path(
&self,
mut origin: Point,
mut direction: Unit<Vector>,
luminance: LinearColor,
) -> Path {
let mut res = Path::new(origin);
let mut previous_luminance = luminance.clone();
let light_point = PathPoint::new(origin, luminance);
res.push_point(light_point);
for _ in 0..self.scene.reflection_limit {
let ray = Ray::new(origin, direction);
match self.cast_ray(ray) {
Some((distance, obj)) => {
let hit_pos = origin + direction.as_ref() * distance;
let texel = obj.shape.project_texel(&hit_pos);
let properties = obj.material.properties(texel);
let emitted = properties.emitted;
let diffuse = properties.diffuse;
let normal = obj.shape.normal(&hit_pos);
let luminance = emitted + (diffuse * (previous_luminance / distance));
let p = PathPoint::new(hit_pos, luminance.clone());
res.push_point(p);
let new_direction = sample_hemisphere(normal);
// Calculate the incoming light along the new ray
origin = hit_pos + new_direction.as_ref() * 0.001;
direction = new_direction;
previous_luminance = luminance;
}
None => break,
}
}
res
}
#[allow(unused)]
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
self.scene.bvh.walk(&ray, &self.scene.objects)
}
}
impl Renderer for BidirectionalPathtracer {
fn render(&self) -> RgbImage {
self.render()
}
}

View file

@ -1,4 +0,0 @@
mod path;
mod bidirectional_pathtracer;
pub use bidirectional_pathtracer::*;

View file

@ -1,34 +0,0 @@
use crate::core::LinearColor;
use crate::Point;
pub struct PathPoint {
pub point: Point,
pub luminance: LinearColor,
}
impl PathPoint {
#[allow(unused)]
pub fn new(point: Point, luminance: LinearColor) -> Self {
PathPoint { point, luminance }
}
}
pub struct Path {
pub origin: Point,
pub points: Vec<PathPoint>,
}
impl Path {
#[allow(unused)]
pub fn new(origin: Point) -> Self {
Path {
origin,
points: Vec::new(),
}
}
#[allow(unused)]
pub fn push_point(&mut self, new_point: PathPoint) {
self.points.push(new_point)
}
}

View file

@ -23,7 +23,7 @@ impl LightAggregate {
/// # Examples
///
/// ```
/// # use pathtracer::scene::LightAggregate;
/// # use pathtracer::render::LightAggregate;
/// #
/// let la = LightAggregate::empty();
/// assert_eq!(la.ambient_lights_iter().count(), 0);
@ -40,7 +40,7 @@ impl LightAggregate {
/// # Examples
///
/// ```
/// # use pathtracer::scene::LightAggregate;
/// # use pathtracer::render::LightAggregate;
/// #
/// let la = LightAggregate::new(
/// Vec::new(),
@ -87,20 +87,6 @@ impl LightAggregate {
.chain(self.points.iter().map(|l| l as &dyn SpatialLight))
.chain(self.spots.iter().map(|l| l as &dyn SpatialLight))
}
/// Returns an iterator over the aggregate's [`SampleLight`]s.
///
/// This simply merges iterators over [`SpotLight`] and [`PointLight`].
///
/// [`SampleLight`]: ../../light/trait.SampleLight.html
/// [`PointLight`]: ../../light/point_light/struct.PointLight.html
/// [`SpotLight`]: ../../light/spot_light/struct.SpotLight.html
pub fn sample_lights_iter(&self) -> impl Iterator<Item = &dyn SampleLight> {
self.spots
.iter()
.map(|sl| sl as &dyn SampleLight)
.chain(self.points.iter().map(|pl| pl as &dyn SampleLight))
}
}
impl Default for LightAggregate {

View file

@ -1,22 +1,12 @@
//! Define the different kinds of renderers for use on a given scene.
use image::RgbImage;
//! Rendering logic
/// Each renderer implements this trait, to be called after being built.
pub trait Renderer {
/// Render the [`Scene`] using the chosen rendering technique.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
fn render(&self) -> RgbImage;
}
pub mod light_aggregate;
pub use light_aggregate::*;
mod bidirectional;
pub use bidirectional::*;
pub mod object;
pub use object::*;
mod pathtrace;
pub use pathtrace::*;
pub mod scene;
pub use scene::*;
mod raytrace;
pub use raytrace::*;
pub(crate) mod progress;
pub(crate) mod utils;

View file

@ -1,7 +1,7 @@
//! Logic for the scene objects
use crate::material::MaterialEnum;
use crate::shape::ShapeEnum;
use crate::shape::{Shape, ShapeEnum};
use crate::texture::TextureEnum;
use crate::Point;
use beevee::{
@ -12,7 +12,7 @@ use beevee::{
use serde::Deserialize;
/// An object being rendered in the scene.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Object {
/// The `Object`'s physical shape
pub shape: ShapeEnum,
@ -30,7 +30,7 @@ impl Object {
/// ```
/// # use pathtracer::core::{LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::scene::Object;
/// # use pathtracer::render::Object;
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
@ -42,7 +42,6 @@ impl Object {
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// LinearColor::black(), // Emitted light
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
@ -88,7 +87,6 @@ mod test {
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(1., 1., 1.),
None,
LinearColor::black(),
));
let texture = UniformTexture::new(LinearColor::new(0.25, 0.5, 1.));
Object::new(shape.into(), material.into(), texture.into())
@ -101,7 +99,6 @@ mod test {
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(1., 1., 1.),
None,
LinearColor::black(),
));
let texture = UniformTexture::new(LinearColor::new(0.25, 0.5, 1.));
assert_eq!(

View file

@ -1,2 +0,0 @@
mod pathtracer;
pub use self::pathtracer::*;

View file

@ -1,135 +0,0 @@
use indicatif::ProgressIterator;
use rayon::prelude::*;
use super::super::utils::{buffer_to_image, sample_hemisphere};
use super::super::Renderer;
use crate::{
core::LinearColor,
material::Material,
scene::{Object, Scene},
shape::Shape,
};
use beevee::ray::Ray;
use image::RgbImage;
/// Render the [`Scene`] using Pathtracing
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct Pathtracer {
#[allow(unused)]
scene: Scene,
}
impl Pathtracer {
/// Create a [`Pathtracer`] renderer with the given [`Scene`]
///
/// [`Pathtracer`]: struct.Pathtracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
Pathtracer { scene }
}
/// Render the [`Scene`] using Pathtracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
let (width, height) = (
self.scene.camera.film().width(),
self.scene.camera.film().height(),
);
let total = width * height;
let p = super::super::progress::get_passes_progressbar(self.scene.shot_rays);
// Ensure at least one round of shots
let (img_buf, _) = (0..self.scene.shot_rays.max(1))
.progress_with(p)
.map(|_| {
let mut buffer: Vec<LinearColor> = Vec::new();
buffer.resize_with(total as usize, LinearColor::black);
buffer
.par_chunks_mut(width as usize)
.enumerate()
.for_each(|(y, row)| {
for x in 0..width {
row[x as usize] += self.pixel_ray(x as f32, y as f32);
}
});
buffer
})
.fold(
{
let mut vec = Vec::new();
vec.resize_with(total as usize, LinearColor::black);
let count = 0usize;
(vec, count)
},
|(mut acc, count), buf| {
for (i, pixel) in buf.into_iter().enumerate() {
acc[i] += pixel;
}
let count = count + 1; // Because count is 0-indexed
if self.scene.steps.contains(&count) {
let image = buffer_to_image(&acc, count as u32, width, height);
image
.save(format!("{}_passes.png", count))
.expect("writing image failed!");
}
(acc, count) // Count has been updated previously
},
);
buffer_to_image(&img_buf, self.scene.shot_rays, width, height)
}
fn pixel_ray(&self, x: f32, y: f32) -> LinearColor {
let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
let ray = self.scene.camera.ray_with_ratio(x, y);
self.cast_ray(ray).map_or_else(
|| self.scene.background.clone(),
|(t, obj)| self.radiance(ray, t, obj, self.scene.reflection_limit),
)
}
fn radiance(&self, ray: Ray, t: f32, obj: &Object, limit: u32) -> LinearColor {
// This doesn't look great, but it works ¯\_(ツ)_/¯
let hit_pos = ray.origin + ray.direction.as_ref() * t;
let texel = obj.shape.project_texel(&hit_pos);
let properties = obj.material.properties(texel);
// If we are at the recursion limit, return the light emitted by the object
if limit == 0 {
return properties.emitted;
};
// Get BRDF
// FIXME: what about the material's albedo ?
let brdf = properties.diffuse;
// Pick a new direction
let normal = obj.shape.normal(&hit_pos);
let new_direction = sample_hemisphere(normal);
// Calculate the incoming light along the new ray
let new_ray = Ray::new(hit_pos + new_direction.as_ref() * 0.001, new_direction);
let incoming = self
.cast_ray(new_ray)
.map_or_else(LinearColor::black, |(t, obj)| {
self.radiance(new_ray, t, obj, limit - 1)
});
// Put it all together
// The weight of the sample and the cosine of the new ray cancel each other out
properties.emitted + (brdf * incoming)
}
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
self.scene.bvh.walk(&ray, &self.scene.objects)
}
}
impl Renderer for Pathtracer {
fn render(&self) -> RgbImage {
self.render()
}
}

View file

@ -1,26 +0,0 @@
use indicatif::{ProgressBar, ProgressStyle};
pub fn get_progressbar(total: u64, style: &str) -> ProgressBar {
let pb = ProgressBar::new(total);
pb.set_draw_delta((total / 10000).max(1));
pb.set_style(ProgressStyle::default_bar().template(style));
pb
}
pub fn get_pixels_progressbar(total: u64) -> ProgressBar {
get_progressbar(
total,
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} pixels (ETA: {eta})",
)
}
pub fn get_passes_progressbar(total: u32) -> ProgressBar {
let pb = get_progressbar(
total as u64,
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} passes (ETA: {eta})",
);
pb.enable_steady_tick(1000);
pb
}

View file

@ -1,2 +0,0 @@
mod raytracer;
pub use self::raytracer::*;

View file

@ -1,48 +1,102 @@
use super::super::utils::*;
use super::super::Renderer;
use crate::scene::{Object, Scene};
//! Scene rendering logic
use super::{light_aggregate::LightAggregate, object::Object, utils::*};
use crate::{
core::{LightProperties, LinearColor, ReflTransEnum},
core::{Camera, LightProperties, LinearColor, ReflTransEnum},
material::Material,
shape::Shape,
texture::Texture,
{Point, Vector},
};
use beevee::ray::Ray;
use beevee::{bvh::BVH, ray::Ray};
use image::RgbImage;
use nalgebra::Unit;
use rand::prelude::thread_rng;
use rand::Rng;
use serde::{Deserialize, Deserializer};
/// Render the [`Scene`] using Raytracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct Raytracer {
scene: Scene,
/// Represent the scene being rendered.
pub struct Scene {
camera: Camera,
lights: LightAggregate,
objects: Vec<Object>,
bvh: BVH,
background: LinearColor,
aliasing_limit: u32,
reflection_limit: u32,
diffraction_index: f32,
}
impl Raytracer {
/// Create a [`Raytracer`] renderer with the given [`Scene`]
impl Scene {
/// Creates a new `Scene`.
///
/// [`Raytracer`]: struct.Raytracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
Raytracer { scene }
/// # Examples
///
/// ```
/// # use pathtracer::core::{Camera, LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::render::{LightAggregate, Object, Scene};
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
/// #
/// let scene = Scene::new(
/// Camera::default(),
/// LightAggregate::empty(),
/// vec![
/// Object::new(
/// Sphere::new(Point::origin(), 1.0).into(),
/// UniformMaterial::new(
/// LightProperties::new(
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
/// ),
/// ],
/// LinearColor::black(), // Background color
/// 5, // aliasing limit
/// 3, // reflection recursion limit
/// 0.0, // diffraction index
/// );
/// ```
pub fn new(
camera: Camera,
lights: LightAggregate,
mut objects: Vec<Object>,
background: LinearColor,
aliasing_limit: u32,
reflection_limit: u32,
diffraction_index: f32,
) -> Self {
// NOTE(Antoine): fun fact: BVH::build stack overflows when given an empty slice :)
let bvh = BVH::build(&mut objects);
Scene {
camera,
lights,
objects,
bvh,
background,
aliasing_limit,
reflection_limit,
diffraction_index,
}
}
/// Render the [`Scene`] using Raytracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
/// Render the scene into an image.
pub fn render(&self) -> RgbImage {
let mut image = RgbImage::new(
self.scene.camera.film().width(),
self.scene.camera.film().height(),
);
let mut image = RgbImage::new(self.camera.film().width(), self.camera.film().height());
let total = (image.width() * image.height()) as u64;
let pb = super::super::progress::get_pixels_progressbar(total);
let pb = indicatif::ProgressBar::new(total);
pb.set_draw_delta(total / 10000);
pb.set_style(indicatif::ProgressStyle::default_bar().template(
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} pixels (ETA: {eta})",
));
let pixel_func = if self.scene.shot_rays > 0 {
let pixel_func = if self.aliasing_limit > 0 {
Self::anti_alias_pixel
} else {
Self::pixel
@ -67,17 +121,18 @@ impl Raytracer {
/// Get pixel color for (x, y) a pixel **coordinate**
fn pixel(&self, x: f32, y: f32) -> LinearColor {
let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
let indices = RefractionInfo::with_index(self.scene.diffraction_index);
let ray = self.scene.camera.ray_with_ratio(x, y);
self.cast_ray(ray).map_or_else(
|| self.scene.background.clone(),
let (x, y) = self.camera.film().pixel_ratio(x, y);
let pixel = self.camera.film().pixel_at_ratio(x, y);
let direction = Unit::new_normalize(pixel - self.camera.origin());
let indices = RefractionInfo::with_index(self.diffraction_index);
self.cast_ray(Ray::new(pixel, direction)).map_or_else(
|| self.background.clone(),
|(t, obj)| {
self.color_at(
ray.origin + ray.direction.as_ref() * t,
pixel + direction.as_ref() * t,
obj,
ray.direction,
self.scene.reflection_limit,
direction,
self.reflection_limit,
indices,
)
},
@ -86,7 +141,7 @@ impl Raytracer {
/// Get pixel color with anti-aliasing
fn anti_alias_pixel(&self, x: f32, y: f32) -> LinearColor {
let range = 0..self.scene.shot_rays;
let range = 0..self.aliasing_limit;
let mut rng = thread_rng();
let acc: LinearColor = range
.map(|_| {
@ -96,11 +151,11 @@ impl Raytracer {
})
.map(LinearColor::clamp)
.sum();
acc / self.scene.shot_rays as f32
acc / self.aliasing_limit as f32
}
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
self.scene.bvh.walk(&ray, &self.scene.objects)
self.bvh.walk(&ray, &self.objects)
}
fn color_at(
@ -118,9 +173,6 @@ impl Raytracer {
let normal = object.shape.normal(&point);
let reflected_ray = reflected(incident_ray, normal);
// FIXME: change this to averaged sampled rays instead of visiting every light ?
// Indeed the path-tracing algorithm is good for calculating the radiance at a point
// But it should be used for reflection and refraction too...
let lighting = self.illuminate(point, object_color, &properties, normal, reflected_ray);
if properties.refl_trans.is_none() {
// Avoid calculating reflection when not needed
@ -209,8 +261,7 @@ impl Raytracer {
}
fn illuminate_ambient(&self, color: LinearColor) -> LinearColor {
self.scene
.lights
self.lights
.ambient_lights_iter()
.map(|light| color.clone() * light.illumination(&Point::origin()))
.map(LinearColor::clamp)
@ -224,12 +275,11 @@ impl Raytracer {
normal: Unit<Vector>,
reflected: Unit<Vector>,
) -> LinearColor {
self.scene
.lights
self.lights
.spatial_lights_iter()
.map(|light| {
let (direction, t) = light.to_source(&point);
let light_ray = Ray::new(point + direction.as_ref() * 0.001, direction);
let light_ray = Ray::new(point + 0.001 * direction.as_ref(), direction);
match self.cast_ray(light_ray) {
// Take shadows into account
Some((obstacle_t, _)) if obstacle_t < t => return LinearColor::black(),
@ -245,8 +295,72 @@ impl Raytracer {
}
}
impl Renderer for Raytracer {
fn render(&self) -> RgbImage {
self.render()
#[derive(Debug, PartialEq, Deserialize)]
struct SerializedScene {
camera: Camera,
#[serde(default)]
lights: LightAggregate,
#[serde(default)]
objects: Vec<Object>,
#[serde(default)]
background: LinearColor,
#[serde(default)]
aliasing_limit: u32,
#[serde(default)]
reflection_limit: u32,
#[serde(default = "crate::serialize::default_identity")]
starting_diffraction: f32,
}
impl From<SerializedScene> for Scene {
fn from(scene: SerializedScene) -> Self {
Scene::new(
scene.camera,
scene.lights,
scene.objects,
scene.background,
scene.aliasing_limit,
scene.reflection_limit,
scene.starting_diffraction,
)
}
}
impl<'de> Deserialize<'de> for Scene {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedScene = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn deserialization_works() {
let yaml = std::include_str!("../../examples/scene.yaml");
let _: Scene = serde_yaml::from_str(yaml).unwrap();
// FIXME: actually test the equality ?
}
#[test]
#[ignore] // stack overflow because of BVH :(
fn bvh_fails() {
use crate::core::Camera;
use crate::render::{LightAggregate, Scene};
let _scene = Scene::new(
Camera::default(),
LightAggregate::empty(),
Vec::new(), // Objects list
LinearColor::black(), // Background color
5, // aliasing limit
3, // reflection recursion limit
0.0, // diffraction index
);
}
}

View file

@ -1,9 +1,5 @@
use crate::core::LinearColor;
use crate::Vector;
use image::RgbImage;
use nalgebra::Unit;
use rand::prelude::thread_rng;
use rand::Rng;
pub fn reflected(incident: Unit<Vector>, normal: Unit<Vector>) -> Unit<Vector> {
let proj = incident.dot(&normal);
@ -69,66 +65,3 @@ impl RefractionInfo {
std::mem::swap(&mut self.old_index, &mut self.new_index)
}
}
/// Returns a random ray in the hemisphere described by a normal unit-vector
/// It is cosine-sampled, which is convenient for path-tracing.
pub fn sample_hemisphere(normal: Unit<Vector>) -> Unit<Vector> {
let mut rng = thread_rng();
let azimuth = rng.gen::<f32>() * std::f32::consts::PI * 2.;
// Cosine weighted importance sampling
let cos_elevation: f32 = rng.gen();
let sin_elevation = f32::sqrt(1. - cos_elevation * cos_elevation);
let x = sin_elevation * azimuth.cos();
let y = cos_elevation;
let z = sin_elevation * azimuth.sin();
// Calculate an orthonormal basis, defined by (normal_b, normal, normal_t)
// Pay attention to degenerate cases when (y, z) is small for use with cross product
let normal_t = if normal.x.abs() > normal.y.abs() {
Vector::new(normal.z, 0., -normal.x).normalize()
} else {
Vector::new(0., -normal.z, normal.y).normalize()
};
let normal_b = normal.cross(&normal_t);
// Perform the matrix calculation by hand...
// The probability to have picked the ray is inversely proportional to cosine of the angle with
// the normal
Unit::new_normalize(Vector::new(
x * normal_b.x + y * normal.x + z * normal_t.x,
x * normal_b.y + y * normal.y + z * normal_t.y,
x * normal_b.z + y * normal.z + z * normal_t.z,
))
}
pub fn buffer_to_image(buffer: &[LinearColor], passes: u32, width: u32, height: u32) -> RgbImage {
let mut image = RgbImage::new(width, height);
for (x, y, pixel) in image.enumerate_pixels_mut() {
let i = x as usize + y as usize * width as usize;
*pixel = (buffer[i].clone() / passes as f32).into();
}
image
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn sample_hemisphere_work() {
// NOTE(Bruno): should use some test-case generation for failure-reproduction purposes...
let mut rng = thread_rng();
for _ in 0..100 {
let normal = Unit::new_normalize(Vector::new(rng.gen(), rng.gen(), rng.gen()));
for _ in 0..100 {
let sample = sample_hemisphere(normal);
let cos_angle = normal.dot(&sample);
assert!(cos_angle >= 0.);
}
}
}
}

View file

@ -1,182 +0,0 @@
use std::convert::TryFrom;
use std::path::PathBuf;
use nalgebra::{Similarity3, Unit, VectorSlice3};
use serde::Deserialize;
use tobj::{self, load_obj};
use super::Object;
use crate::{
core::{LightProperties, LinearColor},
material::{MaterialEnum, UniformMaterial},
shape::{InterpolatedTriangle, ShapeEnum, Triangle},
texture::{TextureEnum, UniformTexture},
Point, Vector,
};
/// Represent a mesh of objects.
#[serde(try_from = "Wavefront")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Mesh {
/// The shapes composing the mesh
pub(crate) shapes: Vec<Object>,
}
#[derive(Debug, PartialEq, Deserialize)]
pub(crate) struct Wavefront {
pub obj_file: PathBuf,
#[serde(default = "nalgebra::zero")]
translation: Vector,
#[serde(default = "nalgebra::zero")]
rotation: Vector,
#[serde(default = "crate::serialize::coefficient::default_identity")]
scale: f32,
}
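// A hypothetical YAML snippet that a `Wavefront` entry could be deserialized
// from; the .obj path is made up, `rotation` is given in degrees, and any field
// omitted besides `obj_file` falls back to the serde default declared above:
//
//     obj_file: "./models/cube.obj"
//     translation: [0.0, -1.0, 5.0]
//     rotation: [0.0, 90.0, 0.0]
//     scale: 0.5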
fn parse_float3(s: &str) -> Result<[f32; 3], tobj::LoadError> {
let mut res = [0.0, 0.0, 0.0];
let mut count = 0;
for (i, s) in s.split_whitespace().enumerate() {
if count == 3 {
return Err(tobj::LoadError::MaterialParseError);
}
res[i] = s.parse().map_err(|_| tobj::LoadError::MaterialParseError)?;
count += 1;
}
if count < 3 {
return Err(tobj::LoadError::MaterialParseError);
}
Ok(res)
}
impl TryFrom<Wavefront> for Mesh {
type Error = tobj::LoadError;
fn try_from(wavefront: Wavefront) -> Result<Mesh, Self::Error> {
let mut shapes = Vec::new();
let (models, materials) = load_obj(&wavefront.obj_file)?;
// The object to world transformation matrix
let transform = Similarity3::new(
wavefront.translation,
wavefront.rotation * std::f32::consts::PI / 180., // From degrees to radians
wavefront.scale,
);
for model in models {
let mesh = &model.mesh;
// mesh.indices lists vertex indices. Each group of 3 indices
// describes one triangle, so we iterate over them 3 by 3.
for i in 0..(mesh.indices.len() / 3) {
let (a, b, c) = (
mesh.indices[i * 3] as usize,
mesh.indices[i * 3 + 1] as usize,
mesh.indices[i * 3 + 2] as usize,
);
let pos_a = transform * Point::from_slice(&mesh.positions[(a * 3)..(a * 3 + 3)]);
let pos_b = transform * Point::from_slice(&mesh.positions[(b * 3)..(b * 3 + 3)]);
let pos_c = transform * Point::from_slice(&mesh.positions[(c * 3)..(c * 3 + 3)]);
let triangle: ShapeEnum = if mesh.normals.is_empty() {
Triangle::new(pos_a, pos_b, pos_c).into()
} else {
// We apply the (arguably useless) scaling to the vectors in case it is
// negative, which would invert their direction
let norm_a = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(a * 3)..(a * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
let norm_b = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(b * 3)..(b * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
let norm_c = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(c * 3)..(c * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
InterpolatedTriangle::new(pos_a, pos_b, pos_c, norm_a, norm_b, norm_c).into()
};
// FIXME: handle material
let (material, texture): (MaterialEnum, TextureEnum) =
if let Some(mat_id) = mesh.material_id {
let mesh_mat = &materials[mat_id];
let diffuse = LinearColor::from_slice(&mesh_mat.ambient[..]);
let specular = LinearColor::from_slice(&mesh_mat.ambient[..]);
let emitted = mesh_mat
.unknown_param
.get("Ke")
// we want a default if "Ke" isn't provided, but we
// want an error if it is provided but its value
// doesn't parse
.map_or(Ok(LinearColor::black()), |ke| {
parse_float3(ke).map(|vals| LinearColor::from_slice(&vals))
})?;
let material = UniformMaterial::new(LightProperties::new(
diffuse.clone(),
specular,
// FIXME: material.dissolve is supposed to be "the alpha term"
// Needs translation to our ReflTransEnum
None,
emitted,
));
// we only handle uniform textures
let texture = UniformTexture::new(diffuse);
(material.into(), texture.into())
} else {
// FIXME: should we accept this, and use a default
// Material, or throw a LoadError
(
UniformMaterial::new(LightProperties::new(
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(0.1, 0.1, 0.1),
None,
LinearColor::black(),
))
.into(),
UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
)
};
shapes.push(Object::new(triangle, material, texture));
}
}
Ok(Mesh { shapes })
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_float3_works() {
assert_eq!(parse_float3("1 1 1"), Ok([1., 1., 1.]));
assert_eq!(
parse_float3("1 1"),
Err(tobj::LoadError::MaterialParseError)
);
assert_eq!(
parse_float3("1 1 1 1"),
Err(tobj::LoadError::MaterialParseError)
);
}
}

View file

@ -1,167 +0,0 @@
//! Description of the scene.
use beevee::bvh::BVH;
use serde::Deserialize;
use crate::core::{Camera, LinearColor};
pub mod light_aggregate;
pub use light_aggregate::*;
mod mesh;
pub use mesh::*;
pub mod object;
pub use object::*;
/// Represent the scene being rendered.
#[serde(from = "SerializedScene")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Scene {
pub(crate) camera: Camera,
pub(crate) lights: LightAggregate,
pub(crate) objects: Vec<Object>,
pub(crate) bvh: BVH,
pub(crate) background: LinearColor,
pub(crate) shot_rays: u32,
pub(crate) reflection_limit: u32,
pub(crate) diffraction_index: f32,
pub(crate) steps: Vec<usize>,
}
impl Scene {
#[allow(clippy::too_many_arguments)]
/// Creates a new `Scene`.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::{Camera, LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::scene::{LightAggregate, Object, Scene};
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
/// #
/// let scene = Scene::new(
/// Camera::default(),
/// LightAggregate::empty(),
/// vec![
/// Object::new(
/// Sphere::new(Point::origin(), 1.0).into(),
/// UniformMaterial::new(
/// LightProperties::new(
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// LinearColor::black(), // Emitted light
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
/// ),
/// ],
/// LinearColor::black(), // Background color
/// 5, // amount of rays shot per pixel
/// 3, // reflection recursion limit
/// 0.0, // diffraction index
/// Vec::new(), // steps
/// );
/// ```
pub fn new(
camera: Camera,
lights: LightAggregate,
mut objects: Vec<Object>,
background: LinearColor,
shot_rays: u32,
reflection_limit: u32,
diffraction_index: f32,
steps: Vec<usize>,
) -> Self {
let bvh = BVH::build(&mut objects);
Scene {
camera,
lights,
objects,
bvh,
background,
shot_rays,
reflection_limit,
diffraction_index,
steps,
}
}
}
#[derive(Debug, PartialEq, Deserialize)]
#[serde(deny_unknown_fields)]
struct SerializedScene {
camera: Camera,
#[serde(default)]
lights: LightAggregate,
#[serde(default)]
objects: Vec<Object>,
#[serde(default)]
meshes: Vec<Mesh>,
#[serde(default)]
background: LinearColor,
#[serde(default)]
shot_rays: u32,
#[serde(default)]
reflection_limit: u32,
#[serde(default = "crate::serialize::default_identity")]
starting_diffraction: f32,
#[serde(default)]
steps: Vec<usize>,
}
impl From<SerializedScene> for Scene {
fn from(mut scene: SerializedScene) -> Self {
let mut flattened_meshes: Vec<Object> = scene
.meshes
.into_iter()
.map(|m| m.shapes)
.flatten()
.collect();
scene.objects.append(&mut flattened_meshes);
Scene::new(
scene.camera,
scene.lights,
scene.objects,
scene.background,
scene.shot_rays,
scene.reflection_limit,
scene.starting_diffraction,
scene.steps,
)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn deserialization_works() {
let yaml = std::include_str!("../../examples/scene.yaml");
let _: Scene = serde_yaml::from_str(yaml).unwrap();
// FIXME: actually test the equality?
}
#[test]
fn empty_scene() {
use crate::core::Camera;
use crate::scene::{LightAggregate, Scene};
let _scene = Scene::new(
Camera::default(),
LightAggregate::empty(),
Vec::new(), // Objects list
LinearColor::black(), // Background color
5, // amount of rays shot per pixel
3, // reflection recursion limit
0.0, // diffraction index
Vec::new(), // steps
);
}
}

View file

@ -1,158 +0,0 @@
use super::triangle::Triangle;
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
/// Represent a triangle with interpolated normals inside the scene.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct InterpolatedTriangle {
#[serde(flatten)]
tri: Triangle,
// FIXME: serialize with unit
normals: [Unit<Vector>; 3],
}
impl InterpolatedTriangle {
/// Creates a new `InterpolatedTriangle` from 3 [`Point`]s and 3 [`Vector`]s.
///
/// [`Point`]: ../../type.Point.html
/// [`Vector`]: ../../type.Vector.html
///
/// # Examples
///
/// ```
/// # use pathtracer::shape::InterpolatedTriangle;
/// # use pathtracer::{Point, Vector};
/// #
/// let t = InterpolatedTriangle::new(
/// Point::new(1.0, 0.0, 0.0),
/// Point::new(0.0, 1.0, 0.0),
/// Point::new(0.0, 0.0, 1.0),
/// Vector::x_axis(),
/// Vector::y_axis(),
/// Vector::z_axis(),
/// );
/// ```
pub fn new(
c0: Point,
c1: Point,
c2: Point,
n0: Unit<Vector>,
n1: Unit<Vector>,
n2: Unit<Vector>,
) -> Self {
InterpolatedTriangle {
tri: Triangle::new(c0, c1, c2),
normals: [n0, n1, n2],
}
}
}
impl Shape for InterpolatedTriangle {
fn normal(&self, point: &Point) -> Unit<Vector> {
let (u, v) = {
let c = self.tri.barycentric(point);
(c.x, c.y)
};
let interpol = self.normals[0].as_ref() * (1. - u - v)
+ self.normals[1].as_ref() * u
+ self.normals[2].as_ref() * v;
Unit::new_normalize(interpol)
}
fn project_texel(&self, point: &Point) -> Point2D {
self.tri.project_texel(point)
}
}
impl Bounded for InterpolatedTriangle {
fn aabb(&self) -> AABB {
self.tri.aabb()
}
fn centroid(&self) -> Point {
self.tri.centroid()
}
}
impl Intersected for InterpolatedTriangle {
fn intersect(&self, ray: &Ray) -> Option<f32> {
self.tri.intersect(ray)
}
}
#[cfg(test)]
mod test {
use super::*;
fn simple_triangle() -> InterpolatedTriangle {
InterpolatedTriangle::new(
Point::origin(),
Point::new(0., 1., 1.),
Point::new(0., 1., 0.),
Vector::x_axis(),
Vector::y_axis(),
Vector::z_axis(),
)
}
#[test]
fn normal_interpolation_at_c0_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::origin());
assert_eq!(normal, Vector::x_axis());
}
#[test]
fn normal_interpolation_at_c1_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::new(0., 1., 1.));
assert_eq!(normal, Vector::y_axis());
}
#[test]
fn normal_interpolation_at_c2_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::new(0., 1., 0.));
assert_eq!(normal, Vector::z_axis());
}
#[test]
fn normal_interpolation_at_center_works() {
let triangle = simple_triangle();
let center = Point::new(0., 2. / 3., 1. / 3.);
let normal = triangle.normal(&center);
let expected = Unit::new_normalize(Vector::new(1., 1., 1.));
assert!((normal.as_ref() - expected.as_ref()).magnitude() < 1e-5)
}
#[test]
fn deserialization_works() {
let yaml = r#"
corners:
- [0.0, 0.0, 0.0]
- [0.0, 1.0, 1.0]
- [0.0, 1.0, 0.0]
normals:
- [1.0, 0.0, 0.0]
- [0.0, 1.0, 0.0]
- [0.0, 0.0, 1.0]
"#;
let triangle: InterpolatedTriangle = serde_yaml::from_str(yaml).unwrap();
assert_eq!(
triangle,
InterpolatedTriangle::new(
Point::origin(),
Point::new(0., 1., 1.),
Point::new(0., 1., 0.),
Vector::x_axis(),
Vector::y_axis(),
Vector::z_axis(),
)
)
}
}

View file

@ -1,9 +0,0 @@
use super::{InterpolatedTriangle, Shape, Triangle};
use crate::material::{Material, TriangleMaterial, UniformMaterial};
use crate::texture::{Texture, TriangleTexture, UniformTexture};
use crate::Point;
use beevee::{
aabb::{Bounded, AABB},
bvh::Intersected,
ray::Ray,
};

View file

@ -14,53 +14,42 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[derive(Debug, PartialEq, Deserialize)]
pub enum ShapeEnum {
Sphere,
Triangle,
InterpolatedTriangle,
}
// FIXME: this has to be written by hand due to a limitation of `enum_dispatch` on super traits
impl Bounded for ShapeEnum {
fn aabb(&self) -> AABB {
match self {
ShapeEnum::Sphere(s) => s.aabb(),
ShapeEnum::Triangle(s) => s.aabb(),
ShapeEnum::InterpolatedTriangle(s) => s.aabb(),
}
}
fn centroid(&self) -> Point {
match self {
ShapeEnum::Sphere(s) => s.centroid(),
ShapeEnum::Triangle(s) => s.centroid(),
ShapeEnum::InterpolatedTriangle(s) => s.centroid(),
}
}
}
impl Intersected for ShapeEnum {
fn intersect(&self, ray: &Ray) -> Option<f32> {
match self {
ShapeEnum::Sphere(s) => s.intersect(ray),
ShapeEnum::Triangle(s) => s.intersect(ray),
ShapeEnum::InterpolatedTriangle(s) => s.intersect(ray),
}
}
}
/// Represent an abstract shape inside the scene.
#[enum_dispatch::enum_dispatch(ShapeEnum)]
pub trait Shape: std::fmt::Debug + Intersected {
pub trait Shape: std::fmt::Debug {
/// Return the distance at which the object intersects with the ray, or None if it does not.
fn intersect(&self, ray: &Ray) -> Option<f32>;
/// Return the unit vector corresponding to the normal at this point of the shape.
fn normal(&self, point: &Point) -> Unit<Vector>;
/// Project the point from the shape's surface to its texel coordinates.
fn project_texel(&self, point: &Point) -> Point2D;
/// Enclose the `Shape` in an axis-aligned bounding box.
fn aabb(&self) -> AABB;
/// Return the centroid of the shape.
fn centroid(&self) -> Point;
}
mod interpolated_triangle;
pub use interpolated_triangle::*;
impl Bounded for dyn Shape {
fn aabb(&self) -> AABB {
self.aabb()
}
fn centroid(&self) -> Point {
self.centroid()
}
}
impl Intersected for dyn Shape {
fn intersect(&self, ray: &Ray) -> Option<f32> {
self.intersect(ray)
}
}
mod sphere;
pub use sphere::*;

View file

@ -1,7 +1,6 @@
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::aabb::AABB;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
@ -39,38 +38,6 @@ impl Sphere {
}
impl Shape for Sphere {
fn normal(&self, point: &Point) -> Unit<Vector> {
let delt = if self.inverted {
self.center - point
} else {
point - self.center
};
Unit::new_normalize(delt)
}
fn project_texel(&self, point: &Point) -> Point2D {
// Project the sphere on the XY-plane
Point2D::new(
0.5 + (point.x - self.center.x) / (2. * self.radius),
0.5 + (point.y - self.center.y) / (2. * self.radius),
)
}
}
impl Bounded for Sphere {
fn aabb(&self) -> AABB {
let delt = Vector::new(self.radius, self.radius, self.radius);
let min = self.center - delt;
let max = self.center + delt;
AABB::with_bounds(min, max)
}
fn centroid(&self) -> Point {
self.center
}
}
impl Intersected for Sphere {
fn intersect(&self, ray: &Ray) -> Option<f32> {
use std::mem;
@ -100,6 +67,34 @@ impl Intersected for Sphere {
Some(t_0)
}
}
fn normal(&self, point: &Point) -> Unit<Vector> {
let delt = if self.inverted {
self.center - point
} else {
point - self.center
};
Unit::new_normalize(delt)
}
fn project_texel(&self, point: &Point) -> Point2D {
// Project the sphere on the XY-plane
Point2D::new(
0.5 + (point.x - self.center.x) / (2. * self.radius),
0.5 + (point.y - self.center.y) / (2. * self.radius),
)
}
fn aabb(&self) -> AABB {
let delt = Vector::new(self.radius, self.radius, self.radius);
let min = self.center - delt;
let max = self.center + delt;
AABB::with_bounds(min, max)
}
fn centroid(&self) -> Point {
self.center
}
}
#[cfg(test)]

View file

@ -1,14 +1,12 @@
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::aabb::AABB;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
use serde::{Deserialize, Deserializer};
/// Represent a triangle inside the scene.
#[serde(from = "SerializedTriangle")]
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Clone, Debug, PartialEq)]
pub struct Triangle {
c0: Point,
c0c1: Vector,
@ -40,7 +38,7 @@ impl Triangle {
}
}
pub(crate) fn barycentric(&self, point: &Point) -> Point2D {
fn barycentric(&self, point: &Point) -> Point2D {
let c0_pos = point - self.c0;
// P - A = u * (B - A) + v * (C - A)
// (C - A) = v0 is c0c2
@ -60,29 +58,6 @@ impl Triangle {
}
impl Shape for Triangle {
fn normal(&self, _: &Point) -> Unit<Vector> {
Unit::new_normalize(self.c0c1.cross(&self.c0c2))
}
fn project_texel(&self, point: &Point) -> Point2D {
self.barycentric(point)
}
}
impl Bounded for Triangle {
fn aabb(&self) -> AABB {
AABB::empty()
.grow(&self.c0)
.grow(&(self.c0 + self.c0c1))
.grow(&(self.c0 + self.c0c2))
}
fn centroid(&self) -> Point {
self.c0 + (self.c0c1 + self.c0c2) / 3.
}
}
impl Intersected for Triangle {
fn intersect(&self, ray: &Ray) -> Option<f32> {
let pvec = ray.direction.cross(&self.c0c2);
let det = self.c0c1.dot(&pvec);
@ -113,6 +88,25 @@ impl Intersected for Triangle {
Some(t)
}
}
fn normal(&self, _: &Point) -> Unit<Vector> {
Unit::new_normalize(self.c0c1.cross(&self.c0c2))
}
fn project_texel(&self, point: &Point) -> Point2D {
self.barycentric(point)
}
fn aabb(&self) -> AABB {
AABB::empty()
.grow(&self.c0)
.grow(&(self.c0 + self.c0c1))
.grow(&(self.c0 + self.c0c2))
}
fn centroid(&self) -> Point {
self.c0 + (self.c0c1 + self.c0c2) / 3.
}
}
#[derive(Debug, Deserialize)]
@ -130,6 +124,16 @@ impl From<SerializedTriangle> for Triangle {
}
}
impl<'de> Deserialize<'de> for Triangle {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedTriangle = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;

View file

@ -9,11 +9,10 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[derive(Debug, PartialEq, Deserialize)]
pub enum TextureEnum {
#[serde(rename = "uniform")]
UniformTexture,
TriangleTexture,
}
/// Represent an object's texture.
@ -23,8 +22,5 @@ pub trait Texture: std::fmt::Debug {
fn texel_color(&self, point: Point2D) -> LinearColor;
}
mod triangle;
pub use triangle::*;
mod uniform;
pub use uniform::*;

View file

@ -1,23 +0,0 @@
use super::{uniform::UniformTexture, Texture};
use crate::core::LinearColor;
use crate::Point2D;
use serde::Deserialize;
/// Represent a texture which interpolates between three points.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct TriangleTexture {
/// The texture at each point
textures: [UniformTexture; 3],
}
impl Texture for TriangleTexture {
fn texel_color(&self, point: Point2D) -> LinearColor {
let (u, v) = (point.x, point.y);
self.textures[0].texel_color(point) * (1. - u - v)
+ self.textures[1].texel_color(point) * u
+ self.textures[2].texel_color(point) * v
}
}
// FIXME: tests
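// A minimal sketch of what such a test could look like, assuming `LinearColor`
// derives `PartialEq` and `Debug` like the other core types, and following the
// barycentric convention used above (u = point.x, v = point.y):
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn texel_color_interpolates_corners() {
        let texture = TriangleTexture {
            textures: [
                UniformTexture::new(LinearColor::new(1., 0., 0.)),
                UniformTexture::new(LinearColor::new(0., 1., 0.)),
                UniformTexture::new(LinearColor::new(0., 0., 1.)),
            ],
        };
        // Each corner of the texel triangle should yield exactly its own color.
        assert_eq!(
            texture.texel_color(Point2D::new(0., 0.)),
            LinearColor::new(1., 0., 0.)
        );
        assert_eq!(
            texture.texel_color(Point2D::new(1., 0.)),
            LinearColor::new(0., 1., 0.)
        );
        assert_eq!(
            texture.texel_color(Point2D::new(0., 1.)),
            LinearColor::new(0., 0., 1.)
        );
    }
}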