Compare commits

...

42 commits
master ... WIP

Author SHA1 Message Date
Bruno BELANYI c5867bca84 WIP: pathtrace soup 2020-04-01 23:13:02 +02:00
Bruno BELANYI 8afc5ba382 library: light: spot_light: add sample_ray method 2020-04-01 02:46:13 +02:00
Bruno BELANYI bb7fa79fc3 library: light: point_light: add sample_ray method 2020-04-01 02:46:01 +02:00
Bruno BELANYI 7eefd7b574 WIP: library: render: pathtracer: add build_path 2020-03-31 12:09:28 +02:00
Bruno BELANYI 6d0de72e57 WIP: library: render: pathtrace: add Path struct 2020-03-31 12:09:28 +02:00
Bruno BELANYI a200a839b6 examples: rename aliasing_limit to shot_rays 2020-03-31 12:09:28 +02:00
Bruno BELANYI d4345e6ea4 library: scene: rename aliasing_limit to shot_rays 2020-03-31 12:09:28 +02:00
Bruno BELANYI e68ceb484d executable: use dummy pahtracer renderer 2020-03-31 12:09:09 +02:00
Bruno BELANYI b624ced37f library: render: pathtrace: add dummy Pathtracer 2020-03-30 02:25:58 +02:00
Bruno BELANYI 9c6b9af31a library: render: add Renderer trait 2020-03-29 21:28:06 +02:00
Bruno BELANYI 83ed6406ac executable: allow the choice of renderer 2020-03-29 21:28:06 +02:00
Bruno BELANYI 5ebad7c1ab pathtracer: move rendering logic to 'render' module 2020-03-29 20:41:19 +02:00
Bruno BELANYI ad668251d4 beevee: bvh: accelerated: add missing link to BVH 2020-03-29 20:20:01 +02:00
Bruno BELANYI a59bd026bc library: rename 'render' module to 'scene' 2020-03-29 20:15:27 +02:00
Bruno BELANYI aa47b54e4c examples: move OBJ-related files to subdirectory 2020-03-29 19:44:12 +02:00
Bruno BELANYI 4593e276c4 library: core: camera: fix documentation phrasing 2020-03-29 19:36:51 +02:00
Bruno BELANYI 9ad1100ded library: core: camera: move film behind the camera
To prepare for adding the handling of focal blur, move the film so that
it is behind the point of convergence of the lens.

In addition, store the distance to the focal plane in the camera, which
will be used when calculating rays with a non-zero aperture.
2020-03-29 19:36:51 +02:00
Bruno BELANYI 2f3224ea07 library: render: scene: calculate rays w/ camera 2020-03-29 19:36:51 +02:00
Bruno BELANYI 78d5954419 library: core: color: fix from_slice's doc-test 2020-03-29 19:36:16 +02:00
Bruno BELANYI b79c94aad9 library: core: camera: add ray calculation method 2020-03-29 19:35:50 +02:00
Antoine Martin c7fec074c2 examples: add cornell box example 2020-03-29 19:22:12 +02:00
Bruno BELANYI 2994a7dcfa library: render: mesh: parse rotation in degrees 2020-03-29 18:13:38 +02:00
Antoine Martin e1f18786ce cargo: bump tobj to 1.0 2020-03-29 16:52:26 +02:00
Bruno BELANYI 642f4221cd beevee: bvh: tree: fix build panic 2020-03-27 17:40:12 +01:00
Antoine Martin 0e65a75e2b library: render: mesh: fix panic when parsing OBJ 2020-03-27 17:40:12 +01:00
Antoine Martin cca40bcb8e library: render: mesh: from_slice to build Vector 2020-03-27 17:40:12 +01:00
Antoine Martin f0d36c7d7b library: render: mesh: use nalgebra::zero instead 2020-03-27 17:40:12 +01:00
Bruno BELANYI 15381d4bbd library: render: mesh: scale, rotate, & translate
The scaling factor is the same on all axes, to avoid changing angles,
which would distort the normals.
2020-03-27 17:40:12 +01:00
Antoine Martin 2d624c517f library: render: mesh: make default material grey 2020-03-27 17:40:12 +01:00
Antoine Martin fe5eee0172 library: render: mesh: load basic material from OBJ 2020-03-27 17:40:12 +01:00
Antoine Martin 6ba0f328cd library: render: mesh: handle empty normals in OBJ 2020-03-27 17:40:12 +01:00
Antoine Martin 0368edbd74 library: render: mesh: add OBJ loading 2020-03-27 17:39:44 +01:00
Bruno BELANYI a0d7d5e590 library: render: scene: remove ignored test 2020-03-26 19:05:01 +01:00
Bruno BELANYI 8727ae9d87 library: use #[serde(from)] for Deserialize 2020-03-26 19:03:42 +01:00
Bruno BELANYI 0678317442 library: render: scene: deserialize meshes 2020-03-26 18:48:48 +01:00
Bruno BELANYI f03880799b library: render: deserialization: error on unknown 2020-03-26 18:37:42 +01:00
Bruno BELANYI 0f6b81e40c WIP: add Mesh, TriangleTexture, TriangleMaterial 2020-03-26 18:37:42 +01:00
Bruno BELANYI e65a2a1f48 WIP: add comment about path-tracing 2020-03-26 17:42:08 +01:00
Bruno BELANYI 998838a6fc library: use Intersected as a super trait 2020-03-26 17:42:08 +01:00
Bruno BELANYI 3039607e4f beevee: bvh: use Accelerated trait for objects
This will allow for the use of meshes inside the BVH.

Returning a reference to the hit triangle inside the mesh directly,
instead of a reference to the mesh itself, allows for more efficient
execution.
2020-03-26 17:42:08 +01:00
Bruno BELANYI 5c0fc9689e library: shape: add InterpolatedTriangle type
This is a triangle with per-vertex normals that are interpolated across
its surface. This is particularly useful when rendering mesh objects.
2020-03-25 00:27:26 +01:00
Bruno BELANYI 3b5410aef9 library: render: scene: add hemisphere sampling
This method takes a given normal, and computes a random ray in the
unit-hemisphere described by that normal.

We use cosine-weighted importance sampling because it leads to better
convergence and is a nice micro-optimisation (from four trigonometric
operations to only two).
2020-03-25 00:12:05 +01:00
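A note on that last commit's claim of going "from four trigonometric operations to only two": the gain comes from never computing the elevation angle itself. The sketch below is a standalone restatement of the trick, with an illustrative function name rather than the crate's API (the actual helper is the `sample_hemisphere` function added to `render/utils.rs` further down). It samples a cosine-weighted direction in a local frame whose Y axis is the surface normal:

```rust
use rand::Rng;

/// Cosine-weighted direction on the unit hemisphere around +Y, in local coordinates.
/// The resulting density over solid angle is cos(theta) / pi.
fn cosine_sample_local<R: Rng>(rng: &mut R) -> [f32; 3] {
    let azimuth = rng.gen::<f32>() * std::f32::consts::PI * 2.;
    // Drawing cos(theta) directly (here as the square root of a uniform variate)
    // means theta itself is never needed, so no acos/cos/sin for the elevation...
    let cos_theta = rng.gen::<f32>().sqrt();
    let sin_theta = (1. - cos_theta * cos_theta).sqrt();
    // ...which leaves only the two trigonometric calls for the azimuth.
    [
        sin_theta * azimuth.cos(),
        cos_theta,
        sin_theta * azimuth.sin(),
    ]
}
```

The sampled direction still has to be rotated so that its local Y axis lines up with the actual surface normal, which is what the orthonormal-basis construction in `sample_hemisphere` takes care of.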
37 changed files with 1434 additions and 328 deletions

View file

@ -0,0 +1,39 @@
use super::Intersected;
use crate::aabb::Bounded;
use crate::ray::Ray;
/// The trait for any object, aggregate or not, to be used in the [`BVH`]. If your object is not an
/// aggregate, you should instead implement [`Intersected`], which provides this trait automatically.
///
/// This trait is there to accommodate aggregate objects inside the [`BVH`]: a mesh can, for
/// example, use its own [`BVH`] for a faster look-up and return a reference to the hit triangle
/// directly, instead of a reference to the whole mesh.
///
/// [`BVH`]: struct.BVH.html
/// [`Intersected`]: trait.Intersected.html
pub trait Accelerated: Bounded {
/// The type contained in your [`Accelerated`] structure
///
/// [`Accelerated`]: trait.Accelerated.html
type Output;
/// Return None if no intersection happens with the ray, or a tuple of distance along the ray
/// and a reference to the object that was hit.
fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)>;
}
/// The automatic implementation for any [`Intersected`] object to be used in the [`BVH`].
///
/// [`BVH`]: struct.BVH.html
impl<T> Accelerated for T
where
T: Intersected,
{
type Output = Self;
/// Return a reference to `self` when a distance was found.
fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)> {
self.intersect(ray).map(|t| (t, self))
}
}
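To make the doc comment above concrete: an aggregate implements [`Accelerated`] directly (instead of [`Intersected`]) so that `BVH::walk` can hand back a reference to the inner primitive that was hit. Below is a minimal sketch using a hypothetical `Soup` aggregate, generic over any `Intersected` primitive; it is not the crate's actual `Mesh` type, which (per the `scene/mesh.rs` diff further down) flattens its triangles into `Object`s instead. It also assumes, as `interpolated_triangle.rs` below does, that `pathtracer::Point` is the point type `beevee` expects.

```rust
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::{Accelerated, Intersected};
use beevee::ray::Ray;
use pathtracer::Point;

/// Hypothetical aggregate: a flat, non-empty bag of primitives behind one bounding box.
struct Soup<T: Intersected> {
    prims: Vec<T>,
}

impl<T: Intersected> Bounded for Soup<T> {
    fn aabb(&self) -> AABB {
        // Union of the primitives' boxes, same folding pattern as `bounds_from_slice`.
        self.prims
            .iter()
            .map(|p| p.aabb())
            .fold(AABB::empty(), |acc, other| acc.union(&other))
    }

    fn centroid(&self) -> Point {
        // Good enough for a sketch: the centroid only drives the SAH split.
        self.prims[0].centroid()
    }
}

impl<T: Intersected> Accelerated for Soup<T> {
    type Output = T;

    fn intersect(&self, ray: &Ray) -> Option<(f32, &T)> {
        // Hand back a reference to the hit primitive itself rather than to the
        // whole aggregate, keeping only the closest intersection.
        self.prims
            .iter()
            .filter_map(|p| Intersected::intersect(p, ray).map(|dist| (dist, p)))
            .min_by(|(d1, _), (d2, _)| d1.partial_cmp(d2).expect("no NaN distances"))
    }
}
```

With such a type, `BVH::build` and `BVH::walk` over a slice of `Soup`s would yield `Option<(f32, &T)>`, matching the new `walk` signature in the `tree.rs` diff below.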

View file

@ -1,8 +1,11 @@
use crate::aabb::Bounded;
use crate::ray::Ray;
-/// The trait for any object to be used in the [`BVH`].
+/// The trait for any object to be used in the [`BVH`]. Its [`Accelerated`] implementation is
+/// automatically derived to return a reference to itself. If this is not the intended semantics,
+/// see [`Accelerated`].
///
+/// [`Accelerated`]: trait.Accelerated.html
/// [`BVH`]: struct.BVH.html
pub trait Intersected: Bounded {
/// Return None if there is no intersection, or the distance along the ray to the closest

View file

@ -1,5 +1,8 @@
//! The Bounding Volume Hierarchy
+mod accelerated;
+pub use accelerated::*;
mod intersected;
pub use intersected::*;

View file

@ -1,4 +1,4 @@
-use super::Intersected;
+use super::Accelerated;
use crate::aabb::AABB;
use crate::ray::Ray;
use crate::Axis;
@ -23,9 +23,9 @@ struct Node {
}
/// The BVH containing all the objects of type O.
-/// This type must implement [`Intersected`].
+/// This type must implement [`Accelerated`].
///
-/// [`Intersected`]: trait.Intersected.html
+/// [`Accelerated`]: trait.Accelerated.html
#[derive(Clone, Debug, PartialEq)]
pub struct BVH {
tree: Node,
@ -92,7 +92,7 @@ impl BVH {
/// let spheres: &mut [Sphere] = &mut [Sphere{ center: Point::origin(), radius: 2.5 }];
/// let bvh = BVH::build(spheres);
/// ```
-pub fn build<O: Intersected>(objects: &mut [O]) -> Self {
+pub fn build<O: Accelerated>(objects: &mut [O]) -> Self {
Self::with_max_capacity(objects, 32)
}
@ -157,7 +157,7 @@ impl BVH {
/// let spheres: &mut [Sphere] = &mut [Sphere{ center: Point::origin(), radius: 2.5 }];
/// let bvh = BVH::with_max_capacity(spheres, 32);
/// ```
-pub fn with_max_capacity<O: Intersected>(objects: &mut [O], max_cap: usize) -> Self {
+pub fn with_max_capacity<O: Accelerated>(objects: &mut [O], max_cap: usize) -> Self {
let tree = build_node(objects, 0, objects.len(), max_cap);
Self { tree }
}
@ -226,8 +226,8 @@ impl BVH {
/// let bvh = BVH::with_max_capacity(spheres, 32);
/// assert!(bvh.is_sound(spheres));
/// ```
-pub fn is_sound<O: Intersected>(&self, objects: &[O]) -> bool {
+pub fn is_sound<O: Accelerated>(&self, objects: &[O]) -> bool {
-fn check_node<O: Intersected>(objects: &[O], node: &Node) -> bool {
+fn check_node<O: Accelerated>(objects: &[O], node: &Node) -> bool {
if node.begin > node.end {
return false;
}
@ -322,17 +322,21 @@ impl BVH {
/// assert_eq!(dist, 0.5);
/// assert_eq!(obj, &spheres[0]);
/// ```
-pub fn walk<'o, O: Intersected>(&self, ray: &Ray, objects: &'o [O]) -> Option<(f32, &'o O)> {
+pub fn walk<'o, O: Accelerated>(
+&self,
+ray: &Ray,
+objects: &'o [O],
+) -> Option<(f32, &'o O::Output)> {
walk_rec_helper(ray, objects, &self.tree, std::f32::INFINITY)
}
}
-fn walk_rec_helper<'o, O: Intersected>(
+fn walk_rec_helper<'o, O: Accelerated>(
ray: &Ray,
objects: &'o [O],
node: &Node,
min: f32,
-) -> Option<(f32, &'o O)> {
+) -> Option<(f32, &'o O::Output)> {
use std::cmp::Ordering;
match &node.kind {
@ -340,7 +344,7 @@ fn walk_rec_helper<'o, O: Intersected>(
NodeEnum::Leaf => objects[node.begin..node.end]
.iter()
// This turns the Option<f32> of an intersection into an Option<(f32, &O)>
-.filter_map(|o| o.intersect(ray).map(|d| (d, o)))
+.filter_map(|o| o.intersect(ray))
// Discard values that are too far away
.filter(|(dist, _)| dist < &min)
// Only keep the minimum value, if there is one
@ -382,14 +386,14 @@ fn walk_rec_helper<'o, O: Intersected>(
}
}
-fn bounds_from_slice<O: Intersected>(objects: &[O]) -> AABB {
+fn bounds_from_slice<O: Accelerated>(objects: &[O]) -> AABB {
objects
.iter()
.map(|o| o.aabb())
.fold(AABB::empty(), |acc, other| acc.union(&other))
}
-fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_cap: usize) -> Node {
+fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_cap: usize) -> Node {
let aabb = bounds_from_slice(objects);
// Don't split nodes under capacity
if objects.len() <= max_cap {
@ -401,7 +405,7 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
};
}
// Calculate the SAH heuristic for this slice
-let (split, axis, cost) = compute_sah(&mut objects[begin..end], aabb.surface(), max_cap);
+let (split, axis, cost) = compute_sah(objects, aabb.surface(), max_cap);
// Only split if the heuristic shows that it is worth it
if cost >= objects.len() as f32 {
return Node {
@ -411,11 +415,11 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
kind: NodeEnum::Leaf,
};
}
-// Avoid degenerate cases, and recenter the split inside [begin, end)
-let split = if split == 0 || split >= (end - begin - 1) {
-begin + (end - begin) / 2
+// Avoid degenerate cases
+let split = if split <= 1 || split >= (objects.len() - 1) {
+(end - begin) / 2
} else {
-begin + split
+split
};
// Project along chosen axis
pdqselect::select_by(objects, split, |lhs, rhs| {
@ -424,8 +428,18 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
.expect("Can't use Nans in the SAH computation")
});
// Construct children recursively on [begin, split) and [split, end)
-let left = Box::new(build_node(objects, begin, split, max_cap));
-let right = Box::new(build_node(objects, split, end, max_cap));
+let left = Box::new(build_node(
+&mut objects[0..split],
+begin,
+begin + split,
+max_cap,
+));
+let right = Box::new(build_node(
+&mut objects[split..],
+begin + split,
+end,
+max_cap,
+));
// Build the node recursively
Node {
bounds: aabb,
@ -437,7 +451,7 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
/// Returns the index at which to split for SAH, the Axis along which to split, and the calculated
/// cost.
-fn compute_sah<O: Intersected>(
+fn compute_sah<O: Accelerated>(
objects: &mut [O],
surface: f32,
max_cap: usize,
@ -481,7 +495,7 @@ fn compute_sah<O: Intersected>(
let cost = 1. / max_cap as f32
+ (left_count as f32 * left_surfaces[left_count - 1]
-+ right_count as f32 * right_surfaces[right_count])
++ right_count as f32 * right_surfaces[right_count - 1])
/ surface;
if cost < min {

View file

@ -28,6 +28,7 @@ rand = "0.7"
rayon = "1.3.0"
serde_yaml = "0.8"
structopt = "0.3"
+tobj = "1.0"
[dependencies.nalgebra]
version = "0.20.0"

View file

@ -1,5 +1,5 @@
# Optional field
-aliasing_limit: 10
+shot_rays: 10
# Optional field
reflection_limit: 5

View file

@ -0,0 +1,24 @@
reflection_limit: 5
camera:
origin: [0.0, 1.0, 0.0]
forward: [ 0.0, 0.0, 1.0]
up: [0.0, 1.0, 0.0]
fov: 90.0
distance_to_image: 1.0
x: 2160
y: 2160
lights:
ambients:
- color: {r: 0.1, g: 0.1, b: 0.1}
points:
- position: [-0.5, 1.0, 1.8]
color: {r: 1.0, g: 1.0, b: 1.0}
meshes:
# FIXME: make the path relative to the YAML in some way?
# Easiest solution would be to chdir to the YAML's directory
- obj_file: "pathtracer/examples/objs/cornell-box.obj"
translation: [0.0, 0.0, 1.0]
rotation: [0, 180, 0]

View file

@ -0,0 +1,88 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
newmtl leftWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.63 0.065 0.05 # Red
Kd 0.63 0.065 0.05
Ks 0 0 0
Ke 0 0 0
newmtl rightWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.14 0.45 0.091 # Green
Kd 0.14 0.45 0.091
Ks 0 0 0
Ke 0 0 0
newmtl floor
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl ceiling
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl backWall
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl shortBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl tallBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl light
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.78 0.78 0.78 # White
Kd 0.78 0.78 0.78
Ks 0 0 0
Ke 17 12 4

View file

@ -0,0 +1,168 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
mtllib cornell-box.mtl
## Object floor
v -1.01 0.00 0.99
v 1.00 0.00 0.99
v 1.00 0.00 -1.04
v -0.99 0.00 -1.04
g floor
usemtl floor
f -4 -3 -2 -1
## Object ceiling
v -1.02 1.99 0.99
v -1.02 1.99 -1.04
v 1.00 1.99 -1.04
v 1.00 1.99 0.99
g ceiling
usemtl ceiling
f -4 -3 -2 -1
## Object backwall
v -0.99 0.00 -1.04
v 1.00 0.00 -1.04
v 1.00 1.99 -1.04
v -1.02 1.99 -1.04
g backWall
usemtl backWall
f -4 -3 -2 -1
## Object rightwall
v 1.00 0.00 -1.04
v 1.00 0.00 0.99
v 1.00 1.99 0.99
v 1.00 1.99 -1.04
g rightWall
usemtl rightWall
f -4 -3 -2 -1
## Object leftWall
v -1.01 0.00 0.99
v -0.99 0.00 -1.04
v -1.02 1.99 -1.04
v -1.02 1.99 0.99
g leftWall
usemtl leftWall
f -4 -3 -2 -1
## Object shortBox
usemtl shortBox
# Top Face
v 0.53 0.60 0.75
v 0.70 0.60 0.17
v 0.13 0.60 0.00
v -0.05 0.60 0.57
f -4 -3 -2 -1
# Left Face
v -0.05 0.00 0.57
v -0.05 0.60 0.57
v 0.13 0.60 0.00
v 0.13 0.00 0.00
f -4 -3 -2 -1
# Front Face
v 0.53 0.00 0.75
v 0.53 0.60 0.75
v -0.05 0.60 0.57
v -0.05 0.00 0.57
f -4 -3 -2 -1
# Right Face
v 0.70 0.00 0.17
v 0.70 0.60 0.17
v 0.53 0.60 0.75
v 0.53 0.00 0.75
f -4 -3 -2 -1
# Back Face
v 0.13 0.00 0.00
v 0.13 0.60 0.00
v 0.70 0.60 0.17
v 0.70 0.00 0.17
f -4 -3 -2 -1
# Bottom Face
v 0.53 0.00 0.75
v 0.70 0.00 0.17
v 0.13 0.00 0.00
v -0.05 0.00 0.57
f -12 -11 -10 -9
g shortBox
usemtl shortBox
## Object tallBox
usemtl tallBox
# Top Face
v -0.53 1.20 0.09
v 0.04 1.20 -0.09
v -0.14 1.20 -0.67
v -0.71 1.20 -0.49
f -4 -3 -2 -1
# Left Face
v -0.53 0.00 0.09
v -0.53 1.20 0.09
v -0.71 1.20 -0.49
v -0.71 0.00 -0.49
f -4 -3 -2 -1
# Back Face
v -0.71 0.00 -0.49
v -0.71 1.20 -0.49
v -0.14 1.20 -0.67
v -0.14 0.00 -0.67
f -4 -3 -2 -1
# Right Face
v -0.14 0.00 -0.67
v -0.14 1.20 -0.67
v 0.04 1.20 -0.09
v 0.04 0.00 -0.09
f -4 -3 -2 -1
# Front Face
v 0.04 0.00 -0.09
v 0.04 1.20 -0.09
v -0.53 1.20 0.09
v -0.53 0.00 0.09
f -4 -3 -2 -1
# Bottom Face
v -0.53 0.00 0.09
v 0.04 0.00 -0.09
v -0.14 0.00 -0.67
v -0.71 0.00 -0.49
f -8 -7 -6 -5
g tallBox
usemtl tallBox
## Object light
v -0.24 1.98 0.16
v -0.24 1.98 -0.22
v 0.23 1.98 -0.22
v 0.23 1.98 0.16
g light
usemtl light
f -4 -3 -2 -1

View file

@ -1,4 +1,4 @@
-aliasing_limit: 10
+shot_rays: 10
reflection_limit: 5
background: {r: 0.5, g: 0.5, b: 0.5}

View file

@ -2,13 +2,18 @@
use super::film::Film;
use crate::{Point, Vector};
-use serde::{Deserialize, Deserializer};
+use beevee::ray::Ray;
+use nalgebra::Unit;
+use serde::Deserialize;
/// Represent an abstract camera to observe the scene.
-#[derive(Debug, PartialEq)]
+#[serde(from = "SerializedCamera")]
+#[derive(Debug, PartialEq, Deserialize)]
pub struct Camera {
-/// Where the camera is set in the scene (i.e: its focal point).
+/// Where the camera is set in the scene (i.e: the center of the lens).
origin: Point,
+/// How far away is the camera's focal plane.
+distance_to_image: f32,
/// The film to represent each pixel in the scene.
film: Film,
}
@ -37,15 +42,20 @@ impl Camera {
forward: Vector,
up: Vector,
fov: f32,
-dist_to_image: f32,
+distance_to_image: f32,
x: u32,
y: u32,
) -> Self {
let right = forward.cross(&up);
-let center = origin + forward.normalize() * dist_to_image;
-let screen_size = 2. * f32::tan(fov / 2.) * dist_to_image;
-let film = Film::new(x, y, screen_size, center, up, right);
-Camera { origin, film }
+let screen_size = 2. * f32::tan(fov / 2.);
+// Construct the film behind the camera, upside down
+let center = origin - forward.normalize();
+let film = Film::new(x, y, screen_size, center, -up, -right);
+Camera {
+origin,
+distance_to_image,
+film,
+}
}
/// Get the `Camera`'s [`Film`].
@ -78,6 +88,24 @@ impl Camera {
pub fn origin(&self) -> &Point {
&self.origin
}
+/// Get the Ray coming out of the camera at a given ratio on the image.
+///
+/// # Examples
+///
+/// ```
+/// # use pathtracer::core::Camera;
+/// # use pathtracer::Point;
+/// #
+/// let cam = Camera::default();
+/// let ray_ul = cam.ray_with_ratio(0., 0.); // Ray coming out of the upper-left pixel
+/// let ray_lr = cam.ray_with_ratio(1., 1.); // Ray coming out of the lower-right pixel
+/// ```
+pub fn ray_with_ratio(&self, x: f32, y: f32) -> Ray {
+let pixel = self.film().pixel_at_ratio(x, y);
+let direction = Unit::new_normalize(self.origin() - pixel);
+Ray::new(pixel, direction)
+}
}
impl Default for Camera {
@ -140,16 +168,6 @@ impl From<SerializedCamera> for Camera {
}
}
-impl<'de> Deserialize<'de> for Camera {
-fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-where
-D: Deserializer<'de>,
-{
-let cam: SerializedCamera = Deserialize::deserialize(deserializer)?;
-Ok(cam.into())
-}
-}
#[cfg(test)]
mod test {
use super::*;
@ -157,7 +175,7 @@ mod test {
#[test]
fn new_works() {
let cam = Camera::new(
-Point::new(-1., 0., 0.),
+Point::new(1., 0., 0.),
Vector::new(1., 0., 0.),
Vector::new(0., 1., 0.),
2. * f32::atan(1.), /* 90° in radian */
@ -168,14 +186,15 @@ mod test {
assert_eq!(
cam,
Camera {
-origin: Point::new(-1., 0., 0.),
+origin: Point::new(1., 0., 0.),
+distance_to_image: 1.,
film: Film::new(
1080,
1080,
2.,
Point::origin(),
-Vector::new(0., 1., 0.),
-Vector::new(0., 0., 1.),
+-Vector::new(0., 1., 0.),
+-Vector::new(0., 0., 1.),
)
}
)
@ -184,7 +203,7 @@ mod test {
#[test]
fn deserialization_works() {
let yaml = r#"
-origin: [-1.0, 0.0, 0.0]
+origin: [1.0, 0.0, 0.0]
forward: [ 1.0, 0.0, 0.0]
up: [0.0, 1.0, 0.0]
fov: 90.0
@ -196,14 +215,15 @@ mod test {
assert_eq!(
cam,
Camera {
-origin: Point::new(-1., 0., 0.),
+origin: Point::new(1., 0., 0.),
+distance_to_image: 1.0,
film: Film::new(
1080,
1080,
2.,
Point::origin(),
-Vector::new(0., 1., 0.),
-Vector::new(0., 0., 1.),
+-Vector::new(0., 1., 0.),
+-Vector::new(0., 0., 1.),
)
}
)

View file

@ -70,6 +70,25 @@ impl LinearColor {
LinearColor { r, g, b }
}
/// Creates a new `Color` from a slice.
///
/// Panics if the slice has fewer than 3 elements.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::LinearColor;
/// #
/// let color = LinearColor::from_slice(&[1.0, 0.0, 0.0]); // bright red!
/// ```
pub fn from_slice(s: &[f32]) -> Self {
LinearColor {
r: s[0],
g: s[1],
b: s[2],
}
}
#[must_use]
/// Clamps the color's RGB components between 0.0 and 1.0.
///

View file

@ -12,6 +12,7 @@ pub mod core;
pub mod light;
pub mod material;
pub mod render;
+pub mod scene;
pub mod serialize;
pub mod shape;
pub mod texture;

View file

@ -2,6 +2,7 @@
use super::core::LinearColor;
use super::{Point, Vector};
+use beevee::ray::Ray;
use nalgebra::Unit;
/// Represent a light in the scene being rendered.
@ -16,6 +17,11 @@ pub trait SpatialLight: Light {
fn to_source(&self, origin: &Point) -> (Unit<Vector>, f32);
}
+/// Represent a light from which rays can be sampled
+pub trait SampleLight: SpatialLight {
+fn sample_ray(&self) -> Ray;
+}
mod ambient_light;
pub use ambient_light::*;

View file

@ -1,7 +1,9 @@
-use super::{Light, SpatialLight};
+use super::{Light, SampleLight, SpatialLight};
use crate::core::LinearColor;
use crate::{Point, Vector};
+use beevee::ray::Ray;
use nalgebra::Unit;
+use rand::{distributions::Uniform, Rng};
use serde::Deserialize;
/// Represent a light emanating from a point in space, following the square distance law.
@ -45,6 +47,37 @@ impl SpatialLight for PointLight {
(Unit::new_normalize(delt), dist)
}
}
impl SampleLight for PointLight {
/// Uniformly sample a ray from the point-light in a random direction.
///
/// # Examples
///
///```
/// # use pathtracer::light::PointLight;
/// # use pathtracer::core::color::LinearColor;
/// # use pathtracer::Point;
/// #
/// let dir_light = PointLight::new(
/// Point::origin(),
/// LinearColor::new(1.0, 0.0, 1.0),
/// );
/// let sampled = dir_light.sample_ray();
/// ```
fn sample_ray(&self) -> Ray {
let mut rng = rand::thread_rng();
// Sample sphere uniformly
// See <https://mathworld.wolfram.com/SpherePointPicking.html>
let theta = rng.gen_range(0., std::f32::consts::PI * 2.);
let y = rng.sample(Uniform::new(-1., 1.)); // Inclusive for the poles
let dir = Unit::new_unchecked(Vector::new(
// this vector is already of unit length
f32::sqrt(1. - y * y) * f32::cos(theta),
y,
f32::sqrt(1. - y * y) * f32::sin(theta),
));
Ray::new(self.position, dir)
}
}
#[cfg(test)]
mod test {

View file

@ -1,13 +1,15 @@
-use super::{Light, SpatialLight};
+use super::{Light, SampleLight, SpatialLight};
use crate::core::LinearColor;
use crate::{Point, Vector};
+use beevee::ray::Ray;
+use nalgebra::Rotation3;
use nalgebra::Unit;
-use serde::{Deserialize, Deserializer};
+use rand::{distributions::Uniform, Rng};
+use serde::Deserialize;
/// Represent a light emanating from a directed light-source, outputting rays in a cone.
-///
-/// The illumination cone cannot have an FOV over 180°.
-#[derive(Debug, PartialEq)]
+#[serde(from = "SerializedSpotLight")]
+#[derive(Debug, PartialEq, Deserialize)]
pub struct SpotLight {
position: Point,
direction: Unit<Vector>,
@ -66,6 +68,52 @@ impl SpatialLight for SpotLight {
(Unit::new_normalize(delt), dist)
}
}
impl SampleLight for SpotLight {
/// Uniformly sample a ray from the spot-light in a random direction.
///
/// # Examples
///
///```
/// # use pathtracer::light::SpotLight;
/// # use pathtracer::core::color::LinearColor;
/// # use pathtracer::{Point, Vector};
/// #
/// let spot_light = SpotLight::degrees_new(
/// Point::origin(),
/// Vector::x_axis(),
/// 90.,
/// LinearColor::new(1.0, 0.0, 1.0),
/// );
/// let sampled = spot_light.sample_ray();
/// ```
fn sample_ray(&self) -> Ray {
let mut rng = rand::thread_rng();
// Sample cap at Z-pole uniformly
// See <https://math.stackexchange.com/questions/56784>
let theta = rng.gen_range(0., std::f32::consts::PI * 2.);
let z = rng.sample(Uniform::new(self.cosine_value, 1.)); // Inclusive for the poles
let dir = Unit::new_unchecked(Vector::new(
// this vector is already of unit length
f32::sqrt(1. - z * z) * f32::cos(theta),
f32::sqrt(1. - z * z) * f32::sin(theta),
z,
));
let dir =
if let Some(rotate) = Rotation3::rotation_between(&Vector::z_axis(), &self.direction) {
// Rotate the direction if needed
rotate * dir
} else if self.direction.dot(&dir) < 0. {
// Special case if the direction is directly opposite, its rotation axis is
// undefined, but we don't care about a special axis to perform the rotation
-dir
} else {
dir
};
// We should now be oriented the right way
debug_assert!(self.direction.dot(&dir) >= self.cosine_value);
Ray::new(self.position, dir)
}
}
#[derive(Debug, Deserialize)]
struct SerializedSpotLight {
@ -82,16 +130,6 @@ impl From<SerializedSpotLight> for SpotLight {
}
}
-impl<'de> Deserialize<'de> for SpotLight {
-fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-where
-D: Deserializer<'de>,
-{
-let cam: SerializedSpotLight = Deserialize::deserialize(deserializer)?;
-Ok(cam.into())
-}
-}
#[cfg(test)]
mod test {
use super::*;

View file

@ -1,7 +1,18 @@
-use pathtracer::render::Scene;
+use pathtracer::render::{Pathtracer, Raytracer};
+use pathtracer::scene::Scene;
use std::path::PathBuf;
+use std::str;
+use structopt::clap::arg_enum;
use structopt::StructOpt;
+arg_enum! {
+#[derive(Debug)]
+enum RenderOption {
+Raytracer,
+Pathtracer,
+}
+}
#[derive(StructOpt, Debug)]
struct Options {
/// Input description for the scene to be rendered.
@ -10,6 +21,15 @@ struct Options {
/// Output image for the rendered scene.
#[structopt(short, long, parse(from_os_str), default_value = "scene.png")]
output: PathBuf,
+/// Which renderer should be used on the input scene.
+#[structopt(
+short,
+long,
+possible_values = &RenderOption::variants(),
+case_insensitive = true,
+default_value = "Raytracer"
+)]
+renderer: RenderOption,
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
@ -17,7 +37,10 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let f = std::fs::File::open(options.input)?;
let scene: Scene = serde_yaml::from_reader(f)?;
-let image = scene.render();
+let image = match options.renderer {
+RenderOption::Raytracer => Raytracer::new(scene).render(),
+RenderOption::Pathtracer => Pathtracer::new(scene).render(),
+};
image.save(options.output)?;
Ok(())

View file

@ -9,10 +9,11 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Deserialize)]
pub enum MaterialEnum {
#[serde(rename = "uniform")]
UniformMaterial,
+TriangleMaterial,
}
/// Represent the physical light properties of an object in the scene;
@ -22,5 +23,8 @@ pub trait Material: std::fmt::Debug {
fn properties(&self, point: Point2D) -> LightProperties;
}
+mod triangle;
+pub use triangle::*;
mod uniform;
pub use uniform::*;

View file

@ -0,0 +1,31 @@
use super::Material;
use crate::core::{LightProperties, LinearColor, ReflTransEnum};
use crate::Point2D;
use serde::Deserialize;
/// Represent a material whose properties are interpolated between a triangle's three vertices.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct TriangleMaterial {
/// The diffuse components.
diffuse: [LinearColor; 3],
/// The specular components.
specular: [LinearColor; 3],
/// The transparency or reflectivity properties, this is not interpolated.
#[serde(flatten)]
pub refl_trans: Option<ReflTransEnum>,
}
impl Material for TriangleMaterial {
fn properties(&self, point: Point2D) -> LightProperties {
let (u, v) = (point.x, point.y);
let diffuse = self.diffuse[0].clone() * (1. - u - v)
+ self.diffuse[1].clone() * u
+ self.diffuse[2].clone() * v;
let specular = self.specular[0].clone() * (1. - u - v)
+ self.specular[1].clone() * u
+ self.specular[2].clone() * v;
LightProperties::new(diffuse, specular, self.refl_trans.clone())
}
}
// FIXME: tests

View file

@ -1,12 +1,18 @@
-//! Rendering logic
+//! Define the different kinds of renderers for use on a given scene.
+use image::RgbImage;
-pub mod light_aggregate;
-pub use light_aggregate::*;
+/// Each renderer implements this trait, to be called after being built.
+pub trait Renderer {
+/// Render the [`Scene`] using the chosen rendering technique.
+///
+/// [`Scene`]: ../scene/scene/struct.Scene.html
+fn render(&self) -> RgbImage;
+}
-pub mod object;
-pub use object::*;
+mod pathtrace;
+pub use pathtrace::*;
-pub mod scene;
-pub use scene::*;
+mod raytracer;
+pub use raytracer::*;
pub(crate) mod utils;

View file

@ -0,0 +1,37 @@
use crate::core::{LightProperties, LinearColor};
use crate::light::SampleLight;
use crate::Point;
pub struct LightPathPoint {
pub point: Point,
pub luminance: LinearColor,
pub properties: LightProperties,
}
impl LightPathPoint {
pub fn new(point: Point, luminance: LinearColor, properties: LightProperties) -> Self {
LightPathPoint {
point,
luminance,
properties,
}
}
}
pub struct LightPath<'a> {
pub origin: &'a dyn SampleLight,
pub points: Vec<LightPathPoint>,
}
impl<'a> LightPath<'a> {
pub fn new(origin: &'a dyn SampleLight) -> Self {
LightPath {
origin,
points: Vec::new(),
}
}
pub fn push_point(&mut self, new_point: LightPathPoint) {
self.points.push(new_point)
}
}

View file

@ -0,0 +1,4 @@
mod light_path;
mod pathtracer;
pub use self::pathtracer::*;

View file

@ -0,0 +1,97 @@
use super::super::utils::*;
use super::super::Renderer;
use super::light_path::{LightPath, LightPathPoint};
use crate::core::{LightProperties, LinearColor};
use crate::light::SampleLight;
use crate::material::Material;
use crate::scene::object::Object;
use crate::scene::Scene;
use crate::shape::Shape;
use crate::{Point, Vector};
use beevee::ray::Ray;
use image::RgbImage;
use nalgebra::Unit;
use rand::Rng;
/// Render the [`Scene`] using Bidirectional-Pathtracing
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct Pathtracer {
#[allow(unused)]
scene: Scene,
}
impl Pathtracer {
/// Create a [`Pathtracer`] renderer with the given [`Scene`]
///
/// [`Pathtracer`]: struct.Pathtracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
Pathtracer { scene }
}
/// Render the [`Scene`] using Bidirectional-Pathtracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
todo!()
}
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
self.scene.bvh.walk(&ray, &self.scene.objects)
}
fn construct_light_path(&self) -> LightPath {
let mut rng = rand::thread_rng();
let num_lights = self.scene.lights.points.len() + self.scene.lights.spots.len();
let index = rng.gen_range(0, num_lights);
let sample_light: &dyn SampleLight = if index < self.scene.lights.points.len() {
&self.scene.lights.points[index]
} else {
&self.scene.lights.spots[index - self.scene.lights.points.len()]
};
let mut ray = sample_light.sample_ray();
let mut res = LightPath::new(sample_light);
if let Some((dist, obj)) = self.cast_ray(ray) {
let hit_pos = ray.origin + ray.direction.as_ref() * dist;
let texel = obj.shape.project_texel(&hit_pos);
let new_point = LightPathPoint::new(
hit_pos,
sample_light.illumination(&hit_pos),
obj.material.properties(texel),
);
res.push_point(new_point);
ray = todo!(); // Sample new direction
} else {
return res;
};
for _ in 1..self.scene.reflection_limit {
if let Some((dist, obj)) = self.cast_ray(ray) {
let new_point = todo!();
res.push_point(new_point);
} else {
break;
}
}
res
}
fn illuminate(
&self,
point: Point,
properties: LightProperties,
path: LightPath,
) -> LinearColor {
path.points.iter().map(|p| p.luminance.clone()).sum()
}
}
impl Renderer for Pathtracer {
fn render(&self) -> RgbImage {
self.render()
}
}

View file

@ -1,93 +1,43 @@
-//! Scene rendering logic
-use super::{light_aggregate::LightAggregate, object::Object, utils::*};
+use super::utils::*;
+use super::Renderer;
+use crate::scene::{Object, Scene};
use crate::{
-core::{Camera, LightProperties, LinearColor, ReflTransEnum},
+core::{LightProperties, LinearColor, ReflTransEnum},
material::Material,
shape::Shape,
texture::Texture,
{Point, Vector},
};
-use beevee::{bvh::BVH, ray::Ray};
+use beevee::ray::Ray;
use image::RgbImage;
use nalgebra::Unit;
use rand::prelude::thread_rng;
use rand::Rng;
-use serde::{Deserialize, Deserializer};
-/// Represent the scene being rendered.
-pub struct Scene {
-camera: Camera,
-lights: LightAggregate,
-objects: Vec<Object>,
-bvh: BVH,
-background: LinearColor,
-aliasing_limit: u32,
-reflection_limit: u32,
-diffraction_index: f32,
+/// Render the [`Scene`] using Raytracing.
+///
+/// [`Scene`]: ../scene/scene/struct.Scene.html
+pub struct Raytracer {
+scene: Scene,
}
-impl Scene {
-/// Creates a new `Scene`.
-///
-/// # Examples
-///
-/// ```
-/// # use pathtracer::core::{Camera, LightProperties, LinearColor};
-/// # use pathtracer::material::UniformMaterial;
-/// # use pathtracer::render::{LightAggregate, Object, Scene};
-/// # use pathtracer::shape::Sphere;
-/// # use pathtracer::texture::UniformTexture;
-/// # use pathtracer::Point;
-/// #
-/// let scene = Scene::new(
-/// Camera::default(),
-/// LightAggregate::empty(),
-/// vec![
-/// Object::new(
-/// Sphere::new(Point::origin(), 1.0).into(),
-/// UniformMaterial::new(
-/// LightProperties::new(
-/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
-/// LinearColor::new(0.0, 0.0, 0.0), // specular component
-/// None,
-/// ),
-/// ).into(),
-/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
-/// ),
-/// ],
-/// LinearColor::black(), // Background color
-/// 5, // aliasing limit
-/// 3, // reflection recursion limit
-/// 0.0, // diffraction index
-/// );
-/// ```
-pub fn new(
-camera: Camera,
-lights: LightAggregate,
-mut objects: Vec<Object>,
-background: LinearColor,
-aliasing_limit: u32,
-reflection_limit: u32,
-diffraction_index: f32,
-) -> Self {
-// NOTE(Antoine): fun fact: BVH::build stack overflows when given an empty slice :)
-let bvh = BVH::build(&mut objects);
-Scene {
-camera,
-lights,
-objects,
-bvh,
-background,
-aliasing_limit,
-reflection_limit,
-diffraction_index,
-}
+impl Raytracer {
+/// Create a [`Raytracer`] renderer with the given [`Scene`]
+///
+/// [`Raytracer`]: struct.Raytracer.html
+/// [`Scene`]: ../scene/scene/struct.Scene.html
+pub fn new(scene: Scene) -> Self {
+Raytracer { scene }
}
-/// Render the scene into an image.
+/// Render the [`Scene`] using Raytracing.
+///
+/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
-let mut image = RgbImage::new(self.camera.film().width(), self.camera.film().height());
+let mut image = RgbImage::new(
+self.scene.camera.film().width(),
+self.scene.camera.film().height(),
+);
let total = (image.width() * image.height()) as u64;
let pb = indicatif::ProgressBar::new(total);
@ -96,7 +46,7 @@ impl Scene {
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} pixels (ETA: {eta})",
));
-let pixel_func = if self.aliasing_limit > 0 {
+let pixel_func = if self.scene.shot_rays > 0 {
Self::anti_alias_pixel
} else {
Self::pixel
@ -121,18 +71,17 @@ impl Scene {
/// Get pixel color for (x, y) a pixel **coordinate**
fn pixel(&self, x: f32, y: f32) -> LinearColor {
-let (x, y) = self.camera.film().pixel_ratio(x, y);
-let pixel = self.camera.film().pixel_at_ratio(x, y);
-let direction = Unit::new_normalize(pixel - self.camera.origin());
-let indices = RefractionInfo::with_index(self.diffraction_index);
-self.cast_ray(Ray::new(pixel, direction)).map_or_else(
-|| self.background.clone(),
+let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
+let indices = RefractionInfo::with_index(self.scene.diffraction_index);
+let ray = self.scene.camera.ray_with_ratio(x, y);
+self.cast_ray(ray).map_or_else(
+|| self.scene.background.clone(),
|(t, obj)| {
self.color_at(
-pixel + direction.as_ref() * t,
+ray.origin + ray.direction.as_ref() * t,
obj,
-direction,
-self.reflection_limit,
+ray.direction,
+self.scene.reflection_limit,
indices,
)
},
@ -141,7 +90,7 @@ impl Scene {
/// Get pixel color with anti-aliasing
fn anti_alias_pixel(&self, x: f32, y: f32) -> LinearColor {
-let range = 0..self.aliasing_limit;
+let range = 0..self.scene.shot_rays;
let mut rng = thread_rng();
let acc: LinearColor = range
.map(|_| {
@ -151,11 +100,11 @@ impl Scene {
})
.map(LinearColor::clamp)
.sum();
-acc / self.aliasing_limit as f32
+acc / self.scene.shot_rays as f32
}
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
-self.bvh.walk(&ray, &self.objects)
+self.scene.bvh.walk(&ray, &self.scene.objects)
}
fn color_at(
@ -173,6 +122,9 @@ impl Scene {
let normal = object.shape.normal(&point);
let reflected_ray = reflected(incident_ray, normal);
+// FIXME: change this to averaged sampled rays instead of visiting every light ?
+// Indeed the path-tracing algorithm is good for calculating the radiance at a point
+// But it should be used for reflection and refraction too...
let lighting = self.illuminate(point, object_color, &properties, normal, reflected_ray);
if properties.refl_trans.is_none() {
// Avoid calculating reflection when not needed
@ -261,7 +213,8 @@ impl Scene {
}
fn illuminate_ambient(&self, color: LinearColor) -> LinearColor {
-self.lights
+self.scene
+.lights
.ambient_lights_iter()
.map(|light| color.clone() * light.illumination(&Point::origin()))
.map(LinearColor::clamp)
@ -275,11 +228,12 @@ impl Scene {
normal: Unit<Vector>,
reflected: Unit<Vector>,
) -> LinearColor {
-self.lights
+self.scene
+.lights
.spatial_lights_iter()
.map(|light| {
let (direction, t) = light.to_source(&point);
-let light_ray = Ray::new(point + 0.001 * direction.as_ref(), direction);
+let light_ray = Ray::new(point + direction.as_ref() * 0.001, direction);
match self.cast_ray(light_ray) {
// Take shadows into account
Some((obstacle_t, _)) if obstacle_t < t => return LinearColor::black(),
@ -295,72 +249,8 @@ impl Scene {
}
}
-#[derive(Debug, PartialEq, Deserialize)]
-struct SerializedScene {
-camera: Camera,
-#[serde(default)]
-lights: LightAggregate,
-#[serde(default)]
-objects: Vec<Object>,
-#[serde(default)]
-background: LinearColor,
-#[serde(default)]
-aliasing_limit: u32,
-#[serde(default)]
-reflection_limit: u32,
-#[serde(default = "crate::serialize::default_identity")]
-starting_diffraction: f32,
-}
-impl From<SerializedScene> for Scene {
-fn from(scene: SerializedScene) -> Self {
-Scene::new(
-scene.camera,
-scene.lights,
-scene.objects,
-scene.background,
-scene.aliasing_limit,
-scene.reflection_limit,
-scene.starting_diffraction,
-)
-}
-}
-impl<'de> Deserialize<'de> for Scene {
-fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-where
-D: Deserializer<'de>,
-{
-let cam: SerializedScene = Deserialize::deserialize(deserializer)?;
-Ok(cam.into())
-}
-}
-#[cfg(test)]
-mod test {
-use super::*;
-#[test]
-fn deserialization_works() {
-let yaml = std::include_str!("../../examples/scene.yaml");
-let _: Scene = serde_yaml::from_str(yaml).unwrap();
-// FIXME: actually test the equality ?
-}
-#[test]
-#[ignore] // stack overflow because of BVH :(
-fn bvh_fails() {
-use crate::core::Camera;
-use crate::render::{LightAggregate, Scene};
-let _scene = Scene::new(
-Camera::default(),
-LightAggregate::empty(),
-Vec::new(), // Objects list
-LinearColor::black(), // Background color
-5, // aliasing limit
-3, // reflection recursion limit
-0.0, // diffraction index
-);
-}
-}
+impl Renderer for Raytracer {
+fn render(&self) -> RgbImage {
+self.render()
+}
+}

View file

@ -1,5 +1,7 @@
use crate::Vector;
use nalgebra::Unit;
+use rand::prelude::thread_rng;
+use rand::Rng;
pub fn reflected(incident: Unit<Vector>, normal: Unit<Vector>) -> Unit<Vector> {
let proj = incident.dot(&normal);
@ -65,3 +67,58 @@ impl RefractionInfo {
std::mem::swap(&mut self.old_index, &mut self.new_index)
}
}
/// Returns a random ray in the hemisphere described by a normal unit-vector, and the probability
/// to have picked that direction.
#[allow(unused)] // FIXME: remove once used
pub fn sample_hemisphere(normal: Vector) -> (Vector, f32) {
let mut rng = thread_rng();
let azimuth = rng.gen::<f32>() * std::f32::consts::PI * 2.;
// Cosine weighted importance sampling
let cos_elevation: f32 = rng.gen();
let sin_elevation = f32::sqrt(1. - cos_elevation * cos_elevation);
let x = sin_elevation * azimuth.cos();
let y = cos_elevation;
let z = sin_elevation * azimuth.sin();
// Calculate an orthonormal basis, defined by (normal_b, normal, normal_t)
// Pay attention to degenerate cases when (y, z) is small for use with cross product
let normal_t = if normal.x.abs() > normal.y.abs() {
Vector::new(normal.z, 0., -normal.x).normalize()
} else {
Vector::new(0., -normal.z, normal.y).normalize()
};
let normal_b = normal.cross(&normal_t);
// Perform the matrix calculation by hand...
let scattered = Vector::new(
x * normal_b.x + y * normal.x + z * normal_t.x,
x * normal_b.y + y * normal.y + z * normal_t.y,
x * normal_b.z + y * normal.z + z * normal_t.z,
);
// The probability to have picked the ray is inversely proportional to cosine of the angle with
// the normal
(scattered, 1. / scattered.dot(&normal))
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn sample_hemisphere_work() {
// NOTE(Bruno): should use some test-case generation for failure-reproduction purposes...
let mut rng = thread_rng();
for _ in 0..100 {
let normal = Vector::new(rng.gen(), rng.gen(), rng.gen());
for _ in 0..100 {
let (sample, proportion) = sample_hemisphere(normal);
let cos_angle = normal.dot(&sample);
assert!(cos_angle >= 0.);
assert!(1. / cos_angle - proportion < std::f32::EPSILON);
}
}
}
}

View file

@ -8,13 +8,13 @@ use std::iter::Iterator;
/// A struct centralizing the light computation logic.
pub struct LightAggregate {
#[serde(default)]
-ambients: Vec<AmbientLight>,
+pub(crate) ambients: Vec<AmbientLight>,
#[serde(default)]
-directionals: Vec<DirectionalLight>,
+pub(crate) directionals: Vec<DirectionalLight>,
#[serde(default)]
-points: Vec<PointLight>,
+pub(crate) points: Vec<PointLight>,
#[serde(default)]
-spots: Vec<SpotLight>,
+pub(crate) spots: Vec<SpotLight>,
}
impl LightAggregate {
@ -23,7 +23,7 @@ impl LightAggregate {
/// # Examples
///
/// ```
-/// # use pathtracer::render::LightAggregate;
+/// # use pathtracer::scene::LightAggregate;
/// #
/// let la = LightAggregate::empty();
/// assert_eq!(la.ambient_lights_iter().count(), 0);
@ -40,7 +40,7 @@ impl LightAggregate {
/// # Examples
///
/// ```
-/// # use pathtracer::render::LightAggregate;
+/// # use pathtracer::scene::LightAggregate;
/// #
/// let la = LightAggregate::new(
/// Vec::new(),

View file

@ -0,0 +1,133 @@
use std::convert::TryFrom;
use std::path::PathBuf;
use nalgebra::{Similarity3, Unit, VectorSlice3};
use serde::Deserialize;
use tobj::{self, load_obj};
use super::Object;
use crate::{
core::{LightProperties, LinearColor},
material::{MaterialEnum, UniformMaterial},
shape::{InterpolatedTriangle, ShapeEnum, Triangle},
texture::{TextureEnum, UniformTexture},
Point, Vector,
};
/// Represent a mesh of objects.
#[serde(try_from = "Wavefront")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Mesh {
/// The shapes composing the mesh
pub(crate) shapes: Vec<Object>,
}
#[derive(Debug, PartialEq, Deserialize)]
pub(crate) struct Wavefront {
pub obj_file: PathBuf,
#[serde(default = "nalgebra::zero")]
translation: Vector,
#[serde(default = "nalgebra::zero")]
rotation: Vector,
#[serde(default = "crate::serialize::coefficient::default_identity")]
scale: f32,
}
impl TryFrom<Wavefront> for Mesh {
type Error = tobj::LoadError;
fn try_from(wavefront: Wavefront) -> Result<Mesh, Self::Error> {
let mut shapes = Vec::new();
let (models, materials) = load_obj(&wavefront.obj_file)?;
// The object to world transformation matrix
let transform = Similarity3::new(
wavefront.translation,
wavefront.rotation * std::f32::consts::PI / 180., // From degrees to radians
wavefront.scale,
);
for model in models {
let mesh = &model.mesh;
// mesh.indices contains all vertices. Each group of 3 vertices
// is a triangle, so we iterate over indices 3 by 3.
for i in 0..(mesh.indices.len() / 3) {
let (a, b, c) = (
mesh.indices[i * 3] as usize,
mesh.indices[i * 3 + 1] as usize,
mesh.indices[i * 3 + 2] as usize,
);
let pos_a = transform * Point::from_slice(&mesh.positions[(a * 3)..(a * 3 + 3)]);
let pos_b = transform * Point::from_slice(&mesh.positions[(b * 3)..(b * 3 + 3)]);
let pos_c = transform * Point::from_slice(&mesh.positions[(c * 3)..(c * 3 + 3)]);
let triangle: ShapeEnum = if mesh.normals.is_empty() {
Triangle::new(pos_a, pos_b, pos_c).into()
} else {
// We apply the (arguably useless) scaling to the vectors in case it is
// negative, which would invert their direction
let norm_a = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(a * 3)..(a * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
let norm_b = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(b * 3)..(b * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
let norm_c = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(c * 3)..(c * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
InterpolatedTriangle::new(pos_a, pos_b, pos_c, norm_a, norm_b, norm_c).into()
};
// FIXME: handle material
let (material, texture): (MaterialEnum, TextureEnum) =
if let Some(mat_id) = mesh.material_id {
let mesh_mat = &materials[mat_id];
let diffuse = LinearColor::from_slice(&mesh_mat.ambient[..]);
let specular = LinearColor::from_slice(&mesh_mat.ambient[..]);
let material = UniformMaterial::new(LightProperties::new(
diffuse.clone(),
specular,
// FIXME: material.dissolve is supposed to be "the alpha term"
// Needs translation to our ReflTransEnum
None,
));
// we only handle uniform textures
let texture = UniformTexture::new(diffuse);
(material.into(), texture.into())
} else {
// FIXME: should we accept this, and use a default
// Material, or throw a LoadError
(
UniformMaterial::new(LightProperties::new(
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(0.1, 0.1, 0.1),
None,
))
.into(),
UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
)
};
shapes.push(Object::new(triangle, material, texture));
}
}
Ok(Mesh { shapes })
}
}

View file

@ -0,0 +1,13 @@
//! Description of the scene.
pub mod light_aggregate;
pub use light_aggregate::*;
mod mesh;
pub use mesh::*;
pub mod object;
pub use object::*;
pub mod scene;
pub use scene::*;

View file

@ -1,7 +1,7 @@
//! Logic for the scene objects
use crate::material::MaterialEnum;
-use crate::shape::{Shape, ShapeEnum};
+use crate::shape::ShapeEnum;
use crate::texture::TextureEnum;
use crate::Point;
use beevee::{
@ -12,7 +12,7 @@ use beevee::{
use serde::Deserialize;
/// An object being rendered in the scene.
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct Object {
/// The `Object`'s physical shape
pub shape: ShapeEnum,
@ -30,7 +30,7 @@ impl Object {
/// ```
/// # use pathtracer::core::{LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
-/// # use pathtracer::render::Object;
+/// # use pathtracer::scene::Object;
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;

View file

@ -0,0 +1,148 @@
//! Scene representation.
use super::{LightAggregate, Mesh, Object};
use crate::core::{Camera, LinearColor};
use beevee::bvh::BVH;
use serde::Deserialize;
/// Represent the scene being rendered.
#[serde(from = "SerializedScene")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Scene {
pub(crate) camera: Camera,
pub(crate) lights: LightAggregate,
pub(crate) objects: Vec<Object>,
pub(crate) bvh: BVH,
pub(crate) background: LinearColor,
pub(crate) shot_rays: u32,
pub(crate) reflection_limit: u32,
pub(crate) diffraction_index: f32,
}
impl Scene {
/// Creates a new `Scene`.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::{Camera, LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::scene::{LightAggregate, Object, Scene};
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
/// #
/// let scene = Scene::new(
/// Camera::default(),
/// LightAggregate::empty(),
/// vec![
/// Object::new(
/// Sphere::new(Point::origin(), 1.0).into(),
/// UniformMaterial::new(
/// LightProperties::new(
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
/// ),
/// ],
/// LinearColor::black(), // Background color
/// 5, // amount of rays shot per pixel
/// 3, // reflection recursion limit
/// 0.0, // diffraction index
/// );
/// ```
pub fn new(
camera: Camera,
lights: LightAggregate,
mut objects: Vec<Object>,
background: LinearColor,
shot_rays: u32,
reflection_limit: u32,
diffraction_index: f32,
) -> Self {
let bvh = BVH::build(&mut objects);
Scene {
camera,
lights,
objects,
bvh,
background,
shot_rays,
reflection_limit,
diffraction_index,
}
}
}
#[derive(Debug, PartialEq, Deserialize)]
#[serde(deny_unknown_fields)]
struct SerializedScene {
camera: Camera,
#[serde(default)]
lights: LightAggregate,
#[serde(default)]
objects: Vec<Object>,
#[serde(default)]
meshes: Vec<Mesh>,
#[serde(default)]
background: LinearColor,
#[serde(default)]
shot_rays: u32,
#[serde(default)]
reflection_limit: u32,
#[serde(default = "crate::serialize::default_identity")]
starting_diffraction: f32,
}
impl From<SerializedScene> for Scene {
fn from(mut scene: SerializedScene) -> Self {
let mut flattened_meshes: Vec<Object> = scene
.meshes
.into_iter()
.map(|m| m.shapes)
.flatten()
.collect();
scene.objects.append(&mut flattened_meshes);
Scene::new(
scene.camera,
scene.lights,
scene.objects,
scene.background,
scene.shot_rays,
scene.reflection_limit,
scene.starting_diffraction,
)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn deserialization_works() {
let yaml = std::include_str!("../../examples/scene.yaml");
let _: Scene = serde_yaml::from_str(yaml).unwrap();
// FIXME: actually test the equality ?
}
#[test]
fn empty_scene() {
use crate::core::Camera;
use crate::scene::{LightAggregate, Scene};
let _scene = Scene::new(
Camera::default(),
LightAggregate::empty(),
Vec::new(), // Objects list
LinearColor::black(), // Background color
5, // rays shot per pixel
3, // reflection recursion limit
0.0, // diffraction index
);
}
}
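A brief usage sketch, not part of the diff: the `deserialization_works` test above already goes through `serde_yaml`, and since every `SerializedScene` field except `camera` has a serde default, loading a scene file reduces to the following (the `load_scene` helper name is hypothetical).

use pathtracer::scene::Scene;

fn load_scene(path: &std::path::Path) -> Result<Scene, Box<dyn std::error::Error>> {
    // Deserialization goes through `SerializedScene`, which flattens `meshes` into
    // `objects` and builds the BVH via `Scene::new`.
    let yaml = std::fs::read_to_string(path)?;
    let scene: Scene = serde_yaml::from_str(&yaml)?;
    Ok(scene)
}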

View file

@ -0,0 +1,158 @@
use super::triangle::Triangle;
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
/// Represent a triangle with interpolated normals inside the scene.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct InterpolatedTriangle {
#[serde(flatten)]
tri: Triangle,
// FIXME: serialize with unit
normals: [Unit<Vector>; 3],
}
impl InterpolatedTriangle {
/// Creates a new `InterpolatedTriangle` from 3 [`Point`]s and 3 [`Vector`]s.
///
/// [`Point`]: ../../type.Point.html
/// [`Vector`]: ../../type.Vector.html
///
/// # Examples
///
/// ```
/// # use pathtracer::shape::InterpolatedTriangle;
/// # use pathtracer::{Point, Vector};
/// #
/// let t = InterpolatedTriangle::new(
/// Point::new(1.0, 0.0, 0.0),
/// Point::new(0.0, 1.0, 0.0),
/// Point::new(0.0, 0.0, 1.0),
/// Vector::x_axis(),
/// Vector::y_axis(),
/// Vector::z_axis(),
/// );
/// ```
pub fn new(
c0: Point,
c1: Point,
c2: Point,
n0: Unit<Vector>,
n1: Unit<Vector>,
n2: Unit<Vector>,
) -> Self {
InterpolatedTriangle {
tri: Triangle::new(c0, c1, c2),
normals: [n0, n1, n2],
}
}
}
impl Shape for InterpolatedTriangle {
fn normal(&self, point: &Point) -> Unit<Vector> {
let (u, v) = {
let c = self.tri.barycentric(point);
(c.x, c.y)
};
let interpol = self.normals[0].as_ref() * (1. - u - v)
+ self.normals[1].as_ref() * u
+ self.normals[2].as_ref() * v;
Unit::new_normalize(interpol)
}
fn project_texel(&self, point: &Point) -> Point2D {
self.tri.project_texel(point)
}
}
impl Bounded for InterpolatedTriangle {
fn aabb(&self) -> AABB {
self.tri.aabb()
}
fn centroid(&self) -> Point {
self.tri.centroid()
}
}
impl Intersected for InterpolatedTriangle {
fn intersect(&self, ray: &Ray) -> Option<f32> {
self.tri.intersect(ray)
}
}
#[cfg(test)]
mod test {
use super::*;
fn simple_triangle() -> InterpolatedTriangle {
InterpolatedTriangle::new(
Point::origin(),
Point::new(0., 1., 1.),
Point::new(0., 1., 0.),
Vector::x_axis(),
Vector::y_axis(),
Vector::z_axis(),
)
}
#[test]
fn normal_interpolation_at_c0_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::origin());
assert_eq!(normal, Vector::x_axis());
}
#[test]
fn normal_interpolation_at_c1_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::new(0., 1., 1.));
assert_eq!(normal, Vector::y_axis());
}
#[test]
fn normal_interpolation_at_c2_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::new(0., 1., 0.));
assert_eq!(normal, Vector::z_axis());
}
#[test]
fn normal_interpolation_at_center_works() {
let triangle = simple_triangle();
let center = Point::new(0., 2. / 3., 1. / 3.);
let normal = triangle.normal(&center);
let expected = Unit::new_normalize(Vector::new(1., 1., 1.));
assert!((normal.as_ref() - expected.as_ref()).magnitude() < 1e-5)
}
#[test]
fn deserialization_works() {
let yaml = r#"
corners:
- [0.0, 0.0, 0.0]
- [0.0, 1.0, 1.0]
- [0.0, 1.0, 0.0]
normals:
- [1.0, 0.0, 0.0]
- [0.0, 1.0, 0.0]
- [0.0, 0.0, 1.0]
"#;
let triangle: InterpolatedTriangle = serde_yaml::from_str(yaml).unwrap();
assert_eq!(
triangle,
InterpolatedTriangle::new(
Point::origin(),
Point::new(0., 1., 1.),
Point::new(0., 1., 0.),
Vector::x_axis(),
Vector::y_axis(),
Vector::z_axis(),
)
)
}
}
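For intuition about the barycentric blend in `normal` above, a hypothetical extra test in the same style as the module above (reusing its `simple_triangle` helper): at the midpoint of the c0-c1 edge, (u, v) = (0.5, 0.0), so the interpolated normal is the renormalized average of the first two corner normals.

#[test]
fn normal_interpolation_at_edge_midpoint_works() {
    let triangle = simple_triangle();
    // Midpoint of the c0-c1 edge: barycentric coordinates (u, v) = (0.5, 0.0).
    let midpoint = Point::new(0., 0.5, 0.5);
    let expected = Unit::new_normalize(Vector::new(1., 1., 0.));
    assert!((triangle.normal(&midpoint).as_ref() - expected.as_ref()).magnitude() < 1e-5)
}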

View file

@ -0,0 +1,9 @@
use super::{InterpolatedTriangle, Shape, Triangle};
use crate::material::{Material, TriangleMaterial, UniformMaterial};
use crate::texture::{Texture, TriangleTexture, UniformTexture};
use crate::Point;
use beevee::{
aabb::{Bounded, AABB},
bvh::Intersected,
ray::Ray,
};

View file

@ -14,42 +14,53 @@ use serde::Deserialize;
 #[serde(rename_all = "lowercase")]
 #[allow(missing_docs)]
 #[enum_dispatch::enum_dispatch]
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Deserialize)]
 pub enum ShapeEnum {
 Sphere,
 Triangle,
+InterpolatedTriangle,
 }
+// FIXME: this has to be written by hand due to a limitation of `enum_dispatch` on super traits
+impl Bounded for ShapeEnum {
+fn aabb(&self) -> AABB {
+match self {
+ShapeEnum::Sphere(s) => s.aabb(),
+ShapeEnum::Triangle(s) => s.aabb(),
+ShapeEnum::InterpolatedTriangle(s) => s.aabb(),
+}
+}
+fn centroid(&self) -> Point {
+match self {
+ShapeEnum::Sphere(s) => s.centroid(),
+ShapeEnum::Triangle(s) => s.centroid(),
+ShapeEnum::InterpolatedTriangle(s) => s.centroid(),
+}
+}
+}
+impl Intersected for ShapeEnum {
+fn intersect(&self, ray: &Ray) -> Option<f32> {
+match self {
+ShapeEnum::Sphere(s) => s.intersect(ray),
+ShapeEnum::Triangle(s) => s.intersect(ray),
+ShapeEnum::InterpolatedTriangle(s) => s.intersect(ray),
+}
+}
+}
 /// Represent an abstract shape inside the scene.
 #[enum_dispatch::enum_dispatch(ShapeEnum)]
-pub trait Shape: std::fmt::Debug {
+pub trait Shape: std::fmt::Debug + Intersected {
-/// Return the distance at which the object intersects with the ray, or None if it does not.
-fn intersect(&self, ray: &Ray) -> Option<f32>;
 /// Return the unit vector corresponding to the normal at this point of the shape.
 fn normal(&self, point: &Point) -> Unit<Vector>;
 /// Project the point from the shape's surface to its texel coordinates.
 fn project_texel(&self, point: &Point) -> Point2D;
-/// Enclose the `Shape` in an axi-aligned bounding-box.
-fn aabb(&self) -> AABB;
-/// Return the centroid of the shape.
-fn centroid(&self) -> Point;
 }
-impl Bounded for dyn Shape {
-fn aabb(&self) -> AABB {
-self.aabb()
-}
-fn centroid(&self) -> Point {
-self.centroid()
-}
-}
-impl Intersected for dyn Shape {
-fn intersect(&self, ray: &Ray) -> Option<f32> {
-self.intersect(ray)
-}
-}
+mod interpolated_triangle;
+pub use interpolated_triangle::*;
 mod sphere;
 pub use sphere::*;
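A small sketch, not part of the diff, of what the new `Shape: Intersected` super-trait bound buys: generic code over any `S: Shape` can call `intersect` directly, without naming `Intersected` in its own bounds (the `closest_hit` helper is hypothetical).

use beevee::ray::Ray;
use pathtracer::shape::Shape;

fn closest_hit<'a, S: Shape>(shapes: &'a [S], ray: &Ray) -> Option<(&'a S, f32)> {
    shapes
        .iter()
        // `intersect` comes from the `Intersected` super-trait required by `Shape`.
        .filter_map(|shape| shape.intersect(ray).map(|distance| (shape, distance)))
        .min_by(|a, b| a.1.partial_cmp(&b.1).expect("distances should not be NaN"))
}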

View file

@ -1,6 +1,7 @@
 use super::Shape;
 use crate::{Point, Point2D, Vector};
-use beevee::aabb::AABB;
+use beevee::aabb::{Bounded, AABB};
+use beevee::bvh::Intersected;
 use beevee::ray::Ray;
 use nalgebra::Unit;
 use serde::Deserialize;
@ -38,6 +39,38 @@ impl Sphere {
 }
 impl Shape for Sphere {
+fn normal(&self, point: &Point) -> Unit<Vector> {
+let delt = if self.inverted {
+self.center - point
+} else {
+point - self.center
+};
+Unit::new_normalize(delt)
+}
+fn project_texel(&self, point: &Point) -> Point2D {
+// Project the sphere on the XY-plane
+Point2D::new(
+0.5 + (point.x - self.center.x) / (2. * self.radius),
+0.5 + (point.y - self.center.y) / (2. * self.radius),
+)
+}
+}
+impl Bounded for Sphere {
+fn aabb(&self) -> AABB {
+let delt = Vector::new(self.radius, self.radius, self.radius);
+let min = self.center - delt;
+let max = self.center + delt;
+AABB::with_bounds(min, max)
+}
+fn centroid(&self) -> Point {
+self.center
+}
+}
+impl Intersected for Sphere {
 fn intersect(&self, ray: &Ray) -> Option<f32> {
 use std::mem;
@ -67,34 +100,6 @@ impl Shape for Sphere {
 Some(t_0)
 }
 }
-fn normal(&self, point: &Point) -> Unit<Vector> {
-let delt = if self.inverted {
-self.center - point
-} else {
-point - self.center
-};
-Unit::new_normalize(delt)
-}
-fn project_texel(&self, point: &Point) -> Point2D {
-// Project the sphere on the XY-plane
-Point2D::new(
-0.5 + (point.x - self.center.x) / (2. * self.radius),
-0.5 + (point.y - self.center.y) / (2. * self.radius),
-)
-}
-fn aabb(&self) -> AABB {
-let delt = Vector::new(self.radius, self.radius, self.radius);
-let min = self.center - delt;
-let max = self.center + delt;
-AABB::with_bounds(min, max)
-}
-fn centroid(&self) -> Point {
-self.center
-}
 }
 #[cfg(test)]
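A worked example of the XY-plane projection kept in `project_texel` above (a sketch, not part of the diff): for a unit sphere at the origin the texel coordinates reduce to u = 0.5 + x / 2 and v = 0.5 + y / 2.

use pathtracer::shape::{Shape, Sphere};
use pathtracer::{Point, Point2D};

fn main() {
    let sphere = Sphere::new(Point::origin(), 1.0);
    // The +X surface point projects to the right edge of the texture, the -Y point to the bottom.
    assert_eq!(sphere.project_texel(&Point::new(1., 0., 0.)), Point2D::new(1.0, 0.5));
    assert_eq!(sphere.project_texel(&Point::new(0., -1., 0.)), Point2D::new(0.5, 0.0));
}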

View file

@ -1,12 +1,14 @@
 use super::Shape;
 use crate::{Point, Point2D, Vector};
-use beevee::aabb::AABB;
+use beevee::aabb::{Bounded, AABB};
+use beevee::bvh::Intersected;
 use beevee::ray::Ray;
 use nalgebra::Unit;
-use serde::{Deserialize, Deserializer};
+use serde::Deserialize;
 /// Represent a triangle inside the scene.
-#[derive(Clone, Debug, PartialEq)]
+#[serde(from = "SerializedTriangle")]
+#[derive(Clone, Debug, PartialEq, Deserialize)]
 pub struct Triangle {
 c0: Point,
 c0c1: Vector,
@ -38,7 +40,7 @@ impl Triangle {
 }
 }
-fn barycentric(&self, point: &Point) -> Point2D {
+pub(crate) fn barycentric(&self, point: &Point) -> Point2D {
 let c0_pos = point - self.c0;
 // P - A = u * (B - A) + v * (C - A)
 // (C - A) = v0 is c0c2
@ -58,6 +60,29 @@ impl Triangle {
 }
 impl Shape for Triangle {
+fn normal(&self, _: &Point) -> Unit<Vector> {
+Unit::new_normalize(self.c0c1.cross(&self.c0c2))
+}
+fn project_texel(&self, point: &Point) -> Point2D {
+self.barycentric(point)
+}
+}
+impl Bounded for Triangle {
+fn aabb(&self) -> AABB {
+AABB::empty()
+.grow(&self.c0)
+.grow(&(self.c0 + self.c0c1))
+.grow(&(self.c0 + self.c0c2))
+}
+fn centroid(&self) -> Point {
+self.c0 + (self.c0c1 + self.c0c2) / 2.
+}
+}
+impl Intersected for Triangle {
 fn intersect(&self, ray: &Ray) -> Option<f32> {
 let pvec = ray.direction.cross(&self.c0c2);
 let det = self.c0c1.dot(&pvec);
@ -88,25 +113,6 @@ impl Shape for Triangle {
 Some(t)
 }
 }
-fn normal(&self, _: &Point) -> Unit<Vector> {
-Unit::new_normalize(self.c0c1.cross(&self.c0c2))
-}
-fn project_texel(&self, point: &Point) -> Point2D {
-self.barycentric(point)
-}
-fn aabb(&self) -> AABB {
-AABB::empty()
-.grow(&self.c0)
-.grow(&(self.c0 + self.c0c1))
-.grow(&(self.c0 + self.c0c2))
-}
-fn centroid(&self) -> Point {
-self.c0 + (self.c0c1 + self.c0c2) / 2.
-}
 }
 #[derive(Debug, Deserialize)]
@ -124,16 +130,6 @@ impl From<SerializedTriangle> for Triangle {
 }
 }
-impl<'de> Deserialize<'de> for Triangle {
-fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-where
-D: Deserializer<'de>,
-{
-let cam: SerializedTriangle = Deserialize::deserialize(deserializer)?;
-Ok(cam.into())
-}
-}
 #[cfg(test)]
 mod test {
 use super::*;

View file

@ -9,10 +9,11 @@ use serde::Deserialize;
 #[serde(rename_all = "lowercase")]
 #[allow(missing_docs)]
 #[enum_dispatch::enum_dispatch]
-#[derive(Debug, PartialEq, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Deserialize)]
 pub enum TextureEnum {
 #[serde(rename = "uniform")]
 UniformTexture,
+TriangleTexture,
 }
 /// Represent an object's texture.
@ -22,5 +23,8 @@ pub trait Texture: std::fmt::Debug {
 fn texel_color(&self, point: Point2D) -> LinearColor;
 }
+mod triangle;
+pub use triangle::*;
 mod uniform;
 pub use uniform::*;

View file

@ -0,0 +1,23 @@
use super::{uniform::UniformTexture, Texture};
use crate::core::LinearColor;
use crate::Point2D;
use serde::Deserialize;
/// Represent a texture which interpolates between three points.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct TriangleTexture {
/// The texture at each point
textures: [UniformTexture; 3],
}
impl Texture for TriangleTexture {
fn texel_color(&self, point: Point2D) -> LinearColor {
let (u, v) = (point.x, point.y);
let sum = self.textures[0].texel_color(point) * (1. - u - v)
+ self.textures[1].texel_color(point) * u
+ self.textures[2].texel_color(point) * v;
sum / 3.
}
}
// FIXME: tests
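Spelled out, the blend computed by `texel_color` above is, for barycentric coordinates (u, v) and corner colors t0, t1, t2 taken from `textures`:

color(u, v) = ((1 - u - v) * t0 + u * t1 + v * t2) / 3

so at a corner, e.g. (u, v) = (1, 0), only `textures[1]` contributes and the returned color is its texel color divided by 3.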