Compare commits

...

67 commits

Author SHA1 Message Date
Antoine Martin 7526e41938 project: justify every dependency 2020-05-09 13:08:37 +02:00
Antoine Martin ae3931e796 library: render: don't return infinite weight
hemisphere sampling previously could return infinite, producing NaNs
when later multiplying that weight by 0.
2020-05-09 02:10:05 +02:00
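For context on the fix above, the failure mode is plain IEEE 754 arithmetic: an infinite importance weight multiplied by a zero radiance contribution yields NaN, which then poisons the accumulated pixel. A minimal sketch (variable names are illustrative; the actual clamp lives in `sample_hemisphere` in `render/utils.rs` further down in this diff):

    let cos_angle: f32 = 0.0;                  // grazing sample, perpendicular to the normal
    let weight = 1.0 / cos_angle;              // f32::INFINITY
    assert!((weight * 0.0).is_nan());          // inf * 0 == NaN, which spreads through the sum

    // the fix clamps the weight to a finite value before returning it:
    let weight = (1.0 / cos_angle).min(f32::MAX);
    assert_eq!(weight * 0.0, 0.0);             // well-defined again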
Antoine Martin dad5113724 scene: store number of intermediate steps 2020-05-08 12:51:25 +02:00
Antoine Martin e650655d73 rustfmt: remove requirement on version 2020-05-08 12:51:07 +02:00
Antoine Martin bcaaca2c9a examples: reduce cornell-box resolution to 1080 2020-05-08 12:49:17 +02:00
Antoine Martin 28df5d18a4 cargo: performance flags in release 2020-04-08 21:58:39 +02:00
Bruno BELANYI 1e7d0a2807 library: render: pathtracer: fix off-by-one exports 2020-04-08 20:24:15 +02:00
Antoine Martin faa1ef1fb8 library: render: pathtracer: avoid row allocation 2020-04-08 19:31:46 +02:00
Antoine Martin 00dae425d9 library: render: write passes in steps 2020-04-08 18:05:11 +02:00
Antoine Martin be1f400b34 library: render: pathtracer: sequential passes 2020-04-08 17:54:45 +02:00
Bruno BELANYI 2fe65e9bc6 examples: cornell-box: change FOV and positions
To make sure we see the entirety of the box, with a point-of-view which
is similar to the pictures that can be found online, the box has to be
farther away.

I also re-centered the point-light to have it beneath the lamp in the
middle of the ceiling.
2020-04-08 01:22:34 +02:00
Antoine Martin 9596bb3d00 library: render: pathtracer: simple progress 2020-04-07 23:54:28 +02:00
Antoine Martin 82dee9fde5 library: render: progress: fix refresh for pathtracing 2020-04-07 23:54:28 +02:00
Antoine Martin b89e01107d library: unnest scene module to please clippy
warning: module has the same name as its containing module
  --> pathtracer/src/scene/mod.rs:12:1
   |
12 | pub mod scene;
   | ^^^^^^^^^^^^^^
   |
   = note: `#[warn(clippy::module_inception)]` on by default
2020-04-07 23:01:56 +02:00
Antoine Martin f7780cb54e rustfmt: bump to latest version to please CI 2020-04-07 22:56:00 +02:00
Antoine Martin 3f2c7b40e8 library: render: pathtracer: draft parallelize 2020-04-07 22:53:21 +02:00
Antoine Martin c0c34fba7d library: scene: mesh: use unknown_param for Ke
tobj actually has an `unknown_param` map with everything that's not in
the official MTL spec, so let's use that instead of forking tobj
2020-04-07 22:53:21 +02:00
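As a sketch of what that looks like with tobj 1.0 (hedged, not the exact project code; the real usage is in `pathtracer/src/scene/mesh.rs` further down): every MTL key tobj does not recognise ends up in the material's `unknown_param` map, keyed by name with the raw value string.

    // Fragment assumed to run inside a function returning Result<_, tobj::LoadError>.
    let (_models, materials) = tobj::load_obj(std::path::Path::new("cornell-box.obj"))?;
    if let Some(ke) = materials[0].unknown_param.get("Ke") {
        // "Ke" arrives verbatim, e.g. "17 12 4"; parsing it is up to the caller
        // (see `parse_float3` in mesh.rs below).
        println!("emissive term: {}", ke);
    }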
Antoine Martin 0c289ca482 library: scene: mesh: parse emitted light from MTL 2020-04-07 17:31:19 +02:00
Bruno BELANYI a10fd07f43 library: render: pathtracer: basic pathtracing 2020-04-06 15:40:34 +02:00
Bruno BELANYI ddebd55fcd library: render: utils: use Unit for vectors 2020-04-06 15:25:39 +02:00
Bruno BELANYI c1801a7a78 library: render: pathtracer: use map_or_else 2020-04-06 15:07:43 +02:00
Bruno BELANYI 482e6bea3f library: render: add basics for Pathtracer
This is simple ray-tracing, with a binary white-or-black color depending
on whether we hit an object or not.
2020-04-06 14:58:07 +02:00
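In other words, the first draft's per-pixel logic amounts to the following sketch (method names are taken from elsewhere in this diff, so treat it as illustrative rather than the committed code):

    // White if the camera ray hits anything in the BVH, black otherwise;
    // no lighting and no bounces yet.
    fn pixel(&self, x: f32, y: f32) -> LinearColor {
        let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
        let ray = self.scene.camera.ray_with_ratio(x, y);
        if self.cast_ray(ray).is_some() {
            LinearColor::new(1., 1., 1.)
        } else {
            LinearColor::black()
        }
    }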
Bruno BELANYI 6066fe00dc library: render: move progressbar creation to module 2020-04-02 00:13:43 +02:00
Bruno BELANYI 70a923cf26 library: render: move raytracer to own directory 2020-04-02 00:06:33 +02:00
Bruno BELANYI 4bb20e2f30 executable: add bidirectional renderer option 2020-04-02 00:03:43 +02:00
Bruno BELANYI c0fc885159 library: render: add #[allow(unused)] attributes 2020-04-02 00:02:11 +02:00
Bruno BELANYI fc2de38b1a library: render: split bidirectional pathtracer 2020-04-01 23:58:27 +02:00
Bruno BELANYI 96995e7ef1 library: core: light_properties: add emitted light 2020-04-01 23:43:01 +02:00
Bruno BELANYI 7eefd7b574 WIP: library: render: pathtracer: add build_path 2020-03-31 12:09:28 +02:00
Bruno BELANYI 6d0de72e57 WIP: library: render: pathtrace: add Path struct 2020-03-31 12:09:28 +02:00
Bruno BELANYI a200a839b6 examples: rename aliasing_limit to shot_rays 2020-03-31 12:09:28 +02:00
Bruno BELANYI d4345e6ea4 library: scene: rename aliasing_limit to shot_rays 2020-03-31 12:09:28 +02:00
Bruno BELANYI e68ceb484d executable: use dummy pathtracer renderer 2020-03-31 12:09:09 +02:00
Bruno BELANYI b624ced37f library: render: pathtrace: add dummy Pathtracer 2020-03-30 02:25:58 +02:00
Bruno BELANYI 9c6b9af31a library: render: add Renderer trait 2020-03-29 21:28:06 +02:00
Bruno BELANYI 83ed6406ac executable: allow the choice of renderer 2020-03-29 21:28:06 +02:00
Bruno BELANYI 5ebad7c1ab pathtracer: move rendering logic to 'render' module 2020-03-29 20:41:19 +02:00
Bruno BELANYI ad668251d4 beevee: bvh: accelerated: add missing link to BVH 2020-03-29 20:20:01 +02:00
Bruno BELANYI a59bd026bc library: rename 'render' module to 'scene' 2020-03-29 20:15:27 +02:00
Bruno BELANYI aa47b54e4c examples: move OBJ-related files to subdirectory 2020-03-29 19:44:12 +02:00
Bruno BELANYI 4593e276c4 library: core: camera: fix documentation phrasing 2020-03-29 19:36:51 +02:00
Bruno BELANYI 9ad1100ded library: core: camera: move film behind the camera
To prepare for adding the handling of focal blur, move the film so that
it is behind the point of convergence of the lens.

In addition, store the distance to the focal plane in the camera, which
will be used when calculating rays with a non-zero aperture.
2020-03-29 19:36:51 +02:00
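Concretely, with the film mirrored one unit behind the lens center (hence the flipped `up`/`right` vectors in the camera diff below), a primary ray starts at a film point and is aimed through the lens:

    // Sketch of the ray construction, mirroring `Camera::ray_with_ratio` below.
    let pixel = self.film().pixel_at_ratio(x, y);                // point on the film, behind the lens
    let direction = Unit::new_normalize(self.origin() - pixel);  // aim through the lens center
    Ray::new(pixel, direction)

Keeping `distance_to_image` in the camera leaves the focal-plane distance available for the planned aperture handling.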
Bruno BELANYI 2f3224ea07 library: render: scene: calculate rays w/ camera 2020-03-29 19:36:51 +02:00
Bruno BELANYI 78d5954419 library: core: color: fix from_slice's doc-test 2020-03-29 19:36:16 +02:00
Bruno BELANYI b79c94aad9 library: core: camera: add ray calculation method 2020-03-29 19:35:50 +02:00
Antoine Martin c7fec074c2 examples: add cornell box example 2020-03-29 19:22:12 +02:00
Bruno BELANYI 2994a7dcfa library: render: mesh: parse rotation in degrees 2020-03-29 18:13:38 +02:00
Antoine Martin e1f18786ce cargo: bump tobj to 1.0 2020-03-29 16:52:26 +02:00
Bruno BELANYI 642f4221cd beevee: bvh: tree: fix build panic 2020-03-27 17:40:12 +01:00
Antoine Martin 0e65a75e2b library: render: mesh: fix panic when parsing OBJ 2020-03-27 17:40:12 +01:00
Antoine Martin cca40bcb8e library: render: mesh: from_slice to build Vector 2020-03-27 17:40:12 +01:00
Antoine Martin f0d36c7d7b library: render: mesh: use nalgebra::zero instead 2020-03-27 17:40:12 +01:00
Bruno BELANYI 15381d4bbd library: render: mesh: scale, rotate, & translate
The scaling factor is the same on all axes, to avoid changing angles,
which would mess with the normals too much.
2020-03-27 17:40:12 +01:00
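The reason a single scale factor is safe: normals transform with the inverse transpose of the model matrix, (M^-1)^T. For a rotation combined with a uniform scale s this is just (1/s) times the rotation, i.e. the same direction up to length, so re-normalising after the transform (as `mesh.rs` does below with `Unit::new_normalize(transform * vec)`) is enough. A non-uniform scale would skew angles and require the full inverse-transpose treatment.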
Antoine Martin 2d624c517f library: render: mesh: make default material grey 2020-03-27 17:40:12 +01:00
Antoine Martin fe5eee0172 library: render: mesh: load basic material from OBJ 2020-03-27 17:40:12 +01:00
Antoine Martin 6ba0f328cd library: render: mesh: handle empty normals in OBJ 2020-03-27 17:40:12 +01:00
Antoine Martin 0368edbd74 library: render: mesh: add OBJ loading 2020-03-27 17:39:44 +01:00
Bruno BELANYI a0d7d5e590 library: render: scene: remove ignored test 2020-03-26 19:05:01 +01:00
Bruno BELANYI 8727ae9d87 library: use #[serde(from)] for Deserialize 2020-03-26 19:03:42 +01:00
Bruno BELANYI 0678317442 library: render: scene: deserialize meshes 2020-03-26 18:48:48 +01:00
Bruno BELANYI f03880799b library: render: deserialization: error on unknown 2020-03-26 18:37:42 +01:00
Bruno BELANYI 0f6b81e40c WIP: add Mesh, TriangleTexture, TriangleMaterial 2020-03-26 18:37:42 +01:00
Bruno BELANYI e65a2a1f48 WIP: add comment about path-tracing 2020-03-26 17:42:08 +01:00
Bruno BELANYI 998838a6fc library: use Intersected as a super trait 2020-03-26 17:42:08 +01:00
Bruno BELANYI 3039607e4f beevee: bvh: use Accelerated trait for objects
This will allow for the use of meshes inside the BVH.

Returning the reference to a triangle inside the mesh directly, instead
of returning a reference to the mesh itself, allows for more efficient
execution.
2020-03-26 17:42:08 +01:00
Bruno BELANYI 5c0fc9689e library: shape: add InterpolatedTriangle type
This is a triangle with added normal interpolation at its edges. This is
particularly useful when rendering mesh objects.
2020-03-25 00:27:26 +01:00
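As a sketch of what the interpolation computes: for a hit point with barycentric coordinates (u, v) on a triangle with vertex normals n_a, n_b, n_c, the smooth-shaded normal is

    n(u, v) = normalize((1 - u - v) * n_a + u * n_b + v * n_c)

the same barycentric weighting the new `TriangleMaterial` uses further down to blend per-vertex colors.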
Bruno BELANYI 3b5410aef9 library: render: scene: add hemisphere sampling
This method takes a given normal, and computes a random ray in the
unit-hemisphere described by that normal.

We use cosine-weighted importance sampling because it leads to better
convergence and is a nice micro-optimisation (from four trigonometric
operations to only two).
2020-03-25 00:12:05 +01:00
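The construction added in `render/utils.rs` below builds the direction directly in a tangent frame around the normal; a condensed sketch of that code:

    // u1, u2 are uniform f32 samples in [0, 1)
    let cos_elevation = u1;                                        // cosine of the angle to the normal
    let sin_elevation = (1.0_f32 - cos_elevation * cos_elevation).sqrt();
    let azimuth = 2.0 * std::f32::consts::PI * u2;
    // direction expressed in the (tangent, normal, bitangent) basis
    let dir = (
        sin_elevation * azimuth.cos(),
        cos_elevation,
        sin_elevation * azimuth.sin(),
    );

Drawing the cosine of the elevation directly and recovering its sine from sqrt(1 - cos²) is where the saving comes from (no acos/sin pair for the elevation, only the two calls for the azimuth); the returned weight is the reciprocal of that cosine, clamped so it stays finite.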
44 changed files with 1629 additions and 333 deletions

2 .cargo/config Normal file
View file

@ -0,0 +1,2 @@
[build]
rustflags = ["-C", "target-cpu=native"]

View file

@ -52,7 +52,6 @@ use_field_init_shorthand = false
force_explicit_abi = true
condense_wildcard_suffixes = false
color = "Auto"
required_version = "1.4.12"
unstable_features = false
disable_all_formatting = false
skip_children = false

View file

@ -4,3 +4,7 @@ members = [
"beevee",
"pathtracer",
]
[profile.release]
lto = true
codegen-units = 1

View file

@ -7,5 +7,8 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Linear algebra basic operations and types
nalgebra = "0.20"
# High performance quicksort/quickselect
pdqselect = "0.1.0"

View file

@ -0,0 +1,39 @@
use super::Intersected;
use crate::aabb::Bounded;
use crate::ray::Ray;
/// The trait for any mesh-like object to be used in the [`BVH`]. If your object is not an
/// aggregate, you should instead implement [`Intersected`] which derives this trait automatically.
///
/// This trait is there to accommodate aggregate objects inside the [`BVH`]: you can implement a
/// faster look-up of information using a [`BVH`] in a mesh for example, returning directly the
/// reference to a hit triangle. This enables us to return this triangle instead of returning a
/// reference to the whole mesh.
///
/// [`BVH`]: struct.BVH.html
/// [`Intersected`]: trait.Intersected.html
pub trait Accelerated: Bounded {
/// The type contained in your [`Accelerated`] structure
///
/// [`Accelerated`]: trait.Accelerated.html
type Output;
/// Return None if no intersection happens with the ray, or a tuple of distance along the ray
/// and a reference to the object that was hit.
fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)>;
}
/// The automatic implementation for any [`Intersected`] object to be used in the [`BVH`].
///
/// [`BVH`]: struct.BVH.html
impl<T> Accelerated for T
where
T: Intersected,
{
type Output = Self;
/// Return a reference to `self` when a distance was found.
fn intersect(&self, ray: &Ray) -> Option<(f32, &Self::Output)> {
self.intersect(ray).map(|t| (t, self))
}
}
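To illustrate the intent, a hedged sketch (not part of this change) of an aggregate implementing `Accelerated`: the mesh walks its own triangles and returns the closest hit, so `BVH::walk` hands the caller a `&Tri` rather than a reference to the whole mesh. `Tri` and `TriangleMesh` are hypothetical types; `Tri` is assumed to implement `Intersected`, and `TriangleMesh` is assumed to implement `Bounded` separately.

    struct TriangleMesh {
        triangles: Vec<Tri>,
    }

    impl Accelerated for TriangleMesh {
        type Output = Tri;

        fn intersect(&self, ray: &Ray) -> Option<(f32, &Tri)> {
            self.triangles
                .iter()
                // `Intersected::intersect` returns the distance along the ray, if any
                .filter_map(|t| Intersected::intersect(t, ray).map(|dist| (dist, t)))
                // keep the closest hit
                .min_by(|(d1, _), (d2, _)| d1.partial_cmp(d2).expect("no NaN distances"))
        }
    }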

View file

@ -1,8 +1,11 @@
use crate::aabb::Bounded;
use crate::ray::Ray;
/// The trait for any object to be used in the [`BVH`].
/// The trait for any object to be used in the [`BVH`]. Its [`Accelerated`] implementation is
/// derived automatically and returns a reference to itself. If this is not the intended
/// semantics, see [`Accelerated`].
///
/// [`Accelerated`]: trait.Accelerated.html
/// [`BVH`]: struct.BVH.html
pub trait Intersected: Bounded {
/// Return None if there is no intersection, or the distance along the ray to the closest

View file

@ -1,5 +1,8 @@
//! The Bounding Volume Hierarchy
mod accelerated;
pub use accelerated::*;
mod intersected;
pub use intersected::*;

View file

@ -1,4 +1,4 @@
use super::Intersected;
use super::Accelerated;
use crate::aabb::AABB;
use crate::ray::Ray;
use crate::Axis;
@ -23,9 +23,9 @@ struct Node {
}
/// The BVH containing all the objects of type O.
/// This type must implement [`Intersected`].
/// This type must implement [`Accelerated`].
///
/// [`Intersected`]: trait.Intersected.html
/// [`Accelerated`]: trait.Accelerated.html
#[derive(Clone, Debug, PartialEq)]
pub struct BVH {
tree: Node,
@ -92,7 +92,7 @@ impl BVH {
/// let spheres: &mut [Sphere] = &mut [Sphere{ center: Point::origin(), radius: 2.5 }];
/// let bvh = BVH::build(spheres);
/// ```
pub fn build<O: Intersected>(objects: &mut [O]) -> Self {
pub fn build<O: Accelerated>(objects: &mut [O]) -> Self {
Self::with_max_capacity(objects, 32)
}
@ -157,7 +157,7 @@ impl BVH {
/// let spheres: &mut [Sphere] = &mut [Sphere{ center: Point::origin(), radius: 2.5 }];
/// let bvh = BVH::with_max_capacity(spheres, 32);
/// ```
pub fn with_max_capacity<O: Intersected>(objects: &mut [O], max_cap: usize) -> Self {
pub fn with_max_capacity<O: Accelerated>(objects: &mut [O], max_cap: usize) -> Self {
let tree = build_node(objects, 0, objects.len(), max_cap);
Self { tree }
}
@ -226,8 +226,8 @@ impl BVH {
/// let bvh = BVH::with_max_capacity(spheres, 32);
/// assert!(bvh.is_sound(spheres));
/// ```
pub fn is_sound<O: Intersected>(&self, objects: &[O]) -> bool {
fn check_node<O: Intersected>(objects: &[O], node: &Node) -> bool {
pub fn is_sound<O: Accelerated>(&self, objects: &[O]) -> bool {
fn check_node<O: Accelerated>(objects: &[O], node: &Node) -> bool {
if node.begin > node.end {
return false;
}
@ -322,17 +322,21 @@ impl BVH {
/// assert_eq!(dist, 0.5);
/// assert_eq!(obj, &spheres[0]);
/// ```
pub fn walk<'o, O: Intersected>(&self, ray: &Ray, objects: &'o [O]) -> Option<(f32, &'o O)> {
pub fn walk<'o, O: Accelerated>(
&self,
ray: &Ray,
objects: &'o [O],
) -> Option<(f32, &'o O::Output)> {
walk_rec_helper(ray, objects, &self.tree, std::f32::INFINITY)
}
}
fn walk_rec_helper<'o, O: Intersected>(
fn walk_rec_helper<'o, O: Accelerated>(
ray: &Ray,
objects: &'o [O],
node: &Node,
min: f32,
) -> Option<(f32, &'o O)> {
) -> Option<(f32, &'o O::Output)> {
use std::cmp::Ordering;
match &node.kind {
@ -340,7 +344,7 @@ fn walk_rec_helper<'o, O: Intersected>(
NodeEnum::Leaf => objects[node.begin..node.end]
.iter()
// This turns the Option<f32> of an intersection into an Option<(f32, &O)>
.filter_map(|o| o.intersect(ray).map(|d| (d, o)))
.filter_map(|o| o.intersect(ray))
// Discard values that are too far away
.filter(|(dist, _)| dist < &min)
// Only keep the minimum value, if there is one
@ -382,14 +386,14 @@ fn walk_rec_helper<'o, O: Intersected>(
}
}
fn bounds_from_slice<O: Intersected>(objects: &[O]) -> AABB {
fn bounds_from_slice<O: Accelerated>(objects: &[O]) -> AABB {
objects
.iter()
.map(|o| o.aabb())
.fold(AABB::empty(), |acc, other| acc.union(&other))
}
fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_cap: usize) -> Node {
fn build_node<O: Accelerated>(objects: &mut [O], begin: usize, end: usize, max_cap: usize) -> Node {
let aabb = bounds_from_slice(objects);
// Don't split nodes under capacity
if objects.len() <= max_cap {
@ -401,7 +405,7 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
};
}
// Calculate the SAH heuristic for this slice
let (split, axis, cost) = compute_sah(&mut objects[begin..end], aabb.surface(), max_cap);
let (split, axis, cost) = compute_sah(objects, aabb.surface(), max_cap);
// Only split if the heuristic shows that it is worth it
if cost >= objects.len() as f32 {
return Node {
@ -411,11 +415,11 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
kind: NodeEnum::Leaf,
};
}
// Avoid degenerate cases, and recenter the split inside [begin, end)
let split = if split == 0 || split >= (end - begin - 1) {
begin + (end - begin) / 2
// Avoid degenerate cases
let split = if split <= 1 || split >= (objects.len() - 1) {
(end - begin) / 2
} else {
begin + split
split
};
// Project along chosen axis
pdqselect::select_by(objects, split, |lhs, rhs| {
@ -424,8 +428,18 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
.expect("Can't use Nans in the SAH computation")
});
// Construct children recursively on [begin, split) and [split, end)
let left = Box::new(build_node(objects, begin, split, max_cap));
let right = Box::new(build_node(objects, split, end, max_cap));
let left = Box::new(build_node(
&mut objects[0..split],
begin,
begin + split,
max_cap,
));
let right = Box::new(build_node(
&mut objects[split..],
begin + split,
end,
max_cap,
));
// Build the node recursively
Node {
bounds: aabb,
@ -437,7 +451,7 @@ fn build_node<O: Intersected>(objects: &mut [O], begin: usize, end: usize, max_c
/// Returns the index at which to split for SAH, the Axis along which to split, and the calculated
/// cost.
fn compute_sah<O: Intersected>(
fn compute_sah<O: Accelerated>(
objects: &mut [O],
surface: f32,
max_cap: usize,
@ -481,7 +495,7 @@ fn compute_sah<O: Intersected>(
let cost = 1. / max_cap as f32
+ (left_count as f32 * left_surfaces[left_count - 1]
+ right_count as f32 * right_surfaces[right_count])
+ right_count as f32 * right_surfaces[right_count - 1])
/ surface;
if cost < min {

View file

@ -19,20 +19,44 @@ name = "pathtracer"
path = "src/main.rs"
[dependencies]
# Our own BVH implementation
beevee = { path = "../beevee" }
# Macro to implement arithmetic operators automagically
derive_more = "0.99.3"
# Transform interfaces into enums for better performance than dynamic dispatch
enum_dispatch = "0.2.1"
# Save an image to PNG
image = "0.23.0"
indicatif = "0.14.0"
# Random implementation, not part of the standard library in Rust
rand = "0.7"
# Parallelism utility functions
rayon = "1.3.0"
# YAML deserialization
serde_yaml = "0.8"
# Command-line argument parsing utilities
structopt = "0.3"
# OBJ format parser
tobj = "1.0"
# Fancy terminal progress bar
[dependencies.indicatif]
version = "0.14"
features = ["with_rayon"]
# Linear algebra basic operations and types
[dependencies.nalgebra]
version = "0.20.0"
features = ["serde-serialize"]
# YAML deserialization
[dependencies.serde]
version = "1.0"
features = ["derive"]

View file

@ -1,5 +1,5 @@
# Optional field
aliasing_limit: 10
shot_rays: 10
# Optional field
reflection_limit: 5

View file

@ -0,0 +1,29 @@
reflection_limit: 5
shot_rays: 50
camera:
origin: [0.0, 1.0, 0.0]
forward: [ 0.0, 0.0, 1.0]
up: [0.0, 1.0, 0.0]
fov: 60.0
distance_to_image: 1.0
x: 1080
y: 1080
lights:
ambients:
- color: {r: 0.1, g: 0.1, b: 0.1}
points:
- position: [0.0, 1.95, 3.2]
color: {r: 1.0, g: 1.0, b: 1.0}
meshes:
# FIXME: make the path relative to the YAML in some way?
# Easiest solution would be to chdir to the YAML's directory
- obj_file: "pathtracer/examples/objs/cornell-box.obj"
translation: [0.0, 0.0, 2.8]
rotation: [0, 180, 0]
steps:
- 10
- 25

View file

@ -0,0 +1,88 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
newmtl leftWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.63 0.065 0.05 # Red
Kd 0.63 0.065 0.05
Ks 0 0 0
Ke 0 0 0
newmtl rightWall
Ns 10.0000
Ni 1.5000
illum 2
Ka 0.14 0.45 0.091 # Green
Kd 0.14 0.45 0.091
Ks 0 0 0
Ke 0 0 0
newmtl floor
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl ceiling
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl backWall
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl shortBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl tallBox
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.725 0.71 0.68 # White
Kd 0.725 0.71 0.68
Ks 0 0 0
Ke 0 0 0
newmtl light
Ns 10.0000
Ni 1.0000
illum 2
Ka 0.78 0.78 0.78 # White
Kd 0.78 0.78 0.78
Ks 0 0 0
Ke 17 12 4

View file

@ -0,0 +1,168 @@
# The original Cornell Box in OBJ format.
# Note that the real box is not a perfect cube, so
# the faces are imperfect in this data set.
#
# Created by Guedis Cardenas and Morgan McGuire at Williams College, 2011
# Released into the Public Domain.
#
# http://graphics.cs.williams.edu/data
# http://www.graphics.cornell.edu/online/box/data.html
#
mtllib cornell-box.mtl
## Object floor
v -1.01 0.00 0.99
v 1.00 0.00 0.99
v 1.00 0.00 -1.04
v -0.99 0.00 -1.04
g floor
usemtl floor
f -4 -3 -2 -1
## Object ceiling
v -1.02 1.99 0.99
v -1.02 1.99 -1.04
v 1.00 1.99 -1.04
v 1.00 1.99 0.99
g ceiling
usemtl ceiling
f -4 -3 -2 -1
## Object backwall
v -0.99 0.00 -1.04
v 1.00 0.00 -1.04
v 1.00 1.99 -1.04
v -1.02 1.99 -1.04
g backWall
usemtl backWall
f -4 -3 -2 -1
## Object rightwall
v 1.00 0.00 -1.04
v 1.00 0.00 0.99
v 1.00 1.99 0.99
v 1.00 1.99 -1.04
g rightWall
usemtl rightWall
f -4 -3 -2 -1
## Object leftWall
v -1.01 0.00 0.99
v -0.99 0.00 -1.04
v -1.02 1.99 -1.04
v -1.02 1.99 0.99
g leftWall
usemtl leftWall
f -4 -3 -2 -1
## Object shortBox
usemtl shortBox
# Top Face
v 0.53 0.60 0.75
v 0.70 0.60 0.17
v 0.13 0.60 0.00
v -0.05 0.60 0.57
f -4 -3 -2 -1
# Left Face
v -0.05 0.00 0.57
v -0.05 0.60 0.57
v 0.13 0.60 0.00
v 0.13 0.00 0.00
f -4 -3 -2 -1
# Front Face
v 0.53 0.00 0.75
v 0.53 0.60 0.75
v -0.05 0.60 0.57
v -0.05 0.00 0.57
f -4 -3 -2 -1
# Right Face
v 0.70 0.00 0.17
v 0.70 0.60 0.17
v 0.53 0.60 0.75
v 0.53 0.00 0.75
f -4 -3 -2 -1
# Back Face
v 0.13 0.00 0.00
v 0.13 0.60 0.00
v 0.70 0.60 0.17
v 0.70 0.00 0.17
f -4 -3 -2 -1
# Bottom Face
v 0.53 0.00 0.75
v 0.70 0.00 0.17
v 0.13 0.00 0.00
v -0.05 0.00 0.57
f -12 -11 -10 -9
g shortBox
usemtl shortBox
## Object tallBox
usemtl tallBox
# Top Face
v -0.53 1.20 0.09
v 0.04 1.20 -0.09
v -0.14 1.20 -0.67
v -0.71 1.20 -0.49
f -4 -3 -2 -1
# Left Face
v -0.53 0.00 0.09
v -0.53 1.20 0.09
v -0.71 1.20 -0.49
v -0.71 0.00 -0.49
f -4 -3 -2 -1
# Back Face
v -0.71 0.00 -0.49
v -0.71 1.20 -0.49
v -0.14 1.20 -0.67
v -0.14 0.00 -0.67
f -4 -3 -2 -1
# Right Face
v -0.14 0.00 -0.67
v -0.14 1.20 -0.67
v 0.04 1.20 -0.09
v 0.04 0.00 -0.09
f -4 -3 -2 -1
# Front Face
v 0.04 0.00 -0.09
v 0.04 1.20 -0.09
v -0.53 1.20 0.09
v -0.53 0.00 0.09
f -4 -3 -2 -1
# Bottom Face
v -0.53 0.00 0.09
v 0.04 0.00 -0.09
v -0.14 0.00 -0.67
v -0.71 0.00 -0.49
f -8 -7 -6 -5
g tallBox
usemtl tallBox
## Object light
v -0.24 1.98 0.16
v -0.24 1.98 -0.22
v 0.23 1.98 -0.22
v 0.23 1.98 0.16
g light
usemtl light
f -4 -3 -2 -1

View file

@ -1,4 +1,4 @@
aliasing_limit: 10
shot_rays: 10
reflection_limit: 5
background: {r: 0.5, g: 0.5, b: 0.5}

View file

@ -2,13 +2,18 @@
use super::film::Film;
use crate::{Point, Vector};
use serde::{Deserialize, Deserializer};
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
/// Represent an abstract camera to observe the scene.
#[derive(Debug, PartialEq)]
#[serde(from = "SerializedCamera")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Camera {
/// Where the camera is set in the scene (i.e: its focal point).
/// Where the camera is set in the scene (i.e: the center of the lens).
origin: Point,
/// How far away is the camera's focal plane.
distance_to_image: f32,
/// The film to represent each pixel in the scene.
film: Film,
}
@ -37,15 +42,20 @@ impl Camera {
forward: Vector,
up: Vector,
fov: f32,
dist_to_image: f32,
distance_to_image: f32,
x: u32,
y: u32,
) -> Self {
let right = forward.cross(&up);
let center = origin + forward.normalize() * dist_to_image;
let screen_size = 2. * f32::tan(fov / 2.) * dist_to_image;
let film = Film::new(x, y, screen_size, center, up, right);
Camera { origin, film }
let screen_size = 2. * f32::tan(fov / 2.);
// Construct the film behind the camera, upside down
let center = origin - forward.normalize();
let film = Film::new(x, y, screen_size, center, -up, -right);
Camera {
origin,
distance_to_image,
film,
}
}
/// Get the `Camera`'s [`Film`].
@ -78,6 +88,24 @@ impl Camera {
pub fn origin(&self) -> &Point {
&self.origin
}
/// Get the Ray coming out of the camera at a given ratio on the image.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::Camera;
/// # use pathtracer::Point;
/// #
/// let cam = Camera::default();
/// let ray_ul = cam.ray_with_ratio(0., 0.); // Ray coming out of the upper-left pixel
/// let ray_lr = cam.ray_with_ratio(1., 1.); // Ray coming out of the lower-right pixel
/// ```
pub fn ray_with_ratio(&self, x: f32, y: f32) -> Ray {
let pixel = self.film().pixel_at_ratio(x, y);
let direction = Unit::new_normalize(self.origin() - pixel);
Ray::new(pixel, direction)
}
}
impl Default for Camera {
@ -140,16 +168,6 @@ impl From<SerializedCamera> for Camera {
}
}
impl<'de> Deserialize<'de> for Camera {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedCamera = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;
@ -157,7 +175,7 @@ mod test {
#[test]
fn new_works() {
let cam = Camera::new(
Point::new(-1., 0., 0.),
Point::new(1., 0., 0.),
Vector::new(1., 0., 0.),
Vector::new(0., 1., 0.),
2. * f32::atan(1.), /* 90° in radian */
@ -168,14 +186,15 @@ mod test {
assert_eq!(
cam,
Camera {
origin: Point::new(-1., 0., 0.),
origin: Point::new(1., 0., 0.),
distance_to_image: 1.,
film: Film::new(
1080,
1080,
2.,
Point::origin(),
Vector::new(0., 1., 0.),
Vector::new(0., 0., 1.),
-Vector::new(0., 1., 0.),
-Vector::new(0., 0., 1.),
)
}
)
@ -184,7 +203,7 @@ mod test {
#[test]
fn deserialization_works() {
let yaml = r#"
origin: [-1.0, 0.0, 0.0]
origin: [1.0, 0.0, 0.0]
forward: [ 1.0, 0.0, 0.0]
up: [0.0, 1.0, 0.0]
fov: 90.0
@ -196,14 +215,15 @@ mod test {
assert_eq!(
cam,
Camera {
origin: Point::new(-1., 0., 0.),
origin: Point::new(1., 0., 0.),
distance_to_image: 1.0,
film: Film::new(
1080,
1080,
2.,
Point::origin(),
Vector::new(0., 1., 0.),
Vector::new(0., 0., 1.),
-Vector::new(0., 1., 0.),
-Vector::new(0., 0., 1.),
)
}
)

View file

@ -70,6 +70,25 @@ impl LinearColor {
LinearColor { r, g, b }
}
/// Creates a new `Color` from a slice.
///
/// Panics if slice has less than 3 elements.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::LinearColor;
/// #
/// let color = LinearColor::from_slice(&[1.0, 0.0, 0.0]); // bright red!
/// ```
pub fn from_slice(s: &[f32]) -> Self {
LinearColor {
r: s[0],
g: s[1],
b: s[2],
}
}
#[must_use]
/// Clamps the color's RGB components between 0.0 and 1.0.
///

View file

@ -33,6 +33,9 @@ pub struct LightProperties {
/// The transparency or reflectivity properties.
#[serde(flatten)]
pub refl_trans: Option<ReflTransEnum>,
/// The emitted light from this object, only used for path-tracing rendering techniques
#[serde(default)]
pub emitted: LinearColor,
}
impl LightProperties {
@ -48,17 +51,20 @@ impl LightProperties {
/// LinearColor::new(0.25, 0.5, 1.),
/// LinearColor::new(0.75, 0.375, 0.125),
/// Some(ReflTransEnum::Reflectivity { coef: 0.5 }),
/// LinearColor::new(0., 0., 0.),
/// );
/// ```
pub fn new(
diffuse: LinearColor,
specular: LinearColor,
refl_trans: Option<ReflTransEnum>,
emitted: LinearColor,
) -> Self {
LightProperties {
diffuse,
specular,
refl_trans,
emitted,
}
}
}
@ -72,14 +78,20 @@ mod test {
let diffuse = LinearColor::new(0.25, 0.5, 1.);
let specular = LinearColor::new(0.75, 0.375, 0.125);
let refl_trans = Some(ReflTransEnum::Reflectivity { coef: 0.5 });
let properties =
LightProperties::new(diffuse.clone(), specular.clone(), refl_trans.clone());
let emitted = LinearColor::new(0., 1., 0.);
let properties = LightProperties::new(
diffuse.clone(),
specular.clone(),
refl_trans.clone(),
emitted.clone(),
);
assert_eq!(
properties,
LightProperties {
diffuse,
specular,
refl_trans,
emitted,
}
)
}
@ -96,7 +108,8 @@ mod test {
LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
None
None,
LinearColor::black(),
)
)
}
@ -118,7 +131,8 @@ mod test {
Some(ReflTransEnum::Transparency {
coef: 0.5,
index: 1.5
})
}),
LinearColor::black(),
)
)
}
@ -136,7 +150,27 @@ mod test {
LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
Some(ReflTransEnum::Reflectivity { coef: 0.25 })
Some(ReflTransEnum::Reflectivity { coef: 0.25 }),
LinearColor::black(),
)
)
}
#[test]
fn deserialization_with_emitted_works() {
let yaml = r#"
diffuse: {r: 1.0, g: 0.5, b: 0.25}
specular: {r: 0.25, g: 0.125, b: 0.75}
emitted: {r: 0.25, g: 0.5, b: 1.0}
"#;
let properties: LightProperties = serde_yaml::from_str(yaml).unwrap();
assert_eq!(
properties,
LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
None,
LinearColor::new(0.25, 0.5, 1.0),
)
)
}

View file

@ -12,6 +12,7 @@ pub mod core;
pub mod light;
pub mod material;
pub mod render;
pub mod scene;
pub mod serialize;
pub mod shape;
pub mod texture;

View file

@ -2,12 +2,13 @@ use super::{Light, SpatialLight};
use crate::core::LinearColor;
use crate::{Point, Vector};
use nalgebra::Unit;
use serde::{Deserialize, Deserializer};
use serde::Deserialize;
/// Represent a light emanating from a directed light-source, outputting rays in a cone.
///
/// The illumination cone cannot have an FOV over 180°.
#[derive(Debug, PartialEq)]
#[serde(from = "SerializedSpotLight")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct SpotLight {
position: Point,
direction: Unit<Vector>,
@ -82,16 +83,6 @@ impl From<SerializedSpotLight> for SpotLight {
}
}
impl<'de> Deserialize<'de> for SpotLight {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedSpotLight = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;

View file

@ -1,7 +1,19 @@
use pathtracer::render::Scene;
use pathtracer::render::{BidirectionalPathtracer, Pathtracer, Raytracer};
use pathtracer::scene::Scene;
use std::path::PathBuf;
use std::str;
use structopt::clap::arg_enum;
use structopt::StructOpt;
arg_enum! {
#[derive(Debug)]
enum RenderOption {
Raytracer,
Pathtracer,
Bidirectional,
}
}
#[derive(StructOpt, Debug)]
struct Options {
/// Input description for the scene to be rendered.
@ -10,6 +22,15 @@ struct Options {
/// Output image for the rendered scene.
#[structopt(short, long, parse(from_os_str), default_value = "scene.png")]
output: PathBuf,
/// Which renderer should be used on the input scene.
#[structopt(
short,
long,
possible_values = &RenderOption::variants(),
case_insensitive = true,
default_value = "Raytracer"
)]
renderer: RenderOption,
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
@ -17,7 +38,11 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let f = std::fs::File::open(options.input)?;
let scene: Scene = serde_yaml::from_reader(f)?;
let image = scene.render();
let image = match options.renderer {
RenderOption::Raytracer => Raytracer::new(scene).render(),
RenderOption::Pathtracer => Pathtracer::new(scene).render(),
RenderOption::Bidirectional => BidirectionalPathtracer::new(scene).render(),
};
image.save(options.output)?;
Ok(())

View file

@ -9,10 +9,11 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
#[derive(Debug, PartialEq, Deserialize)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub enum MaterialEnum {
#[serde(rename = "uniform")]
UniformMaterial,
TriangleMaterial,
}
/// Represent the physical light properties of an object in the scene;
@ -22,5 +23,8 @@ pub trait Material: std::fmt::Debug {
fn properties(&self, point: Point2D) -> LightProperties;
}
mod triangle;
pub use triangle::*;
mod uniform;
pub use uniform::*;

View file

@ -0,0 +1,33 @@
use super::Material;
use crate::core::{LightProperties, LinearColor, ReflTransEnum};
use crate::Point2D;
use serde::Deserialize;
/// Represent a material which interpolates between three points.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct TriangleMaterial {
/// The diffuse components.
diffuse: [LinearColor; 3],
/// The specular components.
specular: [LinearColor; 3],
/// The transparency or reflectivity properties; this is not interpolated.
#[serde(flatten)]
refl_trans: Option<ReflTransEnum>,
/// The amount of light emitted by the material, only used during path-tracing rendering.
emitted: [LinearColor; 3],
}
impl Material for TriangleMaterial {
fn properties(&self, point: Point2D) -> LightProperties {
let (u, v) = (point.x, point.y);
let sample = |param: &[LinearColor; 3]| -> LinearColor {
param[0].clone() * (1. - u - v) + param[1].clone() * u + param[2].clone() * v
};
let diffuse = sample(&self.diffuse);
let specular = sample(&self.specular);
let emitted = sample(&self.emitted);
LightProperties::new(diffuse, specular, self.refl_trans.clone(), emitted)
}
}
// FIXME: tests

View file

@ -24,6 +24,7 @@ impl UniformMaterial {
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// LinearColor::black(), // Emitted light
/// ),
/// );
/// ```
@ -50,6 +51,7 @@ mod test {
diffuse: LinearColor::new(0., 0.5, 0.),
specular: LinearColor::new(1., 1., 1.),
refl_trans: None,
emitted: LinearColor::black(),
};
let mat = UniformMaterial::new(properties.clone());
assert_eq!(mat, UniformMaterial { properties })
@ -61,6 +63,7 @@ mod test {
LinearColor::new(0., 0.5, 0.),
LinearColor::new(1., 1., 1.),
None,
LinearColor::black(),
);
let mat = UniformMaterial::new(properties.clone());
assert_eq!(mat.properties(Point2D::origin()), properties)
@ -79,7 +82,8 @@ mod test {
UniformMaterial::new(LightProperties::new(
LinearColor::new(1., 0.5, 0.25),
LinearColor::new(0.25, 0.125, 0.75),
Some(ReflTransEnum::Reflectivity { coef: 0.25 })
Some(ReflTransEnum::Reflectivity { coef: 0.25 }),
LinearColor::black(),
))
)
}

View file

@ -0,0 +1,50 @@
use super::super::Renderer;
use super::path::*;
use crate::scene::Scene;
use crate::{Point, Vector};
use image::RgbImage;
use nalgebra::Unit;
/// Render the [`Scene`] using Bidirectional-Pathtracing
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct BidirectionalPathtracer {
#[allow(unused)]
scene: Scene,
}
impl BidirectionalPathtracer {
/// Create a [`BidirectionalPathtracer`] renderer with the given [`Scene`]
///
/// [`BidirectionalPathtracer`]: struct.BidirectionalPathtracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
BidirectionalPathtracer { scene }
}
/// Render the [`Scene`] using Bidirectional-Pathtracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
todo!()
}
#[allow(unused)]
fn construct_path(&self, point: Point, _direction: Unit<Vector>) -> Path {
let mut res = Path::new(point);
for _ in 0..self.scene.reflection_limit {
// FIXME:
// * cast_ray: if no intersection, return the empty path
// * look-up information at intersection
// * append to path
// * start again with new origin
}
res
}
}
impl Renderer for BidirectionalPathtracer {
fn render(&self) -> RgbImage {
self.render()
}
}

View file

@ -0,0 +1,4 @@
mod path;
mod bidirectional_pathtracer;
pub use bidirectional_pathtracer::*;

View file

@ -0,0 +1,47 @@
use crate::core::LightProperties;
use crate::{Point, Vector};
use nalgebra::Unit;
pub struct PathPoint {
pub point: Point,
pub incident: Unit<Vector>,
pub normal: Unit<Vector>,
pub properties: LightProperties,
}
impl PathPoint {
#[allow(unused)]
pub fn new(
point: Point,
incident: Unit<Vector>,
normal: Unit<Vector>,
properties: LightProperties,
) -> Self {
PathPoint {
point,
incident,
normal,
properties,
}
}
}
pub struct Path {
pub origin: Point,
pub points: Vec<PathPoint>,
}
impl Path {
#[allow(unused)]
pub fn new(origin: Point) -> Self {
Path {
origin,
points: Vec::new(),
}
}
#[allow(unused)]
pub fn push_point(&mut self, new_point: PathPoint) {
self.points.push(new_point)
}
}

View file

@ -1,12 +1,22 @@
//! Rendering logic
//! Define the different kinds of renderers for use on a given scene.
use image::RgbImage;
pub mod light_aggregate;
pub use light_aggregate::*;
/// Each renderer implements this trait, to be called after being built.
pub trait Renderer {
/// Render the [`Scene`] using the chosen rendering technique.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
fn render(&self) -> RgbImage;
}
pub mod object;
pub use object::*;
mod bidirectional;
pub use bidirectional::*;
pub mod scene;
pub use scene::*;
mod pathtrace;
pub use pathtrace::*;
mod raytrace;
pub use raytrace::*;
pub(crate) mod progress;
pub(crate) mod utils;

View file

@ -0,0 +1,2 @@
mod pathtracer;
pub use self::pathtracer::*;

View file

@ -0,0 +1,135 @@
use indicatif::ProgressIterator;
use rayon::prelude::*;
use super::super::utils::{buffer_to_image, sample_hemisphere};
use super::super::Renderer;
use crate::{
core::LinearColor,
material::Material,
scene::{Object, Scene},
shape::Shape,
};
use beevee::ray::Ray;
use image::RgbImage;
/// Render the [`Scene`] using Pathtracing
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct Pathtracer {
#[allow(unused)]
scene: Scene,
}
impl Pathtracer {
/// Create a [`Pathtracer`] renderer with the given [`Scene`]
///
/// [`Pathtracer`]: struct.Pathtracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
Pathtracer { scene }
}
/// Render the [`Scene`] using Pathtracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
let (width, height) = (
self.scene.camera.film().width(),
self.scene.camera.film().height(),
);
let total = width * height;
let p = super::super::progress::get_passes_progressbar(self.scene.shot_rays);
// Ensure at least one round of shots
let (img_buf, _) = (0..self.scene.shot_rays.max(1))
.progress_with(p)
.map(|_| {
let mut buffer: Vec<LinearColor> = Vec::new();
buffer.resize_with(total as usize, LinearColor::black);
buffer
.par_chunks_mut(width as usize)
.enumerate()
.for_each(|(y, row)| {
for x in 0..width {
row[x as usize] += self.pixel_ray(x as f32, y as f32);
}
});
buffer
})
.fold(
{
let mut vec = Vec::new();
vec.resize_with(total as usize, LinearColor::black);
let count = 0usize;
(vec, count)
},
|(mut acc, count), buf| {
for (i, pixel) in buf.into_iter().enumerate() {
acc[i] += pixel;
}
let count = count + 1; // Because count is 0-indexed
if self.scene.steps.contains(&count) {
let image = buffer_to_image(&acc, count as u32, width, height);
image
.save(format!("{}_passes.png", count))
.expect("writing image failed!");
}
(acc, count) // Count has been updated previously
},
);
buffer_to_image(&img_buf, self.scene.shot_rays, width, height)
}
fn pixel_ray(&self, x: f32, y: f32) -> LinearColor {
let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
let ray = self.scene.camera.ray_with_ratio(x, y);
self.cast_ray(ray).map_or_else(
|| self.scene.background.clone(),
|(t, obj)| self.radiance(ray, t, obj, self.scene.reflection_limit),
)
}
fn radiance(&self, ray: Ray, t: f32, obj: &Object, limit: u32) -> LinearColor {
// This doesn't look great, but it works ¯\_(ツ)_/¯
let hit_pos = ray.origin + ray.direction.as_ref() * t;
let texel = obj.shape.project_texel(&hit_pos);
let properties = obj.material.properties(texel);
// If we are at the recursion limit, return the light emitted by the object
if limit == 0 {
return properties.emitted;
};
// Get BRDF
// FIXME: what about the material's albedo ?
let brdf = properties.diffuse;
// Pick a new direction
let normal = obj.shape.normal(&hit_pos);
let (new_direction, weight) = sample_hemisphere(normal);
let cos_new_ray = new_direction.dot(&normal);
// Calculate the incoming light along the new ray
let new_ray = Ray::new(hit_pos + new_direction.as_ref() * 0.001, new_direction);
let incoming = self
.cast_ray(new_ray)
.map_or_else(LinearColor::black, |(t, obj)| {
self.radiance(new_ray, t, obj, limit - 1)
});
// Put it all together
properties.emitted + (brdf * incoming * cos_new_ray * weight)
}
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
self.scene.bvh.walk(&ray, &self.scene.objects)
}
}
impl Renderer for Pathtracer {
fn render(&self) -> RgbImage {
self.render()
}
}

View file

@ -0,0 +1,26 @@
use indicatif::{ProgressBar, ProgressStyle};
pub fn get_progressbar(total: u64, style: &str) -> ProgressBar {
let pb = ProgressBar::new(total);
pb.set_draw_delta((total / 10000).max(1));
pb.set_style(ProgressStyle::default_bar().template(style));
pb
}
pub fn get_pixels_progressbar(total: u64) -> ProgressBar {
get_progressbar(
total,
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} pixels (ETA: {eta})",
)
}
pub fn get_passes_progressbar(total: u32) -> ProgressBar {
let pb = get_progressbar(
total as u64,
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} passes (ETA: {eta})",
);
pb.enable_steady_tick(1000);
pb
}

View file

@ -0,0 +1,2 @@
mod raytracer;
pub use self::raytracer::*;

View file

@ -1,102 +1,48 @@
//! Scene rendering logic
use super::{light_aggregate::LightAggregate, object::Object, utils::*};
use super::super::utils::*;
use super::super::Renderer;
use crate::scene::{Object, Scene};
use crate::{
core::{Camera, LightProperties, LinearColor, ReflTransEnum},
core::{LightProperties, LinearColor, ReflTransEnum},
material::Material,
shape::Shape,
texture::Texture,
{Point, Vector},
};
use beevee::{bvh::BVH, ray::Ray};
use beevee::ray::Ray;
use image::RgbImage;
use nalgebra::Unit;
use rand::prelude::thread_rng;
use rand::Rng;
use serde::{Deserialize, Deserializer};
/// Represent the scene being rendered.
pub struct Scene {
camera: Camera,
lights: LightAggregate,
objects: Vec<Object>,
bvh: BVH,
background: LinearColor,
aliasing_limit: u32,
reflection_limit: u32,
diffraction_index: f32,
/// Render the [`Scene`] using Raytracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub struct Raytracer {
scene: Scene,
}
impl Scene {
/// Creates a new `Scene`.
impl Raytracer {
/// Create a [`Raytracer`] renderer with the given [`Scene`]
///
/// # Examples
///
/// ```
/// # use pathtracer::core::{Camera, LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::render::{LightAggregate, Object, Scene};
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
/// #
/// let scene = Scene::new(
/// Camera::default(),
/// LightAggregate::empty(),
/// vec![
/// Object::new(
/// Sphere::new(Point::origin(), 1.0).into(),
/// UniformMaterial::new(
/// LightProperties::new(
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
/// ),
/// ],
/// LinearColor::black(), // Background color
/// 5, // aliasing limit
/// 3, // reflection recursion limit
/// 0.0, // diffraction index
/// );
/// ```
pub fn new(
camera: Camera,
lights: LightAggregate,
mut objects: Vec<Object>,
background: LinearColor,
aliasing_limit: u32,
reflection_limit: u32,
diffraction_index: f32,
) -> Self {
// NOTE(Antoine): fun fact: BVH::build stack overflows when given an empty slice :)
let bvh = BVH::build(&mut objects);
Scene {
camera,
lights,
objects,
bvh,
background,
aliasing_limit,
reflection_limit,
diffraction_index,
}
/// [`Raytracer`]: struct.Raytracer.html
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn new(scene: Scene) -> Self {
Raytracer { scene }
}
/// Render the scene into an image.
/// Render the [`Scene`] using Raytracing.
///
/// [`Scene`]: ../scene/scene/struct.Scene.html
pub fn render(&self) -> RgbImage {
let mut image = RgbImage::new(self.camera.film().width(), self.camera.film().height());
let mut image = RgbImage::new(
self.scene.camera.film().width(),
self.scene.camera.film().height(),
);
let total = (image.width() * image.height()) as u64;
let pb = indicatif::ProgressBar::new(total);
pb.set_draw_delta(total / 10000);
pb.set_style(indicatif::ProgressStyle::default_bar().template(
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {percent:>3}%: {pos}/{len} pixels (ETA: {eta})",
));
let pb = super::super::progress::get_pixels_progressbar(total);
let pixel_func = if self.aliasing_limit > 0 {
let pixel_func = if self.scene.shot_rays > 0 {
Self::anti_alias_pixel
} else {
Self::pixel
@ -121,18 +67,17 @@ impl Scene {
/// Get pixel color for (x, y) a pixel **coordinate**
fn pixel(&self, x: f32, y: f32) -> LinearColor {
let (x, y) = self.camera.film().pixel_ratio(x, y);
let pixel = self.camera.film().pixel_at_ratio(x, y);
let direction = Unit::new_normalize(pixel - self.camera.origin());
let indices = RefractionInfo::with_index(self.diffraction_index);
self.cast_ray(Ray::new(pixel, direction)).map_or_else(
|| self.background.clone(),
let (x, y) = self.scene.camera.film().pixel_ratio(x, y);
let indices = RefractionInfo::with_index(self.scene.diffraction_index);
let ray = self.scene.camera.ray_with_ratio(x, y);
self.cast_ray(ray).map_or_else(
|| self.scene.background.clone(),
|(t, obj)| {
self.color_at(
pixel + direction.as_ref() * t,
ray.origin + ray.direction.as_ref() * t,
obj,
direction,
self.reflection_limit,
ray.direction,
self.scene.reflection_limit,
indices,
)
},
@ -141,7 +86,7 @@ impl Scene {
/// Get pixel color with anti-aliasing
fn anti_alias_pixel(&self, x: f32, y: f32) -> LinearColor {
let range = 0..self.aliasing_limit;
let range = 0..self.scene.shot_rays;
let mut rng = thread_rng();
let acc: LinearColor = range
.map(|_| {
@ -151,11 +96,11 @@ impl Scene {
})
.map(LinearColor::clamp)
.sum();
acc / self.aliasing_limit as f32
acc / self.scene.shot_rays as f32
}
fn cast_ray(&self, ray: Ray) -> Option<(f32, &Object)> {
self.bvh.walk(&ray, &self.objects)
self.scene.bvh.walk(&ray, &self.scene.objects)
}
fn color_at(
@ -173,6 +118,9 @@ impl Scene {
let normal = object.shape.normal(&point);
let reflected_ray = reflected(incident_ray, normal);
// FIXME: change this to averaged sampled rays instead of visiting every light ?
// Indeed the path-tracing algorithm is good for calculating the radiance at a point
// But it should be used for reflection and refraction too...
let lighting = self.illuminate(point, object_color, &properties, normal, reflected_ray);
if properties.refl_trans.is_none() {
// Avoid calculating reflection when not needed
@ -261,7 +209,8 @@ impl Scene {
}
fn illuminate_ambient(&self, color: LinearColor) -> LinearColor {
self.lights
self.scene
.lights
.ambient_lights_iter()
.map(|light| color.clone() * light.illumination(&Point::origin()))
.map(LinearColor::clamp)
@ -275,11 +224,12 @@ impl Scene {
normal: Unit<Vector>,
reflected: Unit<Vector>,
) -> LinearColor {
self.lights
self.scene
.lights
.spatial_lights_iter()
.map(|light| {
let (direction, t) = light.to_source(&point);
let light_ray = Ray::new(point + 0.001 * direction.as_ref(), direction);
let light_ray = Ray::new(point + direction.as_ref() * 0.001, direction);
match self.cast_ray(light_ray) {
// Take shadows into account
Some((obstacle_t, _)) if obstacle_t < t => return LinearColor::black(),
@ -295,72 +245,8 @@ impl Scene {
}
}
#[derive(Debug, PartialEq, Deserialize)]
struct SerializedScene {
camera: Camera,
#[serde(default)]
lights: LightAggregate,
#[serde(default)]
objects: Vec<Object>,
#[serde(default)]
background: LinearColor,
#[serde(default)]
aliasing_limit: u32,
#[serde(default)]
reflection_limit: u32,
#[serde(default = "crate::serialize::default_identity")]
starting_diffraction: f32,
}
impl From<SerializedScene> for Scene {
fn from(scene: SerializedScene) -> Self {
Scene::new(
scene.camera,
scene.lights,
scene.objects,
scene.background,
scene.aliasing_limit,
scene.reflection_limit,
scene.starting_diffraction,
)
}
}
impl<'de> Deserialize<'de> for Scene {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedScene = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn deserialization_works() {
let yaml = std::include_str!("../../examples/scene.yaml");
let _: Scene = serde_yaml::from_str(yaml).unwrap();
// FIXME: actually test the equality ?
}
#[test]
#[ignore] // stack overflow because of BVH :(
fn bvh_fails() {
use crate::core::Camera;
use crate::render::{LightAggregate, Scene};
let _scene = Scene::new(
Camera::default(),
LightAggregate::empty(),
Vec::new(), // Objects list
LinearColor::black(), // Background color
5, // aliasing limit
3, // reflection recursion limit
0.0, // diffraction index
);
impl Renderer for Raytracer {
fn render(&self) -> RgbImage {
self.render()
}
}

View file

@ -1,5 +1,9 @@
use crate::core::LinearColor;
use crate::Vector;
use image::RgbImage;
use nalgebra::Unit;
use rand::prelude::thread_rng;
use rand::Rng;
pub fn reflected(incident: Unit<Vector>, normal: Unit<Vector>) -> Unit<Vector> {
let proj = incident.dot(&normal);
@ -65,3 +69,68 @@ impl RefractionInfo {
std::mem::swap(&mut self.old_index, &mut self.new_index)
}
}
/// Returns a random ray in the hemisphere described by a normal unit-vector, and the probability
/// to have picked that direction.
pub fn sample_hemisphere(normal: Unit<Vector>) -> (Unit<Vector>, f32) {
let mut rng = thread_rng();
let azimuth = rng.gen::<f32>() * std::f32::consts::PI * 2.;
// Cosine weighted importance sampling
let cos_elevation: f32 = rng.gen();
let sin_elevation = f32::sqrt(1. - cos_elevation * cos_elevation);
let x = sin_elevation * azimuth.cos();
let y = cos_elevation;
let z = sin_elevation * azimuth.sin();
// Calculate an orthonormal basis, defined by (normal_b, normal, normal_t)
// Pay attention to degenerate cases when (y, z) is small for use with cross product
let normal_t = if normal.x.abs() > normal.y.abs() {
Vector::new(normal.z, 0., -normal.x).normalize()
} else {
Vector::new(0., -normal.z, normal.y).normalize()
};
let normal_b = normal.cross(&normal_t);
// Perform the matrix calculation by hand...
let scattered = Unit::new_normalize(Vector::new(
x * normal_b.x + y * normal.x + z * normal_t.x,
x * normal_b.y + y * normal.y + z * normal_t.y,
x * normal_b.z + y * normal.z + z * normal_t.z,
));
// The probability to have picked the ray is inversely proportional to cosine of the angle with
// the normal
(scattered, (1. / scattered.dot(&normal)).min(f32::MAX))
}
pub fn buffer_to_image(buffer: &[LinearColor], passes: u32, width: u32, height: u32) -> RgbImage {
let mut image = RgbImage::new(width, height);
for (x, y, pixel) in image.enumerate_pixels_mut() {
let i = x as usize + y as usize * width as usize;
*pixel = (buffer[i].clone() / passes as f32).into();
}
image
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn sample_hemisphere_work() {
// NOTE(Bruno): should use some test-case generation for failure-reproduction purposes...
let mut rng = thread_rng();
for _ in 0..100 {
let normal = Unit::new_normalize(Vector::new(rng.gen(), rng.gen(), rng.gen()));
for _ in 0..100 {
let (sample, proportion) = sample_hemisphere(normal);
let cos_angle = normal.dot(&sample);
assert!(cos_angle >= 0.);
assert!(1. / cos_angle - proportion < std::f32::EPSILON);
}
}
}
}

View file

@ -23,7 +23,7 @@ impl LightAggregate {
/// # Examples
///
/// ```
/// # use pathtracer::render::LightAggregate;
/// # use pathtracer::scene::LightAggregate;
/// #
/// let la = LightAggregate::empty();
/// assert_eq!(la.ambient_lights_iter().count(), 0);
@ -40,7 +40,7 @@ impl LightAggregate {
/// # Examples
///
/// ```
/// # use pathtracer::render::LightAggregate;
/// # use pathtracer::scene::LightAggregate;
/// #
/// let la = LightAggregate::new(
/// Vec::new(),

View file

@ -0,0 +1,182 @@
use std::convert::TryFrom;
use std::path::PathBuf;
use nalgebra::{Similarity3, Unit, VectorSlice3};
use serde::Deserialize;
use tobj::{self, load_obj};
use super::Object;
use crate::{
core::{LightProperties, LinearColor},
material::{MaterialEnum, UniformMaterial},
shape::{InterpolatedTriangle, ShapeEnum, Triangle},
texture::{TextureEnum, UniformTexture},
Point, Vector,
};
/// Represent a mesh of objects.
#[serde(try_from = "Wavefront")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Mesh {
/// The shapes composing the mesh
pub(crate) shapes: Vec<Object>,
}
#[derive(Debug, PartialEq, Deserialize)]
pub(crate) struct Wavefront {
pub obj_file: PathBuf,
#[serde(default = "nalgebra::zero")]
translation: Vector,
#[serde(default = "nalgebra::zero")]
rotation: Vector,
#[serde(default = "crate::serialize::coefficient::default_identity")]
scale: f32,
}
fn parse_float3(s: &str) -> Result<[f32; 3], tobj::LoadError> {
let mut res = [0.0, 0.0, 0.0];
let mut count = 0;
for (i, s) in s.split_whitespace().enumerate() {
if count == 3 {
return Err(tobj::LoadError::MaterialParseError);
}
res[i] = s.parse().map_err(|_| tobj::LoadError::MaterialParseError)?;
count += 1;
}
if count < 3 {
return Err(tobj::LoadError::MaterialParseError);
}
Ok(res)
}
impl TryFrom<Wavefront> for Mesh {
type Error = tobj::LoadError;
fn try_from(wavefront: Wavefront) -> Result<Mesh, Self::Error> {
let mut shapes = Vec::new();
let (models, materials) = load_obj(&wavefront.obj_file)?;
// The object to world transformation matrix
let transform = Similarity3::new(
wavefront.translation,
wavefront.rotation * std::f32::consts::PI / 180., // From degrees to radians
wavefront.scale,
);
for model in models {
let mesh = &model.mesh;
// mesh.indices contains the vertex indices. Each group of 3 indices
// forms a triangle, so we iterate over them 3 by 3.
for i in 0..(mesh.indices.len() / 3) {
let (a, b, c) = (
mesh.indices[i * 3] as usize,
mesh.indices[i * 3 + 1] as usize,
mesh.indices[i * 3 + 2] as usize,
);
let pos_a = transform * Point::from_slice(&mesh.positions[(a * 3)..(a * 3 + 3)]);
let pos_b = transform * Point::from_slice(&mesh.positions[(b * 3)..(b * 3 + 3)]);
let pos_c = transform * Point::from_slice(&mesh.positions[(c * 3)..(c * 3 + 3)]);
let triangle: ShapeEnum = if mesh.normals.is_empty() {
Triangle::new(pos_a, pos_b, pos_c).into()
} else {
// We apply the (arguably useless) scaling to the vectors in case it is
// negative, which would invert their direction
let norm_a = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(a * 3)..(a * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
let norm_b = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(b * 3)..(b * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
let norm_c = {
let vec: Vector =
VectorSlice3::from_slice(&mesh.normals[(c * 3)..(c * 3 + 3)]).into();
Unit::new_normalize(transform * vec)
};
InterpolatedTriangle::new(pos_a, pos_b, pos_c, norm_a, norm_b, norm_c).into()
};
// FIXME: handle material
let (material, texture): (MaterialEnum, TextureEnum) =
if let Some(mat_id) = mesh.material_id {
let mesh_mat = &materials[mat_id];
let diffuse = LinearColor::from_slice(&mesh_mat.ambient[..]);
let specular = LinearColor::from_slice(&mesh_mat.ambient[..]);
let emitted = mesh_mat
.unknown_param
.get("Ke")
// we want a default if "Ke" isn't provided, but we
// want an error if it is provided but its value
// doesn't parse
.map_or(Ok(LinearColor::black()), |ke| {
parse_float3(ke).map(|vals| LinearColor::from_slice(&vals))
})?;
let material = UniformMaterial::new(LightProperties::new(
diffuse.clone(),
specular,
// FIXME: material.dissolve is supposed to be "the alpha term"
// Needs translation to our ReflTransEnum
None,
emitted,
));
// we only handle uniform textures
let texture = UniformTexture::new(diffuse);
(material.into(), texture.into())
} else {
// FIXME: should we accept this, and use a default
// Material, or throw a LoadError
(
UniformMaterial::new(LightProperties::new(
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(0.1, 0.1, 0.1),
None,
LinearColor::black(),
))
.into(),
UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
)
};
shapes.push(Object::new(triangle, material, texture));
}
}
Ok(Mesh { shapes })
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_float3_works() {
assert_eq!(parse_float3("1 1 1"), Ok([1., 1., 1.]));
assert_eq!(
parse_float3("1 1"),
Err(tobj::LoadError::MaterialParseError)
);
assert_eq!(
parse_float3("1 1 1 1"),
Err(tobj::LoadError::MaterialParseError)
);
}
}

167 pathtracer/src/scene/mod.rs Normal file
View file

@ -0,0 +1,167 @@
//! Description of the scene.
use beevee::bvh::BVH;
use serde::Deserialize;
use crate::core::{Camera, LinearColor};
pub mod light_aggregate;
pub use light_aggregate::*;
mod mesh;
pub use mesh::*;
pub mod object;
pub use object::*;
/// Represent the scene being rendered.
#[serde(from = "SerializedScene")]
#[derive(Debug, PartialEq, Deserialize)]
pub struct Scene {
pub(crate) camera: Camera,
pub(crate) lights: LightAggregate,
pub(crate) objects: Vec<Object>,
pub(crate) bvh: BVH,
pub(crate) background: LinearColor,
pub(crate) shot_rays: u32,
pub(crate) reflection_limit: u32,
pub(crate) diffraction_index: f32,
pub(crate) steps: Vec<usize>,
}
impl Scene {
#[allow(clippy::too_many_arguments)]
/// Creates a new `Scene`.
///
/// # Examples
///
/// ```
/// # use pathtracer::core::{Camera, LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::scene::{LightAggregate, Object, Scene};
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
/// #
/// let scene = Scene::new(
/// Camera::default(),
/// LightAggregate::empty(),
/// vec![
/// Object::new(
/// Sphere::new(Point::origin(), 1.0).into(),
/// UniformMaterial::new(
/// LightProperties::new(
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// LinearColor::black(), // Emitted light
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
/// ),
/// ],
/// LinearColor::black(), // Background color
/// 5, // amount of rays shot per pixel
/// 3, // reflection recursion limit
/// 0.0, // diffraction index
/// Vec::new(), // steps
/// );
/// ```
pub fn new(
camera: Camera,
lights: LightAggregate,
mut objects: Vec<Object>,
background: LinearColor,
shot_rays: u32,
reflection_limit: u32,
diffraction_index: f32,
steps: Vec<usize>,
) -> Self {
let bvh = BVH::build(&mut objects);
Scene {
camera,
lights,
objects,
bvh,
background,
shot_rays,
reflection_limit,
diffraction_index,
steps,
}
}
}
#[derive(Debug, PartialEq, Deserialize)]
#[serde(deny_unknown_fields)]
struct SerializedScene {
camera: Camera,
#[serde(default)]
lights: LightAggregate,
#[serde(default)]
objects: Vec<Object>,
#[serde(default)]
meshes: Vec<Mesh>,
#[serde(default)]
background: LinearColor,
#[serde(default)]
shot_rays: u32,
#[serde(default)]
reflection_limit: u32,
#[serde(default = "crate::serialize::default_identity")]
starting_diffraction: f32,
#[serde(default)]
steps: Vec<usize>,
}
impl From<SerializedScene> for Scene {
fn from(mut scene: SerializedScene) -> Self {
let mut flattened_meshes: Vec<Object> = scene
.meshes
.into_iter()
.map(|m| m.shapes)
.flatten()
.collect();
scene.objects.append(&mut flattened_meshes);
Scene::new(
scene.camera,
scene.lights,
scene.objects,
scene.background,
scene.shot_rays,
scene.reflection_limit,
scene.starting_diffraction,
scene.steps,
)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn deserialization_works() {
let yaml = std::include_str!("../../examples/scene.yaml");
let _: Scene = serde_yaml::from_str(yaml).unwrap();
// FIXME: actually test the equality?
}
#[test]
fn empty_scene() {
use crate::core::Camera;
use crate::scene::{LightAggregate, Scene};
let _scene = Scene::new(
Camera::default(),
LightAggregate::empty(),
Vec::new(), // Objects list
LinearColor::black(), // Background color
5, // amount of rays shot per pixel
3, // reflection recursion limit
0.0, // diffraction index
Vec::new(), // steps
);
}
}


@ -1,7 +1,7 @@
//! Logic for the scene objects
use crate::material::MaterialEnum;
use crate::shape::{Shape, ShapeEnum};
use crate::shape::ShapeEnum;
use crate::texture::TextureEnum;
use crate::Point;
use beevee::{
@ -12,7 +12,7 @@ use beevee::{
use serde::Deserialize;
/// An object being rendered in the scene.
#[derive(Debug, PartialEq, Deserialize)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct Object {
/// The `Object`'s physical shape
pub shape: ShapeEnum,
@ -30,7 +30,7 @@ impl Object {
/// ```
/// # use pathtracer::core::{LightProperties, LinearColor};
/// # use pathtracer::material::UniformMaterial;
/// # use pathtracer::render::Object;
/// # use pathtracer::scene::Object;
/// # use pathtracer::shape::Sphere;
/// # use pathtracer::texture::UniformTexture;
/// # use pathtracer::Point;
@ -42,6 +42,7 @@ impl Object {
/// LinearColor::new(1.0, 0.0, 0.0), // diffuse component
/// LinearColor::new(0.0, 0.0, 0.0), // specular component
/// None,
/// LinearColor::black(), // Emitted light
/// ),
/// ).into(),
/// UniformTexture::new(LinearColor::new(0.5, 0.5, 0.5)).into(),
@ -87,6 +88,7 @@ mod test {
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(1., 1., 1.),
None,
LinearColor::black(),
));
let texture = UniformTexture::new(LinearColor::new(0.25, 0.5, 1.));
Object::new(shape.into(), material.into(), texture.into())
@ -99,6 +101,7 @@ mod test {
LinearColor::new(0.5, 0.5, 0.5),
LinearColor::new(1., 1., 1.),
None,
LinearColor::black(),
));
let texture = UniformTexture::new(LinearColor::new(0.25, 0.5, 1.));
assert_eq!(


@ -0,0 +1,158 @@
use super::triangle::Triangle;
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
/// Represent a triangle with interpolated normals inside the scene.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct InterpolatedTriangle {
#[serde(flatten)]
tri: Triangle,
// FIXME: serialize with unit
normals: [Unit<Vector>; 3],
}
impl InterpolatedTriangle {
/// Creates a new `InterpolatedTriangle` from 3 [`Point`]s and 3 [`Vector`]s.
///
/// [`Point`]: ../../type.Point.html
/// [`Vector`]: ../../type.Vector.html
///
/// # Examples
///
/// ```
/// # use pathtracer::shape::InterpolatedTriangle;
/// # use pathtracer::{Point, Vector};
/// #
/// let t = InterpolatedTriangle::new(
/// Point::new(1.0, 0.0, 0.0),
/// Point::new(0.0, 1.0, 0.0),
/// Point::new(0.0, 0.0, 1.0),
/// Vector::x_axis(),
/// Vector::y_axis(),
/// Vector::z_axis(),
/// );
/// ```
pub fn new(
c0: Point,
c1: Point,
c2: Point,
n0: Unit<Vector>,
n1: Unit<Vector>,
n2: Unit<Vector>,
) -> Self {
InterpolatedTriangle {
tri: Triangle::new(c0, c1, c2),
normals: [n0, n1, n2],
}
}
}
impl Shape for InterpolatedTriangle {
fn normal(&self, point: &Point) -> Unit<Vector> {
let (u, v) = {
let c = self.tri.barycentric(point);
(c.x, c.y)
};
let interpol = self.normals[0].as_ref() * (1. - u - v)
+ self.normals[1].as_ref() * u
+ self.normals[2].as_ref() * v;
Unit::new_normalize(interpol)
}
fn project_texel(&self, point: &Point) -> Point2D {
self.tri.project_texel(point)
}
}
impl Bounded for InterpolatedTriangle {
fn aabb(&self) -> AABB {
self.tri.aabb()
}
fn centroid(&self) -> Point {
self.tri.centroid()
}
}
impl Intersected for InterpolatedTriangle {
fn intersect(&self, ray: &Ray) -> Option<f32> {
self.tri.intersect(ray)
}
}
#[cfg(test)]
mod test {
use super::*;
fn simple_triangle() -> InterpolatedTriangle {
InterpolatedTriangle::new(
Point::origin(),
Point::new(0., 1., 1.),
Point::new(0., 1., 0.),
Vector::x_axis(),
Vector::y_axis(),
Vector::z_axis(),
)
}
#[test]
fn normal_interpolation_at_c0_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::origin());
assert_eq!(normal, Vector::x_axis());
}
#[test]
fn normal_interpolation_at_c1_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::new(0., 1., 1.));
assert_eq!(normal, Vector::y_axis());
}
#[test]
fn normal_interpolation_at_c2_works() {
let triangle = simple_triangle();
let normal = triangle.normal(&Point::new(0., 1., 0.));
assert_eq!(normal, Vector::z_axis());
}
#[test]
fn normal_interpolation_at_center_works() {
let triangle = simple_triangle();
let center = Point::new(0., 2. / 3., 1. / 3.);
let normal = triangle.normal(&center);
let expected = Unit::new_normalize(Vector::new(1., 1., 1.));
assert!((normal.as_ref() - expected.as_ref()).magnitude() < 1e-5)
}
#[test]
fn deserialization_works() {
let yaml = r#"
corners:
- [0.0, 0.0, 0.0]
- [0.0, 1.0, 1.0]
- [0.0, 1.0, 0.0]
normals:
- [1.0, 0.0, 0.0]
- [0.0, 1.0, 0.0]
- [0.0, 0.0, 1.0]
"#;
let triangle: InterpolatedTriangle = serde_yaml::from_str(yaml).unwrap();
assert_eq!(
triangle,
InterpolatedTriangle::new(
Point::origin(),
Point::new(0., 1., 1.),
Point::new(0., 1., 0.),
Vector::x_axis(),
Vector::y_axis(),
Vector::z_axis(),
)
)
}
}
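The `normal` implementation above is plain barycentric blending of the three vertex normals. A self-contained sketch of that blend, as a hypothetical free function using the same nalgebra types (not part of the diff):

use nalgebra::{Unit, Vector3};

type Vector = Vector3<f32>;

/// Blend three unit normals with barycentric weights (1 - u - v, u, v),
/// then renormalize, exactly as `InterpolatedTriangle::normal` does.
fn interpolate_normal(normals: &[Unit<Vector>; 3], u: f32, v: f32) -> Unit<Vector> {
    let blended = normals[0].as_ref() * (1. - u - v)
        + normals[1].as_ref() * u
        + normals[2].as_ref() * v;
    Unit::new_normalize(blended)
}

fn main() {
    // At the centroid of the test triangle, u = v = 1/3, so the three axes
    // blend to (1, 1, 1) before renormalization, matching
    // `normal_interpolation_at_center_works`.
    let axes = [Vector::x_axis(), Vector::y_axis(), Vector::z_axis()];
    let normal = interpolate_normal(&axes, 1. / 3., 1. / 3.);
    let expected = Unit::new_normalize(Vector::new(1., 1., 1.));
    assert!((normal.as_ref() - expected.as_ref()).magnitude() < 1e-5);
}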


@ -0,0 +1,9 @@
use super::{InterpolatedTriangle, Shape, Triangle};
use crate::material::{Material, TriangleMaterial, UniformMaterial};
use crate::texture::{Texture, TriangleTexture, UniformTexture};
use crate::Point;
use beevee::{
aabb::{Bounded, AABB},
bvh::Intersected,
ray::Ray,
};


@ -14,42 +14,53 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
#[derive(Debug, PartialEq, Deserialize)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub enum ShapeEnum {
Sphere,
Triangle,
InterpolatedTriangle,
}
// FIXME: this has to be written by hand due to a limitation of `enum_dispatch` on super traits
impl Bounded for ShapeEnum {
fn aabb(&self) -> AABB {
match self {
ShapeEnum::Sphere(s) => s.aabb(),
ShapeEnum::Triangle(s) => s.aabb(),
ShapeEnum::InterpolatedTriangle(s) => s.aabb(),
}
}
fn centroid(&self) -> Point {
match self {
ShapeEnum::Sphere(s) => s.centroid(),
ShapeEnum::Triangle(s) => s.centroid(),
ShapeEnum::InterpolatedTriangle(s) => s.centroid(),
}
}
}
impl Intersected for ShapeEnum {
fn intersect(&self, ray: &Ray) -> Option<f32> {
match self {
ShapeEnum::Sphere(s) => s.intersect(ray),
ShapeEnum::Triangle(s) => s.intersect(ray),
ShapeEnum::InterpolatedTriangle(s) => s.intersect(ray),
}
}
}
/// Represent an abstract shape inside the scene.
#[enum_dispatch::enum_dispatch(ShapeEnum)]
pub trait Shape: std::fmt::Debug {
/// Return the distance at which the object intersects with the ray, or None if it does not.
fn intersect(&self, ray: &Ray) -> Option<f32>;
pub trait Shape: std::fmt::Debug + Intersected {
/// Return the unit vector corresponding to the normal at this point of the shape.
fn normal(&self, point: &Point) -> Unit<Vector>;
/// Project the point from the shape's surface to its texel coordinates.
fn project_texel(&self, point: &Point) -> Point2D;
/// Enclose the `Shape` in an axis-aligned bounding-box.
fn aabb(&self) -> AABB;
/// Return the centroid of the shape.
fn centroid(&self) -> Point;
}
impl Bounded for dyn Shape {
fn aabb(&self) -> AABB {
self.aabb()
}
fn centroid(&self) -> Point {
self.centroid()
}
}
impl Intersected for dyn Shape {
fn intersect(&self, ray: &Ray) -> Option<f32> {
self.intersect(ray)
}
}
mod interpolated_triangle;
pub use interpolated_triangle::*;
mod sphere;
pub use sphere::*;
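With `Intersected` (and, through it, `Bounded`) now a supertrait of `Shape`, adding a shape means writing all three impl blocks plus a new arm in each hand-written match above. A skeleton for a hypothetical `Plane` shape, purely to illustrate that plumbing; the geometry is left as `todo!()` and none of this is part of the diff:

use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;

/// Hypothetical shape, not part of the crate.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct Plane {
    point: Point,
    normal: Unit<Vector>,
}

impl Bounded for Plane {
    fn aabb(&self) -> AABB {
        todo!("an infinite plane needs a clamped bounding box")
    }

    fn centroid(&self) -> Point {
        self.point
    }
}

impl Intersected for Plane {
    fn intersect(&self, _ray: &Ray) -> Option<f32> {
        todo!("ray/plane intersection")
    }
}

impl Shape for Plane {
    fn normal(&self, _: &Point) -> Unit<Vector> {
        self.normal
    }

    fn project_texel(&self, _: &Point) -> Point2D {
        todo!("planar texel projection")
    }
}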


@ -1,6 +1,7 @@
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::AABB;
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::Deserialize;
@ -38,6 +39,38 @@ impl Sphere {
}
impl Shape for Sphere {
fn normal(&self, point: &Point) -> Unit<Vector> {
let delt = if self.inverted {
self.center - point
} else {
point - self.center
};
Unit::new_normalize(delt)
}
fn project_texel(&self, point: &Point) -> Point2D {
// Project the sphere on the XY-plane
Point2D::new(
0.5 + (point.x - self.center.x) / (2. * self.radius),
0.5 + (point.y - self.center.y) / (2. * self.radius),
)
}
}
impl Bounded for Sphere {
fn aabb(&self) -> AABB {
let delt = Vector::new(self.radius, self.radius, self.radius);
let min = self.center - delt;
let max = self.center + delt;
AABB::with_bounds(min, max)
}
fn centroid(&self) -> Point {
self.center
}
}
impl Intersected for Sphere {
fn intersect(&self, ray: &Ray) -> Option<f32> {
use std::mem;
@ -67,34 +100,6 @@ impl Shape for Sphere {
Some(t_0)
}
}
fn normal(&self, point: &Point) -> Unit<Vector> {
let delt = if self.inverted {
self.center - point
} else {
point - self.center
};
Unit::new_normalize(delt)
}
fn project_texel(&self, point: &Point) -> Point2D {
// Project the sphere on the XY-plane
Point2D::new(
0.5 + (point.x - self.center.x) / (2. * self.radius),
0.5 + (point.y - self.center.y) / (2. * self.radius),
)
}
fn aabb(&self) -> AABB {
let delt = Vector::new(self.radius, self.radius, self.radius);
let min = self.center - delt;
let max = self.center + delt;
AABB::with_bounds(min, max)
}
fn centroid(&self) -> Point {
self.center
}
}
#[cfg(test)]


@ -1,12 +1,14 @@
use super::Shape;
use crate::{Point, Point2D, Vector};
use beevee::aabb::AABB;
use beevee::aabb::{Bounded, AABB};
use beevee::bvh::Intersected;
use beevee::ray::Ray;
use nalgebra::Unit;
use serde::{Deserialize, Deserializer};
use serde::Deserialize;
/// Represent a triangle inside the scene.
#[derive(Clone, Debug, PartialEq)]
#[serde(from = "SerializedTriangle")]
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct Triangle {
c0: Point,
c0c1: Vector,
@ -38,7 +40,7 @@ impl Triangle {
}
}
fn barycentric(&self, point: &Point) -> Point2D {
pub(crate) fn barycentric(&self, point: &Point) -> Point2D {
let c0_pos = point - self.c0;
// P - A = u * (B - A) + v * (C - A)
// (C - A) = v0 is c0c2
@ -58,6 +60,29 @@ impl Triangle {
}
impl Shape for Triangle {
fn normal(&self, _: &Point) -> Unit<Vector> {
Unit::new_normalize(self.c0c1.cross(&self.c0c2))
}
fn project_texel(&self, point: &Point) -> Point2D {
self.barycentric(point)
}
}
impl Bounded for Triangle {
fn aabb(&self) -> AABB {
AABB::empty()
.grow(&self.c0)
.grow(&(self.c0 + self.c0c1))
.grow(&(self.c0 + self.c0c2))
}
fn centroid(&self) -> Point {
self.c0 + (self.c0c1 + self.c0c2) / 2.
}
}
impl Intersected for Triangle {
fn intersect(&self, ray: &Ray) -> Option<f32> {
let pvec = ray.direction.cross(&self.c0c2);
let det = self.c0c1.dot(&pvec);
@ -88,25 +113,6 @@ impl Shape for Triangle {
Some(t)
}
}
fn normal(&self, _: &Point) -> Unit<Vector> {
Unit::new_normalize(self.c0c1.cross(&self.c0c2))
}
fn project_texel(&self, point: &Point) -> Point2D {
self.barycentric(point)
}
fn aabb(&self) -> AABB {
AABB::empty()
.grow(&self.c0)
.grow(&(self.c0 + self.c0c1))
.grow(&(self.c0 + self.c0c2))
}
fn centroid(&self) -> Point {
self.c0 + (self.c0c1 + self.c0c2) / 2.
}
}
#[derive(Debug, Deserialize)]
@ -124,16 +130,6 @@ impl From<SerializedTriangle> for Triangle {
}
}
impl<'de> Deserialize<'de> for Triangle {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let cam: SerializedTriangle = Deserialize::deserialize(deserializer)?;
Ok(cam.into())
}
}
#[cfg(test)]
mod test {
use super::*;


@ -9,10 +9,11 @@ use serde::Deserialize;
#[serde(rename_all = "lowercase")]
#[allow(missing_docs)]
#[enum_dispatch::enum_dispatch]
#[derive(Debug, PartialEq, Deserialize)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub enum TextureEnum {
#[serde(rename = "uniform")]
UniformTexture,
TriangleTexture,
}
/// Represent an object's texture.
@ -22,5 +23,8 @@ pub trait Texture: std::fmt::Debug {
fn texel_color(&self, point: Point2D) -> LinearColor;
}
mod triangle;
pub use triangle::*;
mod uniform;
pub use uniform::*;


@ -0,0 +1,23 @@
use super::{uniform::UniformTexture, Texture};
use crate::core::LinearColor;
use crate::Point2D;
use serde::Deserialize;
/// Represent a texture which interpolates between three points.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct TriangleTexture {
/// The texture at each point
textures: [UniformTexture; 3],
}
impl Texture for TriangleTexture {
fn texel_color(&self, point: Point2D) -> LinearColor {
let (u, v) = (point.x, point.y);
let sum = self.textures[0].texel_color(point) * (1. - u - v)
+ self.textures[1].texel_color(point) * u
+ self.textures[2].texel_color(point) * v;
// the barycentric weights already sum to one, so the weighted sum is the
// interpolated color itself
sum
}
}
// FIXME: tests
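A minimal test sketch for that FIXME (hypothetical, not part of the diff), assuming the barycentric weights are applied directly as in `texel_color` above:

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn texel_color_picks_second_corner_at_u_1() {
        let texture = TriangleTexture {
            textures: [
                UniformTexture::new(LinearColor::new(1., 0., 0.)),
                UniformTexture::new(LinearColor::new(0., 1., 0.)),
                UniformTexture::new(LinearColor::new(0., 0., 1.)),
            ],
        };
        // At (u, v) = (1, 0) the weights are (0, 1, 0): only the second
        // corner's texture contributes.
        assert_eq!(
            texture.texel_color(Point2D::new(1., 0.)),
            LinearColor::new(0., 1., 0.)
        );
    }
}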