COSC-3P93-Project/Step 3/src/engine/raytracing.cpp


/*
* Created by Brett Terpstra 6920201 on 16/10/22.
* Copyright (c) 2022 Brett Terpstra. All Rights Reserved.
*/
#include "engine/raytracing.h"
#include <queue>
#include <functional>
2022-10-24 17:06:20 -04:00
#include <utility>
#include <engine/util/debug.h>
2022-10-28 01:44:23 -04:00
extern bool* haltExecution;
extern bool* pauseRaytracing;
extern bool* haltRaytracing;
namespace Raytracing {
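// projectRay maps a pixel coordinate onto the camera's viewport: the pixel is normalized into
// [0, 1] viewport space, then used to interpolate along the viewport's horizontal and vertical
// axes starting from the image-plane origin computed by lookAt()/view().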
Ray Camera::projectRay(PRECISION_TYPE x, PRECISION_TYPE y) {
// transform x and y from image pixel coordinates into normalized [0, 1] viewport coordinates.
auto transformedX = x / (image.getWidth() - 1);
auto transformedY = y / (image.getHeight() - 1);
// then generate a ray which extends out from the camera position towards that point on the viewport, with respect to the pixel's position on the image
return {position, imageOrigin + transformedX * horizontalAxis + transformedY * verticalAxis - position};
}
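
// lookAt builds an orthonormal camera basis: w points from the target back towards the camera,
// u is the camera's right vector, and v its up vector. The viewport axes and the image-plane
// origin used by projectRay() are derived from this basis.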
void Camera::lookAt(const Vec4& lookAtPos) {
// standard camera lookAt function
auto w = (position - lookAtPos).normalize();
auto u = (Vec4::cross(up, w)).normalize();
auto v = Vec4::cross(w, u);
horizontalAxis = viewportWidth * u;
verticalAxis = viewportHeight * v;
imageOrigin = position - horizontalAxis / 2 - verticalAxis / 2 - w;
}
void Camera::setRotation(const PRECISION_TYPE yaw, const PRECISION_TYPE pitch) {
// TODO:
}
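
// view() builds a view matrix from yaw and pitch (FPS-camera style) and, as a side effect,
// recomputes the raytracer camera's viewport axes and image-plane origin so that ray
// generation matches the same orientation.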
Mat4x4 Camera::view(PRECISION_TYPE yaw, PRECISION_TYPE pitch) {
Mat4x4 view;
pitch = degreeeToRadian(pitch);
yaw = degreeeToRadian(yaw);
PRECISION_TYPE cosPitch = std::cos(pitch);
PRECISION_TYPE cosYaw = std::cos(yaw);
PRECISION_TYPE sinPitch = std::sin(pitch);
PRECISION_TYPE sinYaw = std::sin(yaw);
auto x = Vec4{cosYaw, 0, -sinYaw}; // right
auto y = Vec4{sinYaw * sinPitch, cosPitch, cosYaw * sinPitch}; // up
auto z = Vec4{sinYaw * cosPitch, -sinPitch, cosPitch * cosYaw}; // backward, same role as w in lookAt()
// we can actually take those x, y, z vectors and use them to compute the raytracer camera settings
viewportHeight = 2 * tanFovHalf;
viewportWidth = aspectRatio * viewportHeight;
// exactly the same as in the lookAt function.
horizontalAxis = viewportWidth * x;
verticalAxis = viewportHeight * y;
imageOrigin = position - horizontalAxis / 2 - verticalAxis / 2 - z;
view.m00(float(x.x()));
view.m01(float(x.y()));
view.m02(float(x.z()));
view.m03(float(x.w()));
view.m10(float(y.x()));
view.m11(float(y.y()));
view.m12(float(y.z()));
view.m13(float(y.w()));
view.m20(float(z.x()));
view.m21(float(z.y()));
view.m22(float(z.z()));
view.m23(float(z.w()));
// the view matrix is the inverse of the camera transform; the dot products below fill in the
// translation column (overwriting the w components set above), equivalent to multiplying by a translation matrix
view.m03(-float(Vec4::dot(x, position)));
view.m13(-float(Vec4::dot(y, position)));
view.m23(-float(Vec4::dot(z, position)));
view.m33(1);
return view;
}
struct RayData {
Ray ray;
int depth;
Vec4 color;
};
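
// raycast traces a single camera ray through the scene iteratively: on each bounce the ray is
// either scattered by the material it hit (attenuating the accumulated color), absorbed
// (returning black), or it misses everything and picks up the sky color. This loop replaces
// the old recursive version kept in the comment at the end of the function.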
Vec4 Raycaster::raycast(const Ray& ray) {
Ray localRay = ray;
Vec4 color {1.0, 1.0, 1.0};
for (int CURRENT_BOUNCE = 0; CURRENT_BOUNCE < maxBounceDepth; CURRENT_BOUNCE++){
if (*haltExecution || *haltRaytracing)
return color;
while (*pauseRaytracing) // sleep for 1/60th of a second, or about 1 frame.
std::this_thread::sleep_for(std::chrono::milliseconds(16));
auto hit = world.checkIfHit(localRay, 0.001, infinity);
if (hit.first.hit) {
auto object = hit.second;
auto scatterResults = object->getMaterial()->scatter(localRay, hit.first);
// if the material scatters the ray, i.e. casts a new one,
if (scatterResults.scattered) { // attenuate the accumulated color by the material's color and continue with the new ray
color = color * scatterResults.attenuationColor;
localRay = scatterResults.newRay;
} else {
// if we don't scatter, we don't need to keep looping
color = {0.0, 0.0, 0.0};
break;
}
} else {
// since we didn't hit, we hit the sky.
color = color * Vec4{0.5, 0.7, 1.0};
// if we don't hit anything there is nothing left to bounce off of, so stop looping.
break;
}
}
return color;
// old recursive version:
/*if (depth > maxBounceDepth)
return {0, 0, 0};
auto hit = world.checkIfHit(ray, 0.001, infinity);
if (hit.first.hit) {
auto object = hit.second;
auto scatterResults = object->getMaterial()->scatter(ray, hit.first);
// if the material scatters the ray, ie casts a new one,
if (scatterResults.scattered) // attenuate the recursive raycast by the material's color
return scatterResults.attenuationColor * raycast(scatterResults.newRay, depth + 1);
//tlog << "Not scattered? " << object->getMaterial() << "\n";
return {0, 0, 0};
}
// skybox color
return {0.5, 0.7, 1.0};*/
}
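
// runSingle renders the entire image on a single worker thread: every pixel is the average of
// raysPerPixel jittered samples, written out with gamma correction.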
void Raycaster::runSingle() {
executors.push_back(std::make_unique<std::thread>([this]() -> void {
profiler::start("Raytracer Results", "Single Thread");
for (int i = 0; i < image.getWidth(); i++) {
for (int j = 0; j < image.getHeight(); j++) {
Raytracing::Vec4 color;
// TODO: profile for speed;
for (int s = 0; s < raysPerPixel; s++) {
// anti-alias by jittering each sample ray slightly within the pixel before averaging
color = color + raycast(camera.projectRay(i + rnd.getDouble(), j + rnd.getDouble()));
}
PRECISION_TYPE sf = 1.0 / raysPerPixel;
// average the samples (sf = 1/raysPerPixel) and apply gamma-2.0 correction via sqrt before writing the pixel
image.setPixelColor(i, j, {std::sqrt(sf * color.r()), std::sqrt(sf * color.g()), std::sqrt(sf * color.b())});
if (*haltExecution || *haltRaytracing)
return;
while (*pauseRaytracing) // sleep for 1/60th of a second, or about 1 frame.
std::this_thread::sleep_for(std::chrono::milliseconds(16));
}
}
profiler::end("Raytracer Results", "Single Thread");
finishedThreads++;
}));
}
void Raycaster::runMulti(unsigned int t) {
// calculate the number of divisions per side of the image;
// e.g. with 16 threads divs becomes 4: 4 divisions per axis across two axes gives 16 quadrants,
// matching the 16 threads.
if (t == 0)
t = system_threads;
ilog << "Starting multithreaded raytracer with " << t << " threads!\n";
int divs = int(std::log(t) / std::log(2));
// now multiply divs so that each quadrant is split into sub-quadrants which we can queue.
// the reason to do this is that some quadrants finish before others, and the newly free threads can keep working;
// doing it without a queue like this leads to a single-thread critical path and isn't optimally efficient.
divs *= 4; // 2 would be enough (splitting both axes makes 4 sub-quadrants), but I tested 4 and it was faster by two seconds, so I'm keeping 4.
delete(unprocessedQuads);
unprocessedQuads = new std::queue<std::vector<int>>();
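// split the image into divs x divs tiles; each queue entry stores the tile's width and height
// followed by its x/y offset into the image, which a worker thread later consumes.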
for (int dx = 0; dx < divs; dx++) {
for (int dy = 0; dy < divs; dy++) {
// sending functions directly wasn't working (fixed, though passing lambdas with captures still feels janky), so each work item is a plain vector of ints.
unprocessedQuads->push({
image.getWidth() / divs,
image.getHeight() / divs,
(image.getWidth() / divs) * dx,
(image.getHeight() / divs) * dy
});
}
}
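// spawn t worker threads; each one repeatedly pulls the next unprocessed tile off the shared
// queue (guarded by the queueSync mutex) and renders it, so faster threads naturally pick up
// more tiles instead of sitting idle.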
for (int i = 0; i < t; i++) {
executors.push_back(std::make_unique<std::thread>([this, i, divs, t]() -> void {
// run through all the quadrants
std::stringstream str;
str << "Threading of #";
str << (i+1);
profiler::start("Raytracer Results", str.str());
int j = 0;
while (true) {
std::vector<int> func;
// get the function for the quadrant
queueSync.lock();
if (unprocessedQuads->empty()) {
queueSync.unlock();
break;
}
func = unprocessedQuads->front();
unprocessedQuads->pop();
queueSync.unlock();
// then render the tile
for (int kx = 0; kx < func[0]; kx++) {
for (int ky = 0; ky < func[1]; ky++) {
try {
int x = func[2] + kx;
int y = func[3] + ky;
Raytracing::Vec4 color;
// TODO: profile for speed;
for (int s = 0; s < raysPerPixel; s++) {
// anti-alias by jittering each sample ray slightly within the pixel before averaging
color = color + raycast(camera.projectRay(x + rnd.getDouble(), y + rnd.getDouble()));
}
PRECISION_TYPE sf = 1.0 / raysPerPixel;
// average the samples (sf = 1/raysPerPixel) and apply gamma-2.0 correction via sqrt before writing the pixel
image.setPixelColor(x, y, {std::sqrt(sf * color.r()), std::sqrt(sf * color.g()), std::sqrt(sf * color.b())});
if (*haltExecution || *haltRaytracing)
return;
while (*pauseRaytracing) // sleep for 1/60th of a second, or about 1 frame.
std::this_thread::sleep_for(std::chrono::milliseconds(16));
} catch (std::exception& error) {
flog << "Possibly fatal error in the multithreaded raytracer!\n";
flog << error.what() << "\n";
}
}
}
j++;
}
finishedThreads++;
profiler::end("Raytracer Results", str.str());
}));
}
}
}