2022-10-23 23:46:12 -04:00
|
|
|
/*
|
|
|
|
* Created by Brett Terpstra 6920201 on 16/10/22.
|
|
|
|
* Copyright (c) 2022 Brett Terpstra. All Rights Reserved.
|
|
|
|
*/
|
|
|
|
#include "engine/raytracing.h"
|
|
|
|
#include <queue>
|
|
|
|
#include <functional>
|
2022-10-24 17:06:20 -04:00
|
|
|
#include <utility>
|
2022-10-23 23:46:12 -04:00
|
|
|
#include <engine/util/debug.h>
|
|
|
|
|
|
|
|
namespace Raytracing {
|
2022-10-24 17:06:20 -04:00
|
|
|
|
2022-11-16 17:49:02 -05:00
|
|
|
extern Signals* RTSignal;
|
|
|
|
|
2022-10-23 23:46:12 -04:00
|
|
|
Ray Camera::projectRay(PRECISION_TYPE x, PRECISION_TYPE y) {
    // Map the pixel coordinate from image space onto the camera's viewport,
    // normalizing both components into the [0, 1] range.
    const auto normX = x / (image.getWidth() - 1);
    const auto normY = y / (image.getHeight() - 1);
    // Fire a ray from the camera position toward the matching point on the
    // viewport plane (imageOrigin is the viewport's lower corner).
    const auto towardViewport = imageOrigin + normX * horizontalAxis + normY * verticalAxis - position;
    return {position, towardViewport};
}
|
2022-10-24 17:06:20 -04:00
|
|
|
|
2022-11-07 00:29:12 -05:00
|
|
|
void Camera::lookAt(const Vec4& lookAtPos) {
|
2022-10-23 23:46:12 -04:00
|
|
|
// standard camera lookAt function
|
2022-11-07 00:29:12 -05:00
|
|
|
auto w = (position - lookAtPos).normalize();
|
2022-10-23 23:46:12 -04:00
|
|
|
auto u = (Vec4::cross(up, w)).normalize();
|
|
|
|
auto v = Vec4::cross(w, u);
|
2022-10-24 17:06:20 -04:00
|
|
|
|
2022-10-23 23:46:12 -04:00
|
|
|
horizontalAxis = viewportWidth * u;
|
|
|
|
verticalAxis = viewportHeight * v;
|
2022-10-24 17:06:20 -04:00
|
|
|
imageOrigin = position - horizontalAxis / 2 - verticalAxis / 2 - w;
|
2022-10-23 23:46:12 -04:00
|
|
|
}
|
2022-10-24 17:06:20 -04:00
|
|
|
|
2022-11-14 20:44:49 -05:00
|
|
|
void Camera::setRotation(const PRECISION_TYPE yaw, const PRECISION_TYPE pitch) {
    // Intentionally unimplemented stub: camera orientation is currently driven
    // by lookAt() and view() instead. Parameters are accepted but ignored.
    // TODO:
}
|
2022-11-14 20:44:49 -05:00
|
|
|
Mat4x4 Camera::view(PRECISION_TYPE yaw, PRECISION_TYPE pitch) {
    // Builds a view (world -> camera) matrix from yaw/pitch angles given in
    // degrees. As a side effect it also refreshes the raytracer camera frame
    // (viewportWidth/viewportHeight, horizontalAxis, verticalAxis, imageOrigin)
    // so the raytracer stays in sync with this orientation.
    Mat4x4 view;

    // Angles arrive in degrees; the trig below needs radians.
    pitch = degreeeToRadian(pitch);

    yaw = degreeeToRadian(yaw);

    PRECISION_TYPE cosPitch = std::cos(pitch);

    PRECISION_TYPE cosYaw = std::cos(yaw);

    PRECISION_TYPE sinPitch = std::sin(pitch);

    PRECISION_TYPE sinYaw = std::sin(yaw);

    // Camera basis vectors derived from yaw/pitch.
    // NOTE(review): the forward/right/up labels look suspect — `x` is used below
    // as the horizontal axis and `y` as the vertical axis, which would make them
    // right/up rather than forward/right. Confirm against Mat4x4's conventions.
    auto x = Vec4{cosYaw, 0, -sinYaw}; // forward

    auto y = Vec4{sinYaw * sinPitch, cosPitch, cosYaw * sinPitch}; // right

    auto z = Vec4{sinYaw * cosPitch, -sinPitch, cosPitch * cosYaw}; // up

    // we can actually take those x, y, z vectors and use them to compute the raytracer camera settings
    viewportHeight = 2 * tanFovHalf;

    viewportWidth = aspectRatio * viewportHeight;

    // exactly the same as the look at function.
    horizontalAxis = viewportWidth * x;

    verticalAxis = viewportHeight * y;

    imageOrigin = position - horizontalAxis / 2 - verticalAxis / 2 - z;

    // Rotation part: each row of the matrix is one camera basis vector.
    view.m00(float(x.x()));

    view.m01(float(x.y()));

    view.m02(float(x.z()));

    view.m03(float(x.w()));

    view.m10(float(y.x()));

    view.m11(float(y.y()));

    view.m12(float(y.z()));

    view.m13(float(y.w()));

    view.m20(float(z.x()));

    view.m21(float(z.y()));

    view.m22(float(z.z()));

    view.m23(float(z.w()));

    // view matrix are inverted, dot product to simulate translate matrix multiplication
    // (this deliberately overwrites the m03/m13/m23 values assigned above with
    // the translation column of the inverse transform)
    view.m03(-float(Vec4::dot(x, position)));

    view.m13(-float(Vec4::dot(y, position)));

    view.m23(-float(Vec4::dot(z, position)));

    view.m33(1);

    return view;

}
|
2022-10-25 01:06:26 -04:00
|
|
|
|
|
|
|
// Bundles a ray in flight with its bounce depth and accumulated color.
// NOTE(review): not referenced by any code visible in this file — presumably a
// leftover from an earlier recursive/queued raycaster design; confirm before removal.
struct RayData {

    Ray ray;        // the ray being traced

    int depth;      // bounce count accumulated so far

    Vec4 color;     // color/attenuation accumulated along the path

};
|
|
|
|
|
2022-10-28 01:44:23 -04:00
|
|
|
Vec4 Raycaster::raycast(const Ray& ray) {
    // Iteratively bounce the ray through the scene up to maxBounceDepth times,
    // multiplying the running attenuation by each surface's color. Returns the
    // final accumulated color for this primary ray.
    Ray currentRay = ray;
    Vec4 attenuation{1.0, 1.0, 1.0};
    for (int bounce = 0; bounce < maxBounceDepth; bounce++) {
        // Bail out immediately if a shutdown was requested.
        if (RTSignal->haltExecution || RTSignal->haltRaytracing)
            return attenuation;
        // While paused, sleep for ~1 frame (1/60th of a second) at a time.
        while (RTSignal->pauseRaytracing)
            std::this_thread::sleep_for(std::chrono::milliseconds(16));

        auto hitResult = world.checkIfHit(currentRay, 0.001, infinity);
        if (!hitResult.first.hit) {
            // Missed everything — the ray escaped into the sky color.
            attenuation = attenuation * Vec4{0.5, 0.7, 1.0};
            break;
        }

        auto hitObject = hitResult.second;
        auto scatter = hitObject->getMaterial()->scatter(currentRay, hitResult.first);
        if (!scatter.scattered) {
            // The material absorbed the ray entirely; no light makes it back.
            attenuation = {0.0, 0.0, 0.0};
            break;
        }
        // The material cast a new ray; attenuate by its color and keep bouncing.
        attenuation = attenuation * scatter.attenuationColor;
        currentRay = scatter.newRay;
    }
    return attenuation;
}
|
2022-11-17 10:52:00 -05:00
|
|
|
|
|
|
|
void Raycaster::runSTDThread(int threads){
|
|
|
|
for (int i = 0; i < threads; i++) {
|
|
|
|
executors.push_back(std::make_unique<std::thread>([this, i, threads]() -> void {
|
2022-10-24 17:06:20 -04:00
|
|
|
// run through all the quadrants
|
2022-10-25 01:06:26 -04:00
|
|
|
std::stringstream str;
|
|
|
|
str << "Threading of #";
|
|
|
|
str << (i+1);
|
|
|
|
profiler::start("Raytracer Results", str.str());
|
2022-10-24 17:06:20 -04:00
|
|
|
int j = 0;
|
|
|
|
while (true) {
|
2022-11-17 10:52:00 -05:00
|
|
|
RaycasterImageBounds imageBoundingData;
|
2022-10-24 17:06:20 -04:00
|
|
|
// get the function for the quadrant
|
|
|
|
queueSync.lock();
|
|
|
|
if (unprocessedQuads->empty()) {
|
|
|
|
queueSync.unlock();
|
|
|
|
break;
|
|
|
|
}
|
2022-11-17 10:52:00 -05:00
|
|
|
imageBoundingData = unprocessedQuads->front();
|
2022-10-24 17:06:20 -04:00
|
|
|
unprocessedQuads->pop();
|
|
|
|
queueSync.unlock();
|
2022-10-25 01:06:26 -04:00
|
|
|
// the run it
|
2022-11-17 10:52:00 -05:00
|
|
|
for (int kx = 0; kx <= imageBoundingData.width; kx++) {
|
|
|
|
for (int ky = 0; ky < imageBoundingData.height; ky++) {
|
2022-10-23 23:46:12 -04:00
|
|
|
try {
|
2022-11-17 10:52:00 -05:00
|
|
|
int x = imageBoundingData.x + kx;
|
|
|
|
int y = imageBoundingData.y + ky;
|
2022-10-23 23:46:12 -04:00
|
|
|
Raytracing::Vec4 color;
|
|
|
|
// TODO: profile for speed;
|
|
|
|
for (int s = 0; s < raysPerPixel; s++) {
|
|
|
|
// simulate anti aliasing by generating rays with very slight random directions
|
2022-10-28 01:44:23 -04:00
|
|
|
color = color + raycast(camera.projectRay(x + rnd.getDouble(), y + rnd.getDouble()));
|
2022-10-23 23:46:12 -04:00
|
|
|
}
|
|
|
|
PRECISION_TYPE sf = 1.0 / raysPerPixel;
|
|
|
|
// apply pixel color with gamma correction
|
|
|
|
image.setPixelColor(x, y, {std::sqrt(sf * color.r()), std::sqrt(sf * color.g()), std::sqrt(sf * color.b())});
|
2022-11-16 17:49:02 -05:00
|
|
|
if (RTSignal->haltExecution || RTSignal->haltRaytracing)
|
2022-10-28 01:44:23 -04:00
|
|
|
return;
|
2022-11-16 17:49:02 -05:00
|
|
|
while (RTSignal->pauseRaytracing) // sleep for 1/60th of a second, or about 1 frame.
|
2022-10-28 15:05:08 -04:00
|
|
|
std::this_thread::sleep_for(std::chrono::milliseconds(16));
|
2022-10-24 17:06:20 -04:00
|
|
|
} catch (std::exception& error) {
|
2022-10-23 23:46:12 -04:00
|
|
|
flog << "Possibly fatal error in the multithreaded raytracer!\n";
|
|
|
|
flog << error.what() << "\n";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
j++;
|
|
|
|
}
|
|
|
|
finishedThreads++;
|
2022-10-25 01:06:26 -04:00
|
|
|
profiler::end("Raytracer Results", str.str());
|
2022-10-23 23:46:12 -04:00
|
|
|
}));
|
|
|
|
}
|
|
|
|
}
|
2022-11-17 10:52:00 -05:00
|
|
|
|
|
|
|
void Raycaster::run(bool multithreaded, int threads) {
|
|
|
|
if (threads == 0)
|
|
|
|
threads = system_threads;
|
|
|
|
// calculate the max divisions we can have per side, then expand by a factor of 4.
|
|
|
|
// the reason to do this is that some of them will finish far quciker than others. The now free threads can keep working.
|
|
|
|
// to do it without a queue like this leads to most threads finishing and a single thread being the critical path which isn't optimally efficient.
|
|
|
|
int divs = int(std::log(threads) / std::log(2)) * 4;
|
|
|
|
|
|
|
|
// if we are running single threaded, disable everything special
|
|
|
|
// the reason we run single threaded in a seperate thread is because the GUI requires its own set of updating commands
|
|
|
|
// which cannot be blocked by the raytracer, otherwise it would become unresponsive.
|
|
|
|
if (!multithreaded){
|
|
|
|
threads = 1;
|
|
|
|
divs = 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
ilog << "Starting multithreaded raytracer with " << threads << " threads!\n";
|
|
|
|
|
|
|
|
delete(unprocessedQuads);
|
|
|
|
unprocessedQuads = new std::queue<RaycasterImageBounds>();
|
|
|
|
|
|
|
|
// we need to subdivide the image for the threads, since this is really quick it's fine to due sequentially
|
|
|
|
for (int dx = 0; dx < divs; dx++) {
|
|
|
|
for (int dy = 0; dy < divs; dy++) {
|
|
|
|
unprocessedQuads->push({
|
|
|
|
image.getWidth() / divs,
|
|
|
|
image.getHeight() / divs,
|
|
|
|
(image.getWidth() / divs) * dx,
|
|
|
|
(image.getHeight() / divs) * dy
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
runSTDThread(threads);
|
|
|
|
}
|
|
|
|
|
2022-10-23 23:46:12 -04:00
|
|
|
}
|