// src/accelerate/myqtconcurrent.cpp
#include <cmath>

#include <QList>
#include <QtConcurrent>
#include "../raytracer/raytracer.h"

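// Bundles everything pixelRoutine needs to trace one pixel: the eye point and the
// pixel direction in camera space, plus the scene and the owning RayTracer.
// The scene is held by reference so the per-pixel entries stay cheap to copy.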
struct pixelRoutineArgs {
    glm::vec4 pCamera;
    glm::vec4 dCamera;
    const RayTraceScene &scene;
    RayTracer *rt;
};
static RGBA pixelRoutine(pixelRoutineArgs args);

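// Builds one pixelRoutineArgs entry per pixel, then maps pixelRoutine over the list
// with QtConcurrent, which spreads the work across the global thread pool.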
void RayTracer::renderParallel(RGBA *imageData, const RayTraceScene &scene)
{
    Camera camera = scene.getCamera();
    float cameraDepth = 1.f;
    // View plane at depth k has height 2k*tan(heightAngle/2); width follows from the aspect ratio.
    float viewplaneHeight = 2.f * cameraDepth * std::tan(camera.getHeightAngle() / 2.f);
    float viewplaneWidth = viewplaneHeight * ((float)scene.width() / (float)scene.height());

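    // Precompute one camera-space ray per pixel; the tracing itself runs later in
    // pixelRoutine on the QtConcurrent worker threads.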
    QList<pixelRoutineArgs> l{};
    for (int imageRow = 0; imageRow < scene.height(); imageRow++) {
        for (int imageCol = 0; imageCol < scene.width(); imageCol++) {
            float xCameraSpace = viewplaneWidth *
                                 (-.5f + (imageCol + .5f) / scene.width());
            float yCameraSpace = viewplaneHeight *
                                 (-.5f + (imageRow + .5f) / scene.height());

            glm::vec4 pixelDirCamera{xCameraSpace, -yCameraSpace, -cameraDepth, 0.f}; //w=0 for dir
            glm::vec4 eyeCamera{0.f, 0.f, 0.f, 1.f}; // w=1.f for point
            pixelRoutineArgs args{
                    eyeCamera,
                    pixelDirCamera,
                    scene,
                    this
            };
            l.append(args);
        }
    }
    // blockingMapped runs pixelRoutine over every entry on the global thread pool and
    // returns the results in input order, so they line up with imageData row by row.
    QList<RGBA> pixels = QtConcurrent::blockingMapped(l, pixelRoutine);
    int index = 0;
    for (RGBA p : pixels) {
        imageData[index++] = p;
    }

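    // Optional post-process: blur the finished image as a simple anti-aliasing pass.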
    if (m_enableAntiAliasing)
    {
        filterBlur(imageData, scene.width(), scene.height());
    }
}


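// Worker invoked by QtConcurrent for a single pixel: transforms the eye point and pixel
// direction into world space, then dispatches to the depth-of-field, supersampling,
// or single-ray path.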
static RGBA pixelRoutine(pixelRoutineArgs args)
{
    auto eyeCamera = args.pCamera;
    auto pixelDirCamera = args.dCamera;
    const RayTraceScene &scene = args.scene; // bind by reference; plain `auto` would copy the scene
    RayTracer *rt = args.rt;

    // convert camera space to world space
    auto inv = scene.getCamera().getInverseViewMatrix();
    glm::vec4 pWorld = inv * eyeCamera;
    glm::vec4 dWorld = glm::normalize(inv * pixelDirCamera);
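    // w = 0 keeps the direction unaffected by the camera's translation; the eye point keeps w = 1.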

    if (rt->m_enableDepthOfField)
    {
        // if we're doing depth of field, we need to shoot multiple rays, see camera.cpp
        return RayTracer::toRGBA(rt->secondaryRays(pWorld, dWorld, scene));
    }
    if (rt->m_enableSuperSample)
    {
        // if we're doing super sampling, we need to shoot multiple rays, see raytracer.cpp
        return rt->superSample(eyeCamera, pixelDirCamera, scene);
    }

    // shoot ray!
    RGBA pixel = RayTracer::toRGBA(rt->getPixelFromRay(pWorld, dWorld, scene, 0));
    return pixel;
}