diff --git a/scripts/deploy/V3_NEXT_TAG.txt b/scripts/deploy/V3_NEXT_TAG.txt
index f906e18..a9c8fe8 100644
--- a/scripts/deploy/V3_NEXT_TAG.txt
+++ b/scripts/deploy/V3_NEXT_TAG.txt
@@ -1 +1 @@
-96
+103
diff --git a/src/app/[locale]/shader/page.tsx b/src/app/[locale]/shader/page.tsx
index 51656c0..90ec338 100644
--- a/src/app/[locale]/shader/page.tsx
+++ b/src/app/[locale]/shader/page.tsx
@@ -1,4 +1,5 @@
import ShaderHeader from "@/components/shader/ShaderHeader";
+import ShaderRenderer from "@/components/shader/ShaderRenderer";
import { fetchMessages } from "@/shared/i18n/translation";
import { BasePageProps } from "@/shared/types/comp";
@@ -10,6 +11,7 @@ export default async function Page({ params, children }: BasePageProps) {
return (
<>
+
</>
);
}
diff --git a/src/components/shader/ShaderHeader.tsx b/src/components/shader/ShaderHeader.tsx
index 4c66a7e..6c33399 100644
--- a/src/components/shader/ShaderHeader.tsx
+++ b/src/components/shader/ShaderHeader.tsx
@@ -2,132 +2,138 @@
import { Messages } from "@/shared/i18n/type";
import { BaseCompProps } from "@/shared/types/comp";
import { tm } from "@/shared/utils";
+import { BiTargetLock } from "react-icons/bi";
+import { BsShadows } from "react-icons/bs";
import { GiFilmProjector, GiShieldReflect } from "react-icons/gi";
-import { IoIosCut } from "react-icons/io";
+import { IoIosColorPalette, IoIosCut } from "react-icons/io";
import { IoLayers } from "react-icons/io5";
-import { MdBlurOn, MdDeblur, MdGroupWork, MdLightbulb, MdScatterPlot } from "react-icons/md";
+import { MdBlurOn, MdGroupWork, MdLightbulb, MdScatterPlot } from "react-icons/md";
import { PiPath, PiWaveTriangleFill } from "react-icons/pi";
import { RiColorFilterFill } from "react-icons/ri";
import { TbBounceLeftFilled, TbFence, TbTriangleSquareCircleFilled } from "react-icons/tb";
import IconedText from "../ui/icon-text";
import { useCoursework } from "./coursework";
-import { IoIosColorPalette } from "react-icons/io";
-import { BiTargetLock } from "react-icons/bi";
export interface ShaderHeaderProps extends BaseCompProps<"div"> {
messages: Messages;
}
export default function ShaderHeader({ messages }: ShaderHeaderProps) {
- const { active, setActive, activeOptions, selectOption } = useCoursework();
+ const { type, toggleType, options, toggleOption } = useCoursework();
if (!messages) return null;
- const isRayTracer = active === "ray tracer";
- const isRasterization = active === "rasterization";
- const isPathTracer = active === "path tracer";
+ const isRayTracer = type === "ray tracer";
+ const isRasterization = type === "rasterization";
+ const isPathTracer = type === "path tracer";
return (
-
setActive("ray tracer")}>
+ toggleType("ray tracer")}>
Ray Tracer
- setActive("rasterization")}>
+ toggleType("rasterization")}>
Rasterization
- setActive("path tracer")}>
+ toggleType("path tracer")}>
Path Tracer
-
+
selectOption("Cylinder And Plane")}>
+ active={options.includes("Cylinder And Plane")}
+ onClick={() => toggleOption("Cylinder And Plane")}>
Cylinder And Plane
selectOption("Reflect And Refract")}>
+ active={options.includes("Reflect And Refract")}
+ onClick={() => toggleOption("Reflect And Refract")}>
Reflect And Refract
-
selectOption("Fresnel")}>
-
+ toggleOption("Fresnel")}>
+
Fresnel
-
-
selectOption("Projection")}>
+
+ toggleOption("Projection")}>
Projection
selectOption("Rasterization")}>
+ active={options.includes("Rasterization")}
+ onClick={() => toggleOption("Rasterization")}>
Rasterization
- selectOption("Clipping")}>
+ toggleOption("Clipping")}>
Clipping
selectOption("Interpolation")}>
+ active={options.includes("Interpolation")}
+ onClick={() => toggleOption("Interpolation")}>
Interpolation
- selectOption("Zbuffering")}>
+ toggleOption("Zbuffering")}>
Zbuffering
- selectOption("AAlias")}>
+ toggleOption("AAlias")}>
AAlias
-
-
selectOption("Light")}>
+
+
toggleOption("Light")}>
Light
-
selectOption("Bounce")}>
+ toggleOption("Bounce")}>
Bounce
- selectOption("Throughput")}>
+ toggleOption("Throughput")}>
Throughput
- selectOption("Halton")}>
+ toggleOption("Halton")}>
Halton
- selectOption("AAlias")}>
+ toggleOption("AAlias")}>
AAlias
selectOption("Importance Sampling")}>
+ active={options.includes("Importance Sampling")}
+ onClick={() => toggleOption("Importance Sampling")}>
Importance Sampling
selectOption("Multi-light IS")}>
+ active={options.includes("Multi-light IS")}
+ onClick={() => toggleOption("Multi-light IS")}>
Multi-light IS
diff --git a/src/components/shader/ShaderRenderer.tsx b/src/components/shader/ShaderRenderer.tsx
new file mode 100644
index 0000000..157f2d7
--- /dev/null
+++ b/src/components/shader/ShaderRenderer.tsx
@@ -0,0 +1,108 @@
+"use client";
+import { BaseCompProps } from "@/shared/types/comp";
+import { tm } from "@/shared/utils";
+import Script from "next/script";
+import { useEffect, useState } from "react";
+import { MdSpeed } from "react-icons/md";
+import IconedText from "../ui/icon-text";
+import Loading from "../ui/loading/spinner";
+import Separator from "../ui/separator";
+import { TYPE_2_SOLUTION, useCoursework } from "./coursework";
+import { ShaderFramework } from "./shaderFramework";
+
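+// Ids of the script elements that shaderFramework's _getSourceCode looks up to load each coursework's GLSL source.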
+const COURSEWORK_TYPE_2_FRAG_SHADER = {
+ "ray tracer": "cwk1-fragment-shader",
+ rasterization: "cwk2-fragment-shader",
+ "path tracer": "cwk3-fragment-shader",
+};
+
+const COURSEWORK_TYPE_2_VERT_SHADER = {
+ "ray tracer": "cwk1-vertex-shader",
+ rasterization: "cwk2-vertex-shader",
+ "path tracer": "cwk3-vertex-shader",
+};
+
+export default function ShaderRenderer(params: BaseCompProps<"div">) {
+ const [framework, setFramework] = useState<ShaderFramework | undefined>(undefined);
+ const { type, options, processing, setProcessing } = useCoursework();
+ const [frames, setFrames] = useState(0);
+ const [startTime, setStartTime] = useState(0);
+
+ const canvasId = "courseworkCanvas";
+
+ useEffect(() => {
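+ // Whenever the coursework type or its options change: stop the previous framework,
+ // build a new one with the selected solution #defines, and kick off a fresh render.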
+ setFramework(prevFramework => {
+ setProcessing(true);
+ if (prevFramework) {
+ prevFramework.stop();
+ }
+ if (type === undefined) {
+ console.warn("No coursework type selected");
+ return undefined;
+ }
+ const isPathTracer = type === "path tracer";
+ const newFramework = new ShaderFramework(
+ canvasId,
+ COURSEWORK_TYPE_2_VERT_SHADER[type],
+ COURSEWORK_TYPE_2_FRAG_SHADER[type],
+ isPathTracer,
+ isPathTracer ? "cwk3-tonemap-shader" : undefined
+ );
+ options.map(opt => TYPE_2_SOLUTION[opt]).forEach(opt => newFramework.addSolution(opt));
+
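+ // The path tracer accumulates samples over many frames; the other two renderers need only one.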
+ const maxFrames = isPathTracer ? 1000 : 1;
+ setStartTime(Date.now());
+ setFrames(0);
+ newFramework.setFrameCallback(() => {
+ setFrames(newFramework.getCurrentFrame());
+ });
+ newFramework
+ .initialize()
+ .then(() => newFramework.start(maxFrames))
+ .then(() => setProcessing(false));
+ return newFramework;
+ });
+ }, [type, options, setProcessing]);
+
+ return (
+ <>
+
+
+
+
+ {processing && }
+
+
+
+ {frames}/{framework?.getMaxFrame()} frames | {getFps(startTime, frames)} fps
+
+
+ *This runs in real-time on your machine
+
+
+
+
+
+
+
+
+
+ >
+ );
+}
+
+function getFps(startTime: number, frames: number) {
+ const time = Date.now() - startTime;
+ if (time <= 0) return 0; // guard against NaN/Infinity before the first frame completes
+ return Math.floor((frames / time) * 1000);
+}
diff --git a/src/components/shader/coursework.tsx b/src/components/shader/coursework.tsx
index de8bea5..fb94d47 100644
--- a/src/components/shader/coursework.tsx
+++ b/src/components/shader/coursework.tsx
@@ -1,45 +1,101 @@
"use client";
-import { get } from "http";
-import React, { useCallback, useEffect, useState } from "react";
+import React, { useCallback, useState } from "react";
-export type CouseworkType = "ray tracer" | "rasterization" | "path tracer";
+export type CourseworkType = "ray tracer" | "rasterization" | "path tracer";
export interface UseCoursework {
- active: CouseworkType;
- setActive: (type: CouseworkType) => void;
+ toggleType: (type: CourseworkType) => void;
+ toggleOption: (option: string) => void;
+ type: CourseworkType | undefined;
options: string[];
- activeOptions: string[];
- selectOption: (option: string) => void;
+ processing: boolean;
+ setProcessing: React.Dispatch<React.SetStateAction<boolean>>;
}
-export const TYPE_2_OPTIONS: Record<CouseworkType, string[]> = {
- "ray tracer": ["Cylinder And Plane", "Reflect And Refract", "Fresnel"],
+export const TYPE_2_OPTIONS: Record<CourseworkType, string[]> = {
+ "ray tracer": ["Cylinder And Plane", "Reflect And Refract", "Shadow", "Fresnel"],
rasterization: ["Projection", "Rasterization", "Clipping", "Interpolation", "Zbuffering", "AAlias"],
"path tracer": ["Light", "Bounce", "Throughput", "Halton", "AAlias", "Importance Sampling", "Multi-light IS"],
};
+export const TYPE_2_SOLUTION: Record<string, string> = {
+ "Cylinder And Plane": "SOLUTION_CYLINDER_AND_PLANE",
+ "Reflect And Refract": "SOLUTION_REFLECTION_REFRACTION",
+ Shadow: "SOLUTION_SHADOW",
+ Fresnel: "SOLUTION_FRESNEL",
+
+ Projection: "SOLUTION_PROJECTION",
+ Rasterization: "SOLUTION_RASTERIZATION",
+ Clipping: "SOLUTION_CLIPPING",
+ Interpolation: "SOLUTION_INTERPOLATION",
+ Zbuffering: "SOLUTION_ZBUFFERING",
+ AAlias: "SOLUTION_AALIAS",
+
+ Light: "SOLUTION_LIGHT",
+ Bounce: "SOLUTION_BOUNCE",
+ Throughput: "SOLUTION_THROUGHPUT",
+ Halton: "SOLUTION_HALTON",
+ "Importance Sampling": "SOLUTION_IS",
+ "Multi-light IS": "SOLUTION_MIS",
+};
+
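+// Option that is pre-selected when a coursework type is toggled on; an empty string means nothing starts selected.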
+const TYPE_2_DEFAULT_OPTION: Record<CourseworkType, string> = {
+ "ray tracer": "",
+ rasterization: "Projection",
+ "path tracer": "",
+};
+
+export interface TypeAndOptions {
+ type: CourseworkType | undefined;
+ options: (typeof TYPE_2_OPTIONS)[CourseworkType];
+}
+
export const CourseworkContext = React.createContext({} as UseCoursework);
export function CourseworkProvider({ children }: { children: React.ReactNode }) {
- const [active, setActive] = useState<CouseworkType>("ray tracer");
- const [options, setOptions] = useState(TYPE_2_OPTIONS[active]);
- const [activeOptions, setActiveOptions] = useState<string[]>([]);
- const selectOption = useCallback(
- (option: string) => {
- setActiveOptions(getActiveOptions(active, option));
+ const [typeAndOptions, setTypeAndOptions] = useState(getDefaultTypeAndOptions("ray tracer"));
+ const [processing, setProcessing] = useState(false);
+
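+ // Clicking the active type deselects it; clicking another type switches to it with its default
+ // option chain. Both toggles are ignored while a render is still processing.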
+ const toggleType = useCallback(
+ (type: CourseworkType) => {
+ if (processing) return;
+ setTypeAndOptions(prev => {
+ if (prev.type === type) {
+ // toggle off
+ return getUndefinedTypeAndOptions();
+ } else {
+ // toggle on
+ return getDefaultTypeAndOptions(type);
+ }
+ });
},
- [active]
+ [processing]
);
- useEffect(() => {
- setOptions(TYPE_2_OPTIONS[active]);
- setActiveOptions([]);
- }, [active]);
+ const toggleOption = useCallback(
+ (option: string) => {
+ if (processing) return;
+ setTypeAndOptions(prev => {
+ if (prev.type === undefined) {
+ console.error(`No type is selected`);
+ return getUndefinedTypeAndOptions();
+ }
+ if (prev.options.includes(option)) {
+ // toggle off
+ const index = prev.options.indexOf(option);
+ const newOptions = prev.options.slice(0, index);
+ return { type: prev.type, options: newOptions };
+ }
+
+ return getActiveTypeAndOptions(prev.type, option);
+ });
+ },
+ [processing]
+ );
return (
-
- {" "}
- {children}{" "}
+
+ {children}
);
}
@@ -51,11 +107,41 @@ export function useCoursework() {
// return active options based on the active coursework and the selected option
// active options are the options that are before the selected option
// if option A occurs before option B, then A is implicitly active
-export function getActiveOptions(active: CouseworkType, selectedOption: string) {
+function getActiveOptions(active: CourseworkType | undefined, selectedOption: string) {
+ if (!active) {
+ console.warn(`No type is selected`);
+ return [];
+ }
const allowedOptions = TYPE_2_OPTIONS[active];
const index = allowedOptions.indexOf(selectedOption);
if (index === -1) {
- throw new Error(`Invalid option ${selectedOption} for ${active}, allowed options are ${allowedOptions}`);
+ if (TYPE_2_DEFAULT_OPTION[active]) {
+ return getActiveOptions(active, TYPE_2_DEFAULT_OPTION[active]);
+ } else {
+ return [];
+ }
+ } else {
+ return allowedOptions.slice(0, index + 1);
}
- return allowedOptions.slice(0, index + 1);
+}
+
+function getUndefinedTypeAndOptions(): TypeAndOptions {
+ return {
+ type: undefined,
+ options: [],
+ };
+}
+
+function getDefaultTypeAndOptions(type: CourseworkType): TypeAndOptions {
+ return {
+ type,
+ options: getActiveOptions(type, TYPE_2_DEFAULT_OPTION[type]),
+ };
+}
+
+function getActiveTypeAndOptions(type: CourseworkType, option: string): TypeAndOptions {
+ return {
+ type,
+ options: getActiveOptions(type, option),
+ };
}
diff --git a/src/components/shader/shaderFramework.ts b/src/components/shader/shaderFramework.ts
new file mode 100644
index 0000000..1793d8e
--- /dev/null
+++ b/src/components/shader/shaderFramework.ts
@@ -0,0 +1,606 @@
+// Compile and initialize shader stuff
+
+function _compileShader(gl: WebGLRenderingContext, type: GLenum, shaderCode: string) {
+ const shader = gl.createShader(type);
+
+ if (shader === null) {
+ console.error("Error while creating shader");
+ return null;
+ }
+
+ gl.shaderSource(shader, shaderCode);
+ gl.compileShader(shader);
+
+ if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+ console.log("Error while compiling shader");
+ console.log(gl.getShaderInfoLog(shader));
+ return null;
+ } else {
+ return shader;
+ }
+}
+
+function _initShaderProgram(gl: WebGLRenderingContext, shaderInfos: { type: GLenum; code: string }[]) {
+ const shaderProgram = gl.createProgram();
+
+ if (shaderProgram === null) {
+ console.error("Error while creating shader program");
+ return null;
+ }
+
+ shaderInfos.forEach(info => {
+ const shader = _compileShader(gl, info.type, info.code);
+ if (shader) {
+ gl.attachShader(shaderProgram, shader);
+ }
+ });
+
+ gl.linkProgram(shaderProgram);
+
+ if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
+ console.log("Error linking shader program");
+ console.log(gl.getProgramInfoLog(shaderProgram));
+ }
+
+ gl.useProgram(shaderProgram);
+ return shaderProgram;
+}
+
+function _initGL(canvasId: string) {
+ const canvas = document.getElementById(canvasId) as HTMLCanvasElement;
+ if (canvas == null) {
+ throw new Error(`Error: canvas not found for gl, given id=${canvasId}`);
+ }
+ const gl = canvas.getContext("webgl");
+
+ if (!gl) {
+ console.error(`Error: Could not initialize gl context from canvas id ${canvasId}`);
+ return null;
+ } else {
+ return gl;
+ }
+}
+
+function _initScreenBuffer(gl: WebGLRenderingContext): WebGLBuffer | null {
+ const screenVerticesBuffer = gl.createBuffer();
+ if (screenVerticesBuffer === null) {
+ console.error("Error: screenVerticesBuffer is null");
+ return null;
+ }
+ gl.bindBuffer(gl.ARRAY_BUFFER, screenVerticesBuffer);
+
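+ // Two triangles (6 vertices, 3 floats each) covering the whole clip-space screen; all shading happens in the fragment shader.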
+ const vertices = [
+ -1, -1, 0, -1, 1, 0, 1, 1, 0,
+
+ -1, -1, 0, 1, -1, 0, 1, 1, 0,
+ ];
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
+
+ return screenVerticesBuffer;
+}
+
+function _getSourceCode(id: string) {
+ const node = document.getElementById(id) as HTMLScriptElement;
+ if (node == null) {
+ console.log(`Error: did not find source code with id: ${id}`);
+ }
+
+ if (node && node.src) {
+ return fetch(node.src).then(res => res.text());
+ } else {
+ return Promise.resolve(node.innerHTML);
+ }
+}
+
+function _findShaderScripts(
+ gl: WebGLRenderingContext,
+ vertexShaderId: string,
+ fragmentShaderId: string,
+ customFragmentShaderCode = ""
+) {
+ return [
+ _getSourceCode(vertexShaderId).then(code => {
+ return {
+ type: gl.VERTEX_SHADER,
+ code: code,
+ };
+ }),
+ _getSourceCode(fragmentShaderId).then(code => {
+ return {
+ type: gl.FRAGMENT_SHADER,
+ code: customFragmentShaderCode + code,
+ };
+ }),
+ ];
+}
+
+async function _initShaders(
+ gl: WebGLRenderingContext,
+ vertexShaderId: string,
+ fragmentShaderId: string,
+ customFragmentShaderCode = "",
+ screenBuffer: WebGLBuffer
+) {
+ const shaderScripts = await Promise.all(
+ _findShaderScripts(gl, vertexShaderId, fragmentShaderId, customFragmentShaderCode)
+ );
+ const shaderProgram = _initShaderProgram(gl, shaderScripts);
+
+ if (shaderProgram === null) {
+ console.error("Error: shader program is null");
+ return null;
+ }
+
+ // set static input to shader program
+
+ // position
+ const aPosition = gl.getAttribLocation(shaderProgram, "position");
+ gl.enableVertexAttribArray(aPosition);
+ gl.bindBuffer(gl.ARRAY_BUFFER, screenBuffer);
+ gl.vertexAttribPointer(aPosition, 3, gl.FLOAT, false, 0, 0); // 3 floats (x, y, z) per vertex
+
+ // projectionMatrix
+ const uProjectionMatrix = gl.getUniformLocation(shaderProgram, "projectionMatrix");
+ gl.uniformMatrix4fv(uProjectionMatrix, false, _getIdentityMatrix4f());
+
+ // modelViewMatrix
+ const uModelviewMatrix = gl.getUniformLocation(shaderProgram, "modelViewMatrix");
+ gl.uniformMatrix4fv(uModelviewMatrix, false, _getIdentityMatrix4f());
+
+ return shaderProgram;
+}
+
+async function _initPathTracer(
+ gl: WebGLRenderingContext,
+ canvasId: string,
+ screenBuffer: WebGLBuffer,
+ vertexShaderId: string,
+ tonemapShaderId: string
+) {
+ const canvas = document.getElementById(canvasId) as HTMLCanvasElement;
+
+ // add copyProgram
+ const script = await Promise.all(_findShaderScripts(gl, vertexShaderId, tonemapShaderId));
+ const copyProgram = _initShaderProgram(gl, script);
+ if (copyProgram === null) {
+ console.error("Error: copy program is null");
+ return;
+ }
+
+ const aPosition = gl.getAttribLocation(copyProgram, "position");
+ gl.enableVertexAttribArray(aPosition);
+ gl.bindBuffer(gl.ARRAY_BUFFER, screenBuffer);
+ gl.vertexAttribPointer(aPosition, 3, gl.FLOAT, false, 0, 0); // 3 floats (x, y, z) per vertex
+
+ // extension needed for float texture
+ const floatTextures = gl.getExtension("OES_texture_float");
+ if (!floatTextures) {
+ console.error("Error: no floating point texture support");
+ return;
+ }
+
+ // add texture
+ const rttTexture = gl.createTexture();
+ if (!rttTexture) {
+ console.error("Error: create texture failed");
+ }
+ gl.bindTexture(gl.TEXTURE_2D, rttTexture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, canvas.width, canvas.height, 0, gl.RGBA, gl.FLOAT, null);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+
+ // add framebuffer
+ const rttFramebuffer = gl.createFramebuffer();
+ if (!rttFramebuffer) {
+ console.log("Error create framebuffer failed");
+ }
+ gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
+ gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, rttTexture, 0);
+
+ if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
+ console.log("Error: frame buffer status is not complete");
+ }
+
+ return {
+ copyProgram: copyProgram,
+ rttTexture: rttTexture,
+ rttFramebuffer: rttFramebuffer,
+ };
+}
+
+function _drawCanvas(
+ gl: WebGLRenderingContext,
+ canvasId: string,
+ time: number,
+ shaderProgram: WebGLProgram,
+ screenBuffer: WebGLBuffer
+) {
+ const canvas = document.getElementById(canvasId) as HTMLCanvasElement;
+ const width = canvas.width;
+ const height = canvas.height;
+
+ // vertex shader input
+ const aPosition = gl.getAttribLocation(shaderProgram, "position");
+ gl.enableVertexAttribArray(aPosition);
+ gl.bindBuffer(gl.ARRAY_BUFFER, screenBuffer);
+ gl.vertexAttribPointer(aPosition, 3, gl.FLOAT, false, 0, 0);
+
+ // fragment shader input
+
+ // viewport
+ gl.viewport(0, 0, width, height);
+ gl.uniform2iv(gl.getUniformLocation(shaderProgram, "viewport"), [width, height]);
+
+ // time
+ gl.uniform1f(gl.getUniformLocation(shaderProgram, "time"), time);
+
+ // draw scene, just 2 triangles, 3 vertices each: 2*3=6
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
+}
+
+function _drawPathTracerCanvas(
+ gl: WebGLRenderingContext,
+ canvasId: string,
+ rttFramebuffer: WebGLFramebuffer | null,
+ rttTexture: WebGLTexture | null,
+ shaderProgram: WebGLProgram,
+ copyProgram: WebGLProgram,
+ currentFrame: number
+) {
+ const canvas = document.getElementById(canvasId) as HTMLCanvasElement;
+ const width = canvas.width;
+ const height = canvas.height;
+
+ // render in the texture first
+ gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
+ gl.viewport(0, 0, width, height);
+
+ // use the actual shading program
+ gl.useProgram(shaderProgram);
+
+ // set some input variables
+ gl.uniform1i(gl.getUniformLocation(shaderProgram, "globalSeed"), Math.random() * 32768.0);
+ gl.uniform1i(gl.getUniformLocation(shaderProgram, "baseSampleIndex"), currentFrame);
+ gl.uniform2i(gl.getUniformLocation(shaderProgram, "resolution"), width, height);
+
+ // blend previous with current
+ gl.disable(gl.DEPTH_TEST);
+ gl.enable(gl.BLEND);
+ gl.blendFunc(gl.ONE, gl.ONE);
+
+ // draw
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
+
+ /////////////////////////////////////////////////////
+
+ // switch to actual canvas
+ gl.bindFramebuffer(gl.FRAMEBUFFER, null);
+
+ // use the accumulated texture
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, rttTexture);
+ gl.viewport(0, 0, width, height);
+
+ // use the program that divides the accumulated color by the number of samples
+ gl.useProgram(copyProgram);
+
+ // set input variables, including sampleCount, which is used to divide the accumulated color sum
+ gl.uniform1i(gl.getUniformLocation(copyProgram, "sampleCount"), currentFrame + 1);
+ gl.uniform1i(gl.getUniformLocation(copyProgram, "radianceTexture"), 0);
+ gl.uniform2i(gl.getUniformLocation(copyProgram, "resolution"), width, height);
+
+ // disable blending
+ gl.disable(gl.DEPTH_TEST);
+ gl.disable(gl.BLEND);
+
+ // actual drawing
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
+}
+
+// utilities
+
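+// Seconds elapsed within the current hour (wraps every hour), used to drive time-based shader animation.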
+function _getTime() {
+ const date = new Date();
+ return date.getMinutes() * 60.0 + date.getSeconds() + date.getMilliseconds() / 1000.0;
+}
+
+function _getIdentityMatrix4f() {
+ const arr = new Float32Array(16);
+ arr[0] = arr[5] = arr[10] = arr[15] = 1.0;
+ return arr;
+}
+
+function _activeSolutionsToCode(solutions: Set<string>) {
+ let code = "";
+ for (const solution of solutions) {
+ switch (String(solution)) {
+ // cwk 1
+ case "SOLUTION_FRESNEL":
+ code += "#define SOLUTION_FRESNEL\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_REFLECTION_REFRACTION":
+ code += "#define SOLUTION_REFLECTION_REFRACTION\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_SHADOW":
+ code += "#define SOLUTION_SHADOW\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_CYLINDER_AND_PLANE":
+ code += "#define SOLUTION_CYLINDER_AND_PLANE\n";
+ break;
+ // cwk 2
+ case "SOLUTION_AALIAS":
+ code += "#define SOLUTION_AALIAS\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_ZBUFFERING":
+ code += "#define SOLUTION_ZBUFFERING\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_INTERPOLATION":
+ code += "#define SOLUTION_INTERPOLATION\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_CLIPPING":
+ code += "#define SOLUTION_CLIPPING\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_RASTERIZATION":
+ code += "#define SOLUTION_RASTERIZATION\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_PROJECTION":
+ code += "#define SOLUTION_PROJECTION\n";
+ break;
+ // cwk 3
+ case "SOLUTION_MIS":
+ code += "#define LIGHT_INTENSITY_WEIGHTED\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_IS":
+ code += "#define SOLUTION_IS\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_AA":
+ code += "#define SOLUTION_AA\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_HALTON":
+ code += "#define SOLUTION_HALTON\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_THROUGHPUT":
+ code += "#define SOLUTION_THROUGHPUT\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_BOUNCE":
+ code += "#define SOLUTION_BOUNCE\n";
+ // eslint-disable-next-line no-fallthrough
+ case "SOLUTION_LIGHT":
+ code += "#define SOLUTION_LIGHT\n";
+ break;
+ // cwk 3 custom
+ case "CHANGE_LIGHT_POSITION":
+ code += "#define CHANGE_LIGHT_POSITION\n";
+ break;
+ case "CHANGE_LIGHT_INTENSITY":
+ code += "#define CHANGE_LIGHT_INTENSITY\n";
+ break;
+
+ default:
+ console.warn(`Provided solution is not one of available options: ${solution}`);
+ break;
+ }
+ }
+ return code;
+}
+
+export class ShaderFramework {
+ private gl: WebGLRenderingContext | null = null;
+ private screenBuffer: WebGLBuffer | null = null;
+ private shaderProgram: WebGLProgram | null = null;
+
+ // for path tracer
+ private rttFramebuffer: WebGLFramebuffer | null = null;
+ private rttTexture: WebGLTexture | null = null;
+ private copyProgram: WebGLProgram | null = null;
+ private currentFrame = 0;
+ private maxFrame = 0;
+
+ // interaction
+ private running = false;
+ private frameCallback: (() => void) | null = null;
+
+ private activeSolutions: Set<string> = new Set();
+
+ public constructor(
+ private readonly canvasId: string,
+ private readonly vertexShaderId: string,
+ private readonly fragmentShaderId: string,
+ private readonly isPathTracer = false,
+ private readonly tonemapShaderId: string | null = null
+ ) {}
+
+ async _initializePathTracer() {
+ if (!this.gl) {
+ console.log("Error: gl is not initialized");
+ return;
+ }
+ if (!this.screenBuffer) {
+ console.error("Error: screen buffer is not initialized");
+ return;
+ }
+ if (!this.tonemapShaderId) {
+ console.error("Error: tonemapShaderId is not initialized");
+ return;
+ }
+ const res = await _initPathTracer(
+ this.gl,
+ this.canvasId,
+ this.screenBuffer,
+ this.vertexShaderId,
+ this.tonemapShaderId
+ );
+ if (!res) {
+ console.error("Error: path tracer initialization failed");
+ return;
+ }
+ this.rttFramebuffer = res.rttFramebuffer;
+ this.rttTexture = res.rttTexture;
+ this.copyProgram = res.copyProgram;
+ }
+
+ _incrementFrame() {
+ this.currentFrame += 1;
+ }
+
+ _run() {
+ if (this.running && this.getCurrentFrame() < this.getMaxFrame()) {
+ this.drawCanvas();
+ this._incrementFrame();
+ this.frameCallback && this.frameCallback();
+ window.requestAnimationFrame(() => this._run());
+ }
+ }
+
+ _resetGL() {
+ this.gl = _initGL(this.canvasId);
+ }
+
+ _resetBuffer() {
+ if (!this.gl) {
+ console.error("Error: gl is not initialized");
+ return;
+ }
+ this.screenBuffer = _initScreenBuffer(this.gl);
+ }
+
+ async _resetShaders() {
+ const customFragmentShaderCode = _activeSolutionsToCode(this.activeSolutions);
+ if (!this.gl) {
+ console.error("Error: gl is not initialized");
+ return;
+ }
+ if (!this.screenBuffer) {
+ console.error("Error: screen buffer is not initialized");
+ return;
+ }
+ this.shaderProgram = await _initShaders(
+ this.gl,
+ this.vertexShaderId,
+ this.fragmentShaderId,
+ customFragmentShaderCode,
+ this.screenBuffer
+ );
+ }
+
+ //
+ // function for framework user
+ //
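+ // Typical usage (a sketch; ShaderRenderer.tsx drives the framework this way):
+ //   const fw = new ShaderFramework("courseworkCanvas", "cwk1-vertex-shader", "cwk1-fragment-shader");
+ //   fw.addSolution("SOLUTION_CYLINDER_AND_PLANE");
+ //   await fw.initialize();
+ //   fw.start(1); // the path tracer passes a large maxFrame so that samples accumulate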
+
+ start(maxFrame = 1) {
+ this.setCurrentFrame(0);
+ this.setMaxFrame(maxFrame);
+
+ this.running = true;
+ this._run();
+ }
+
+ stop() {
+ this.running = false;
+ }
+
+ continue() {
+ this.running = true;
+ this._run();
+ }
+
+ restart() {
+ this.stop();
+ this.initialize().then(() => this.start(this.getMaxFrame()));
+ }
+
+ getCurrentFrame() {
+ return this.currentFrame;
+ }
+
+ getMaxFrame() {
+ return this.maxFrame;
+ }
+
+ setMaxFrame(maxFrame: number) {
+ this.maxFrame = maxFrame;
+ }
+
+ setCurrentFrame(frame: number) {
+ this.currentFrame = frame;
+ }
+
+ setFrameCallback(callback: () => void) {
+ this.frameCallback = callback;
+ }
+
+ addSolution(solution: string) {
+ this.activeSolutions.add(solution);
+ }
+
+ hasSolution(solution: string) {
+ return this.activeSolutions.has(solution);
+ }
+
+ clearSolutions() {
+ this.activeSolutions.clear();
+ }
+
+ toggleSolution(solution: string) {
+ if (this.hasSolution(solution)) {
+ this.removeSolution(solution);
+ } else {
+ this.addSolution(solution);
+ }
+ }
+
+ removeSolution(solution: string) {
+ this.activeSolutions.delete(solution);
+ }
+
+ async initialize() {
+ this._resetGL();
+ this._resetBuffer();
+ await this._resetShaders();
+ if (!this.gl) {
+ console.error("Error: gl is not initialized");
+ return;
+ }
+ this.gl.clearColor(0.5, 0.5, 0.5, 1.0); // default gray background
+ this.gl.clear(this.gl.COLOR_BUFFER_BIT);
+
+ if (this.isPathTracer) {
+ await this._initializePathTracer();
+ }
+ }
+
+ drawCanvas() {
+ if (!this.gl) {
+ console.error("Error: gl is not initialized");
+ return;
+ }
+ if (!this.shaderProgram) {
+ console.error("Error: shader program is not initialized");
+ return;
+ }
+ if (!this.screenBuffer) {
+ console.error("Error: screen buffer is not initialized");
+ return;
+ }
+
+ if (this.isPathTracer) {
+ if (this.copyProgram === null) {
+ console.error("Error: copy program is not initialized");
+ return;
+ }
+ _drawPathTracerCanvas(
+ this.gl,
+ this.canvasId,
+ this.rttFramebuffer,
+ this.rttTexture,
+ this.shaderProgram,
+ this.copyProgram,
+ this.getCurrentFrame()
+ );
+ } else {
+ _drawCanvas(this.gl, this.canvasId, _getTime(), this.shaderProgram, this.screenBuffer);
+ }
+ }
+}
diff --git a/src/public/messages/hi.json b/src/public/messages/hi.json
index 62dd8d7..e94aaea 100644
--- a/src/public/messages/hi.json
+++ b/src/public/messages/hi.json
@@ -70,6 +70,7 @@
"header.home": "घर",
"header.more": "अधिक",
"header.rss": "आरएसएस",
+ "header.shader": "शेडर",
"header.source": "मेरा github",
"header.theme.dark": "अँधेरा",
"header.theme.light": "रोशनी",
diff --git a/src/public/messages/iw.json b/src/public/messages/iw.json
index b9bb075..af40a26 100644
--- a/src/public/messages/iw.json
+++ b/src/public/messages/iw.json
@@ -70,6 +70,7 @@
"header.home": "בית",
"header.more": "יותר",
"header.rss": "RSS",
+ "header.shader": "שאדר",
"header.source": "מָקוֹר",
"header.theme.dark": "אפל",
"header.theme.light": "אוֹר",
diff --git a/src/public/messages/jp.json b/src/public/messages/jp.json
index fc1146f..76b4905 100644
--- a/src/public/messages/jp.json
+++ b/src/public/messages/jp.json
@@ -70,6 +70,7 @@
"header.home": "家",
"header.more": "もっと",
"header.rss": "RSS",
+ "header.shader": "シェーダー",
"header.source": "ソース",
"header.theme.dark": "暗い",
"header.theme.light": "ライト",
diff --git a/src/public/messages/tw.json b/src/public/messages/tw.json
index ec3b5c3..059fe21 100644
--- a/src/public/messages/tw.json
+++ b/src/public/messages/tw.json
@@ -70,6 +70,7 @@
"header.home": "家",
"header.more": "更多",
"header.rss": "訂閱服務",
+ "header.shader": "著色器",
"header.source": "來源",
"header.theme.dark": "黑暗的",
"header.theme.light": "光",
diff --git a/src/public/messages/zh.json b/src/public/messages/zh.json
index 0c4837c..6fa3bbb 100644
--- a/src/public/messages/zh.json
+++ b/src/public/messages/zh.json
@@ -70,6 +70,7 @@
"header.home": "主页",
"header.more": "更多",
"header.rss": "RSS",
+ "header.shader": "着色器",
"header.source": "源码",
"header.theme.dark": "黑暗",
"header.theme.light": "光明",
diff --git a/src/public/shaders/cwk1/fragmentShader.glsl b/src/public/shaders/cwk1/fragmentShader.glsl
new file mode 100644
index 0000000..4829e3c
--- /dev/null
+++ b/src/public/shaders/cwk1/fragmentShader.glsl
@@ -0,0 +1,675 @@
+
+// #define SOLUTION_CYLINDER_AND_PLANE
+// #define SOLUTION_SHADOW
+// #define SOLUTION_REFLECTION_REFRACTION
+// #define SOLUTION_FRESNEL
+// #define SOLUTION_BLOB
+
+precision highp float;
+uniform ivec2 viewport;
+
+struct PointLight {
+ vec3 position;
+ vec3 color;
+};
+
+struct Material {
+ vec3 diffuse;
+ vec3 specular;
+ float glossiness;
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ float reflectionFactor;
+ float refractionIndex;
+ float refractionFactor;
+#endif
+};
+
+struct Sphere {
+ vec3 position;
+ float radius;
+ Material material;
+};
+
+struct Plane {
+ vec3 normal;
+ float d;
+ Material material;
+};
+
+struct Cylinder {
+ vec3 position;
+ vec3 direction;
+ float radius;
+ Material material;
+};
+
+const int blobSphereCount = 3;
+
+struct Blob {
+ float isoValue;
+ vec3 spherePositions[blobSphereCount];
+ Material material;
+};
+
+const int lightCount = 2;
+const int sphereCount = 3;
+const int planeCount = 1;
+const int cylinderCount = 2;
+
+struct Scene {
+ vec3 ambient;
+ PointLight[lightCount] lights;
+ Sphere[sphereCount] spheres;
+ Plane[planeCount] planes;
+ Cylinder[cylinderCount] cylinders;
+ Blob blob;
+};
+
+struct Ray {
+ vec3 origin;
+ vec3 direction;
+};
+
+// Contains all information pertaining to a ray/object intersection
+struct HitInfo {
+ bool hit;
+ float t;
+ vec3 position;
+ vec3 normal;
+ Material material;
+ bool enteringPrimitive;
+};
+
+HitInfo getEmptyHit() {
+ return HitInfo(
+ false,
+ 0.0,
+ vec3(0.0),
+ vec3(0.0),
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ Material(vec3(0.0), vec3(0.0), 0.0, 0.0, 0.0, 0.0), // default 1, 1 for empty hit
+#else
+ Material(vec3(0.0), vec3(0.0), 0.0),
+#endif
+ false);
+}
+
+// Sorts the two t values such that t1 is smaller than t2
+void sortT(inout float t1, inout float t2) {
+ // Make t1 the smaller t
+ if(t2 < t1) {
+ float temp = t1;
+ t1 = t2;
+ t2 = temp;
+ }
+}
+
+// Tests if t is in an interval
+bool isTInInterval(const float t, const float tMin, const float tMax) {
+ return t > tMin && t < tMax;
+}
+
+// Get the smallest t in an interval
+bool getSmallestTInInterval(float t0, float t1, const float tMin, const float tMax, inout float smallestTInInterval) {
+
+ sortT(t0, t1);
+
+ // As t0 is smaller, test this first
+ if(isTInInterval(t0, tMin, tMax)) {
+ smallestTInInterval = t0;
+ return true;
+ }
+
+ // If t0 was not in the interval, still t1 could be
+ if(isTInInterval(t1, tMin, tMax)) {
+ smallestTInInterval = t1;
+ return true;
+ }
+
+ // None was
+ return false;
+}
+
+HitInfo intersectSphere(const Ray ray, const Sphere sphere, const float tMin, const float tMax) {
+
+ vec3 to_sphere = ray.origin - sphere.position;
+
+ float a = dot(ray.direction, ray.direction);
+ float b = 2.0 * dot(ray.direction, to_sphere);
+ float c = dot(to_sphere, to_sphere) - sphere.radius * sphere.radius;
+ float D = b * b - 4.0 * a * c;
+ if (D > 0.0)
+ {
+ float t0 = (-b - sqrt(D)) / (2.0 * a);
+ float t1 = (-b + sqrt(D)) / (2.0 * a);
+
+ float smallestTInInterval;
+ if(!getSmallestTInInterval(t0, t1, tMin, tMax, smallestTInInterval)) {
+ return getEmptyHit();
+ }
+
+ vec3 hitPosition = ray.origin + smallestTInInterval * ray.direction;
+
+ //Checking if we're inside the sphere by checking if the ray's origin is inside. If we are, then the normal
+ //at the intersection surface points towards the center. Otherwise, if we are outside the sphere, then the normal
+ //at the intersection surface points outwards from the sphere's center. This is important for refraction.
+ vec3 normal =
+ length(ray.origin - sphere.position) < sphere.radius + 0.001?
+ -normalize(hitPosition - sphere.position):
+ normalize(hitPosition - sphere.position);
+
+ //Checking if we're inside the sphere by checking if the ray's origin is inside,
+ // but this time for IOR bookkeeping.
+ //If we are inside, set a flag to say we're leaving. If we are outside, set the flag to say we're entering.
+ //This is also important for refraction.
+ bool enteringPrimitive =
+ length(ray.origin - sphere.position) < sphere.radius + 0.001 ?
+ false:
+ true;
+
+ return HitInfo(
+ true,
+ smallestTInInterval,
+ hitPosition,
+ normal,
+ sphere.material,
+ enteringPrimitive);
+ }
+ return getEmptyHit();
+}
+
+HitInfo intersectPlane(const Ray ray,const Plane plane, const float tMin, const float tMax) {
+#ifdef SOLUTION_CYLINDER_AND_PLANE
+ float d_n = dot(ray.direction, plane.normal);
+ if (d_n == 0.0) {
+ // plane parallel to ray
+ return getEmptyHit();
+ }
+ vec3 planePosition = plane.normal * plane.d;
+ float t = (dot(planePosition, plane.normal) - dot(ray.origin, plane.normal)) / d_n;
+
+ vec3 normal;
+ bool enteringPrimitive;
+ if (ray.origin.y > planePosition.y) { // if hitting from above
+ normal = plane.normal;
+ //enteringPrimitive = true;
+ } else {
+ normal = -plane.normal;
+ //enteringPrimitive = false;
+ }
+ if (t >= 0.0 && isTInInterval(t, tMin, tMax)) {
+ return HitInfo(
+ true,
+ t,
+ ray.origin + t * ray.direction,
+ normal,
+ plane.material,
+ false
+ );
+ }
+#endif
+ return getEmptyHit();
+}
+
+float lengthSquared(vec3 x) {
+ return dot(x, x);
+}
+
+HitInfo intersectCylinder(const Ray ray, const Cylinder cylinder, const float tMin, const float tMax) {
+#ifdef SOLUTION_CYLINDER_AND_PLANE
+ // pa = cylinder position
+ // va = cylinder direction
+ // r = radius
+ // p = ray position
+ // v = ray direction
+ // t = ray t
+
+ // a = (v-(v,va)va)^2
+ // b =2(v-(v,va)va, delta_p - (delta_p, va)va)
+ // c = (delta_p - (delta_p, va)va)^2 -r^2
+ // delta_p = p-pa
+
+ //struct Cylinder {
+ // vec3 position;
+ // vec3 direction;
+ // float radius;
+ // Material material;
+ //};
+
+ vec3 delta_p = ray.origin - cylinder.position;
+ float vva = dot(ray.direction, cylinder.direction);
+ vec3 vvvava = ray.direction - (vva * cylinder.direction);
+ float a = dot(vvvava, vvvava);
+ float b = 2.0 * dot(vvvava, delta_p - dot(delta_p, cylinder.direction) * cylinder.direction);
+
+ vec3 dpdpvava = delta_p - dot(delta_p, cylinder.direction) * cylinder.direction;
+ float c = dot(dpdpvava, dpdpvava) - pow(cylinder.radius, 2.0);
+
+ float determinant = (b * b) - (4.0 * a * c);
+ if (determinant > 0.0)
+ {
+ float t0 = (-b - sqrt(determinant)) / (2.0 * a);
+ float t1 = (-b + sqrt(determinant)) / (2.0 * a);
+
+ float smallestTInInterval;
+ if(!getSmallestTInInterval(t0, t1, tMin, tMax, smallestTInInterval)) {
+ return getEmptyHit();
+ }
+ vec3 hitPosition = ray.origin + smallestTInInterval * ray.direction;
+ vec3 hitPositionOnAxis = cylinder.position + dot((hitPosition-cylinder.position), cylinder.direction) * cylinder.direction;
+ vec3 axisToHitPosition = hitPosition - hitPositionOnAxis;
+ vec3 normal = normalize(axisToHitPosition);
+ bool enteringPrimitive = length(ray.origin - hitPositionOnAxis) >= (cylinder.radius + 0.001);
+
+ return HitInfo(
+ true,
+ smallestTInInterval,
+ hitPosition,
+ normal,
+ cylinder.material,
+ enteringPrimitive
+ );
+ }
+
+#endif
+ return getEmptyHit();
+}
+
+uniform float time;
+
+HitInfo intersectBlob(const Ray ray, const Blob blob, const float tMin, const float tMax) {
+#ifdef SOLUTION_BLOB
+#else
+ // Put your blob intersection code here!
+#endif
+ return getEmptyHit();
+}
+
+HitInfo getBetterHitInfo(const HitInfo oldHitInfo, const HitInfo newHitInfo) {
+ if(newHitInfo.hit)
+ if(newHitInfo.t < oldHitInfo.t) // No need to test for the interval, this has to be done per-primitive
+ return newHitInfo;
+ return oldHitInfo;
+}
+
+HitInfo intersectScene(const Scene scene, const Ray ray, const float tMin, const float tMax) {
+ HitInfo bestHitInfo;
+ bestHitInfo.t = tMax;
+ bestHitInfo.hit = false;
+
+ bool blobSolo = false;
+#ifdef SOLUTION_BLOB
+ blobSolo = true;
+#endif
+
+ if(blobSolo) {
+ bestHitInfo = getBetterHitInfo(bestHitInfo, intersectBlob(ray, scene.blob, tMin, tMax));
+ } else {
+ for (int i = 0; i < planeCount; ++i) {
+ bestHitInfo = getBetterHitInfo(bestHitInfo, intersectPlane(ray, scene.planes[i], tMin, tMax));
+ }
+
+ for (int i = 0; i < sphereCount; ++i) {
+ bestHitInfo = getBetterHitInfo(bestHitInfo, intersectSphere(ray, scene.spheres[i], tMin, tMax));
+ }
+
+ for (int i = 0; i < cylinderCount; ++i) {
+ bestHitInfo = getBetterHitInfo(bestHitInfo, intersectCylinder(ray, scene.cylinders[i], tMin, tMax));
+ }
+ }
+
+ return bestHitInfo;
+}
+
+vec3 shadeFromLight(
+ const Scene scene,
+ const Ray ray,
+ const HitInfo hit_info,
+ const PointLight light)
+{
+ vec3 hitToLight = light.position - hit_info.position;
+
+ vec3 lightDirection = normalize(hitToLight);
+ vec3 viewDirection = normalize(hit_info.position - ray.origin);
+ vec3 reflectedDirection = reflect(viewDirection, hit_info.normal);
+ float diffuse_term = max(0.0, dot(lightDirection, hit_info.normal));
+ float specular_term = pow(max(0.0, dot(lightDirection, reflectedDirection)), hit_info.material.glossiness);
+
+#ifdef SOLUTION_SHADOW
+ float visibility = 1.0;
+ if (hit_info.hit) {
+ Ray hitToLightRay;
+ hitToLightRay.origin = hit_info.position;
+ hitToLightRay.direction = lightDirection;
+ HitInfo hitToLightHitInfo = intersectScene(scene, hitToLightRay, 0.0001, length(hitToLight));
+ if (hitToLightHitInfo.hit) {
+ visibility = 0.0;
+ }
+ }
+#else
+ // Put your shadow test here
+ float visibility = 1.0;
+#endif
+
+ return visibility *
+ light.color * (
+ specular_term * hit_info.material.specular +
+ diffuse_term * hit_info.material.diffuse);
+}
+
+vec3 background(const Ray ray) {
+ // A simple implicit sky that can be used for the background
+ return vec3(0.2) + vec3(0.8, 0.6, 0.5) * max(0.0, ray.direction.y);
+}
+
+// It seems to be a WebGL issue that the third parameter needs to be inout instead of const on Tobias' machine
+vec3 shade(const Scene scene, const Ray ray, inout HitInfo hitInfo) {
+
+ if(!hitInfo.hit) {
+ return background(ray);
+ }
+
+ //return vec3(0.5) * hitInfo.normal + vec3(0.5);
+
+ vec3 shading = scene.ambient * hitInfo.material.diffuse;
+ for (int i = 0; i < lightCount; ++i) {
+ shading += shadeFromLight(scene, ray, hitInfo, scene.lights[i]);
+ }
+ return shading;
+}
+
+
+Ray getFragCoordRay(const vec2 frag_coord) {
+ float sensorDistance = 1.0;
+ vec2 sensorMin = vec2(-1, -0.5);
+ vec2 sensorMax = vec2(1, 0.5);
+ vec2 pixelSize = (sensorMax- sensorMin) / vec2(viewport.x, viewport.y);
+ vec3 origin = vec3(0, 0, sensorDistance);
+ vec3 direction = normalize(vec3(sensorMin + pixelSize * frag_coord, -sensorDistance));
+
+ return Ray(origin, direction);
+}
+
+void reflect(const in Ray ray, const in HitInfo hitInfo, out vec3 reflectDirection) {
+ vec3 hitToRay = normalize(ray.origin - hitInfo.position);
+ reflectDirection = -hitToRay + 2.0 * max(0.0, dot(hitInfo.normal, hitToRay)) * hitInfo.normal;
+}
+
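+// Snell's law refraction with eta12 = sourceIOR / destIOR; a negative value under the square root signals total internal reflection.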
+void refract(const in Ray ray, const in HitInfo hitInfo, const in float sourceIOR, const in float destIOR,
+ out bool totalInternalReflection, out vec3 refractDirection) {
+ vec3 hitToRay = normalize(ray.origin - hitInfo.position);
+
+ float eta12 = sourceIOR / destIOR;
+ float alpha = acos(dot(hitToRay, hitInfo.normal));
+ float root = 1.0 + pow(eta12, 2.0) * (pow(cos(alpha), 2.0) - 1.0);
+
+ totalInternalReflection = root < 0.0;
+ refractDirection = (-eta12 * hitToRay) + (hitInfo.normal * (eta12 * cos(alpha) - sqrt(root)));
+}
+
+float fresnel(const in Ray ray, const in HitInfo hitInfo, const in float eta2, const in float eta1) {
+#ifdef SOLUTION_FRESNEL
+ // https://www.scratchapixel.com/lessons/3d-basic-rendering/introduction-to-shading/reflection-refraction-fresnel
+ bool totalInternalReflection;
+ vec3 refractDirection;
+ refract(ray, hitInfo, eta2, eta1, totalInternalReflection, refractDirection);
+ if (totalInternalReflection) {
+ return 1.0;
+ }
+
+ vec3 rayToHit = normalize(hitInfo.position - ray.origin);
+ float cos1 = dot(rayToHit, hitInfo.normal);
+ float cos2 = dot(refractDirection, hitInfo.normal);
+
+ float reflectS = pow(((eta2 * cos1 - eta1 * cos2) / (eta2 * cos1 + eta1 * cos2)), 2.0);
+ float reflectP = pow(((eta1 * cos2 - eta2 * cos1) / (eta1 * cos2 + eta2 * cos1)), 2.0);
+
+ float reflectance = (reflectS + reflectP) / 2.0;
+
+ return reflectance;
+#else
+ // Put your code to compute the Fresnel effect here
+ return 1.0;
+#endif
+}
+
+vec3 colorForFragment(const Scene scene, const vec2 fragCoord) {
+
+ Ray initialRay = getFragCoordRay(fragCoord);
+ HitInfo initialHitInfo = intersectScene(scene, initialRay, 0.001, 10000.0);
+ vec3 result = shade(scene, initialRay, initialHitInfo);
+
+ Ray currentRay;
+ HitInfo currentHitInfo;
+
+ // Compute the reflection
+ currentRay = initialRay;
+ currentHitInfo = initialHitInfo;
+
+ // The initial strength of the reflection
+ float reflectionWeight = 1.0;
+
+ float currentIOR = 1.0;
+
+ const int maxReflectionStepCount = 2;
+ for(int i = 0; i < maxReflectionStepCount; i++) {
+
+ if(!currentHitInfo.hit) break;
+
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ reflectionWeight *= currentHitInfo.material.reflectionFactor;
+#else
+#endif
+
+#ifdef SOLUTION_FRESNEL
+ float reflectRate = fresnel(currentRay, currentHitInfo, currentIOR, currentHitInfo.material.refractionIndex);
+ reflectionWeight *= reflectRate;
+#else
+ // Replace with Fresnel code
+#endif
+
+ Ray nextRay;
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ nextRay.origin = currentHitInfo.position;
+ reflect(currentRay, currentHitInfo, nextRay.direction);
+ currentIOR = currentHitInfo.material.refractionIndex;
+#else
+#endif
+ currentRay = nextRay;
+
+ currentHitInfo = intersectScene(scene, currentRay, 0.001, 10000.0);
+
+ result += reflectionWeight * shade(scene, currentRay, currentHitInfo);
+ }
+
+ // Compute the refraction
+ currentRay = initialRay;
+ currentHitInfo = initialHitInfo;
+
+ // The initial medium is air
+ currentIOR = 1.0;
+
+ // The initial strength of the refraction.
+ float refractionWeight = 1.0;
+
+ const int maxRefractionStepCount = 2;
+ for(int i = 0; i < maxRefractionStepCount; i++) {
+
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ refractionWeight *= currentHitInfo.material.refractionFactor;
+#else
+ // Replace with Fresnel code
+#endif
+
+#ifdef SOLUTION_FRESNEL
+ float reflectRate = fresnel(currentRay, currentHitInfo, currentIOR, currentHitInfo.material.refractionIndex);
+ refractionWeight *= (1.0 - reflectRate);
+#else
+ // Put Fresnel code here
+#endif
+
+ Ray nextRay;
+
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ float sourceIOR = currentIOR;
+ float destIOR = currentHitInfo.enteringPrimitive ? currentHitInfo.material.refractionIndex : 1.0;
+
+ nextRay.origin = currentHitInfo.position;
+
+ bool totalInternalRefraction;
+ refract(currentRay, currentHitInfo, sourceIOR, destIOR, totalInternalRefraction, nextRay.direction);
+ if (totalInternalRefraction) {
+ break;
+ }
+
+ currentRay = nextRay;
+ currentIOR = destIOR;
+#else
+ // Put your code to compute the reflection ray and track the IOR
+#endif
+ currentHitInfo = intersectScene(scene, currentRay, 0.001, 10000.0);
+
+ result += refractionWeight * shade(scene, currentRay, currentHitInfo);
+
+ if(!currentHitInfo.hit) break;
+ }
+ return result;
+}
+
+
+Material getDefaultMaterial() {
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ return Material(vec3(0.3), vec3(0), 0.0, 0.0, 0.0, 0.0); // default reflection and refractive index = 1
+#else
+ return Material(vec3(0.3), vec3(0), 0.0);
+#endif
+}
+
+Material getPaperMaterial() {
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ return Material(vec3(0.7, 0.7, 0.7),
+ vec3(0, 0, 0),
+ 5.0,
+ 0.0,
+ 0.0, // https://www.engineeringtoolbox.com/light-material-reflecting-factor-d_1842.html
+ 0.0 // https://www.spiedigitallibrary.org/conference-proceedings-of-spie/6053/60530X/Determination-of-the-refractive-index-of-paper-with-clearing-agents/10.1117/12.660416.short
+ );
+#else
+ return Material(vec3(0.7, 0.7, 0.7), vec3(0, 0, 0), 5.0);
+#endif
+}
+
+Material getPlasticMaterial() {
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ return Material(vec3(0.9, 0.3, 0.1),
+ vec3(1.0),
+ 10.0,
+ 0.5, // lowered to make it closer to cwk pdf https://www.3rxing.org/question/91b899a2cd644117448.html#:~:text=%E4%BA%BA%E7%9A%84%E6%89%8B%E6%8E%8C%E5%BF%83%20%20%20%20%20%20%20%20%20%20%20%2075%25-,%E4%B8%8D%E9%80%8F%E6%98%8E%E7%99%BD%E8%89%B2%E5%A1%91%E6%96%99%20%20%20%20%20%20%20%2087%25,-%E7%99%BD%E7%94%BB%E7%BA%B8
+ 1.45, // https://www.addoptics.nl/optics-explained/refractive-index-of-plastic/#:~:text=Most%20plastics%20have%20a%20refractive,range%20of%201.3%20to%201.6.
+ 0.0
+ );
+#else
+ return Material(vec3(0.9, 0.3, 0.1), vec3(1.0), 10.0);
+#endif
+}
+
+Material getGlassMaterial() {
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ return Material(vec3(0.0),
+ vec3(0.0),
+ 5.0,
+ // http://www1.udel.edu/chem/sneal/sln_tchng/CHEM620/CHEM620/Chi_4._Light_at_Interfaces.html#:~:text=For%20glass%20and%20air%2C%20which,the%20reflection
+ 0.96,
+ 1.5,
+ 1.0
+ );
+#else
+ return Material(vec3(0.0), vec3(0.0), 5.0);
+#endif
+}
+
+Material getSteelMirrorMaterial() {
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ return Material(vec3(0.1),
+ vec3(0.3),
+ 20.0,
+ 0.5, // https://www.engineeringtoolbox.com/light-material-reflecting-factor-d_1842.html
+ 3.3, // https://www.filmetrics.com/refractive-index-database/Stainless-Steel#:~:text=For%20a%20typical%20sample%20of,nm%20are%202.75681%20and%203.792016.
+ 0.0
+ );
+#else
+ return Material(vec3(0.1), vec3(0.3), 20.0);
+#endif
+}
+
+Material getMetaMaterial() {
+#ifdef SOLUTION_REFLECTION_REFRACTION
+ return Material(vec3(0.1, 0.2, 0.5), vec3(0.3, 0.7, 0.9), 20.0, 0.0, 0.0, 0.0); // TODO: what is meta material
+#else
+ return Material(vec3(0.1, 0.2, 0.5), vec3(0.3, 0.7, 0.9), 20.0);
+#endif
+}
+
+vec3 tonemap(const vec3 radiance) {
+ const float monitorGamma = 2.0;
+ return pow(radiance, vec3(1.0 / monitorGamma));
+}
+
+void main() {
+ // Setup scene
+ Scene scene;
+ scene.ambient = vec3(0.12, 0.15, 0.2);
+
+ // Lights
+ scene.lights[0].position = vec3(5, 15, -5);
+ scene.lights[0].color = 0.5 * vec3(0.9, 0.5, 0.1);
+
+ scene.lights[1].position = vec3(-15, 5, 2);
+ scene.lights[1].color = 0.5 * vec3(0.1, 0.3, 1.0);
+
+ // Primitives
+ scene.spheres[0].position = vec3(10, -5, -16);
+ scene.spheres[0].radius = 6.0;
+ scene.spheres[0].material = getPaperMaterial();
+
+ scene.spheres[1].position = vec3(-7, -2, -13);
+ scene.spheres[1].radius = 4.0;
+ scene.spheres[1].material = getPlasticMaterial();
+
+ scene.spheres[2].position = vec3(0, 0.5, -5);
+ scene.spheres[2].radius = 2.0;
+ scene.spheres[2].material = getGlassMaterial();
+
+ scene.planes[0].normal = normalize(vec3(0, 0.8, 0));
+ scene.planes[0].d = -4.5;
+ scene.planes[0].material = getSteelMirrorMaterial();
+
+ scene.cylinders[0].position = vec3(-1, 1, -26);
+ scene.cylinders[0].direction = normalize(vec3(-2, 2, -1));
+ scene.cylinders[0].radius = 1.5;
+ scene.cylinders[0].material = getPaperMaterial();
+
+ scene.cylinders[1].position = vec3(4, 1, -5);
+ scene.cylinders[1].direction = normalize(vec3(1, 4, 1));
+ scene.cylinders[1].radius = 0.4;
+ scene.cylinders[1].material = getPlasticMaterial();
+
+ scene.blob.isoValue = 0.3;
+ scene.blob.material = getMetaMaterial();
+
+ const bool animateBlobs = false;
+ if(animateBlobs) {
+ scene.blob.spherePositions[0] = vec3(+3.0 + sin(time * 3.0) * 3.0, -1, -12);
+ scene.blob.spherePositions[1] = vec3(-1, +3, -12.0 + 3.0 * sin(time));
+ scene.blob.spherePositions[2] = vec3(-3, -1.0 + 3.0 * cos(2.0 * time), -9);
+ } else {
+ scene.blob.spherePositions[0] = vec3(+3, -0, -11);
+ scene.blob.spherePositions[1] = vec3(-1, +4, -12);
+ scene.blob.spherePositions[2] = vec3(-2, -2, -9);
+ }
+
+ // compute color for fragment
+ gl_FragColor.rgb = tonemap(colorForFragment(scene, gl_FragCoord.xy));
+ gl_FragColor.a = 1.0;
+
+}
diff --git a/src/public/shaders/cwk1/vertexShader.glsl b/src/public/shaders/cwk1/vertexShader.glsl
new file mode 100644
index 0000000..5d2afdb
--- /dev/null
+++ b/src/public/shaders/cwk1/vertexShader.glsl
@@ -0,0 +1,7 @@
+attribute vec3 position;
+uniform mat4 modelViewMatrix;
+uniform mat4 projectionMatrix;
+
+void main(void) {
+ gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
+}
diff --git a/src/public/shaders/cwk2/fragmentShader.glsl b/src/public/shaders/cwk2/fragmentShader.glsl
new file mode 100644
index 0000000..3e30cd8
--- /dev/null
+++ b/src/public/shaders/cwk2/fragmentShader.glsl
@@ -0,0 +1,558 @@
+// #define SOLUTION_PROJECTION
+// #define SOLUTION_RASTERIZATION
+// #define SOLUTION_CLIPPING
+// #define SOLUTION_INTERPOLATION
+// #define SOLUTION_ZBUFFERING
+// #define SOLUTION_AALIAS
+
+precision highp float;
+uniform float time;
+
+// Polygon / vertex functionality
+const int MAX_VERTEX_COUNT = 8;
+
+uniform ivec2 viewport;
+
+struct Vertex {
+ vec4 position;
+ vec3 color;
+};
+
+struct Polygon {
+ // Numbers of vertices, i.e., points in the polygon
+ int vertexCount;
+ // The vertices themselves
+ Vertex vertices[MAX_VERTEX_COUNT];
+};
+
+// Appends a vertex to a polygon
+void appendVertexToPolygon(inout Polygon polygon, Vertex element) {
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i == polygon.vertexCount) {
+ polygon.vertices[i] = element;
+ }
+ }
+ polygon.vertexCount++;
+}
+
+// Copy Polygon source to Polygon destination
+void copyPolygon(inout Polygon destination, Polygon source) {
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ destination.vertices[i] = source.vertices[i];
+ }
+ destination.vertexCount = source.vertexCount;
+}
+
+// Get the i-th vertex from a polygon; asking for an index one past the last wraps back to the first
+Vertex getWrappedPolygonVertex(Polygon polygon, int index) {
+ if (index >= polygon.vertexCount) index -= polygon.vertexCount;
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i == index) return polygon.vertices[i];
+ }
+}
+
+// Creates an empty polygon
+void makeEmptyPolygon(out Polygon polygon) {
+ polygon.vertexCount = 0;
+}
+
+
+// SOLUTION_RASTERIZATION and culling part
+
+#define INNER_SIDE 0
+#define OUTER_SIDE 1
+
+// Assuming a clockwise (vertex-wise) polygon, returns whether the input point
+// is on the inner or outer side of the edge (ab)
+int edge(vec2 point, Vertex a, Vertex b) {
+#ifdef SOLUTION_RASTERIZATION
+ // TODO
+
+ // assumed vertex a is the starting point
+ // we have a -> b and a -> point
+ // The z component of the cross product ab x ap tells us which side of the edge the point is on:
+ // if z is negative, the point lies on the inner side of edge (ab) for a clockwise polygon;
+ // otherwise it lies on the outer side.
+ vec2 ab = b.position.xy - a.position.xy;
+ vec2 ap = point.xy - a.position.xy;
+ float z = ab.x * ap.y - ab.y * ap.x; // z component of the cross product
+ if (z < 0.0) {
+ return INNER_SIDE;
+ }
+
+#endif
+ return OUTER_SIDE;
+}
+
+// Clipping part
+
+#define ENTERING 0
+#define LEAVING 1
+#define OUTSIDE 2
+#define INSIDE 3
+
+int getCrossType(Vertex poli1, Vertex poli2, Vertex wind1, Vertex wind2) {
+#ifdef SOLUTION_CLIPPING
+ // TODO
+
+ // check the position of both vertex poli1 -> poli2 with respect to the side defined by wind1 -> wind2
+ int side1 = edge(poli1.position.xy, wind1, wind2);
+ int side2 = edge(poli2.position.xy, wind1, wind2);
+
+ if (side1 == INNER_SIDE && side2 == INNER_SIDE) {
+ return INSIDE;
+ } else if (side1 == OUTER_SIDE && side2 == OUTER_SIDE) {
+ return OUTSIDE;
+ } else if (side1 == INNER_SIDE && side2 == OUTER_SIDE) {
+ return LEAVING;
+ } else {
+ return ENTERING;
+ }
+
+#else
+ return INSIDE;
+#endif
+}
+
+// This function assumes that the segments are not parallel or collinear.
+Vertex intersect2D(Vertex a, Vertex b, Vertex c, Vertex d) {
+#ifdef SOLUTION_CLIPPING
+ // TODO
+
+ // line equation 1
+ float m1 = (b.position.y - a.position.y) / (b.position.x - a.position.x);
+ float c1 = b.position.y - (m1 * b.position.x);
+
+ // line equation 2
+ float m2 = (d.position.y - c.position.y) / (d.position.x - c.position.x);
+ float c2 = d.position.y - (m2 * d.position.x);
+
+ // intersection
+ float intersect_x = (c2 - c1) / (m1 - m2);
+ float intersect_y = m1 * intersect_x + c1;
+
+ // calculate the z depth
+ float s = length(a.position.xy - b.position.xy);
+ float z1 = a.position.z;
+ float z2 = b.position.z;
+ float zt = 1.0 / ((1.0/z1) + s * ((1.0/z2) - (1.0/z1)));
+
+ return Vertex(vec4(intersect_x, intersect_y, zt, 1), a.color);
+
+#else
+ return a;
+#endif
+}
+
+void sutherlandHodgmanClip(Polygon unclipped, Polygon clipWindow, out Polygon result) {
+ Polygon clipped;
+ copyPolygon(clipped, unclipped);
+
+ // Loop over the clip window
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i >= clipWindow.vertexCount) break;
+
+ // Make a temporary copy of the current clipped polygon
+ Polygon oldClipped;
+ copyPolygon(oldClipped, clipped);
+
+ // Set the clipped polygon to be empty
+ makeEmptyPolygon(clipped);
+
+ // Loop over the current clipped polygon
+ for (int j = 0; j < MAX_VERTEX_COUNT; ++j) {
+ if (j >= oldClipped.vertexCount) break;
+
+ // Handle the j-th vertex of the clipped polygon. This should make use of the function
+ // intersect() to be implemented above.
+
+#ifdef SOLUTION_CLIPPING
+ // TODO
+ Vertex wind1 = getWrappedPolygonVertex(clipWindow, i);
+ Vertex wind2 = getWrappedPolygonVertex(clipWindow, i+1);
+ Vertex poli1 = getWrappedPolygonVertex(oldClipped, j);
+ Vertex poli2 = getWrappedPolygonVertex(oldClipped, j+1);
+
+ int crossType = getCrossType(poli1, poli2, wind1, wind2);
+
+ // handle the poli1 and intersection but not poli2, as it will be handled in the next iteration
+ if (crossType == INSIDE) {
+ appendVertexToPolygon(clipped, getWrappedPolygonVertex(oldClipped, j));
+ } else if (crossType == LEAVING) {
+ appendVertexToPolygon(clipped, getWrappedPolygonVertex(oldClipped, j));
+ appendVertexToPolygon(clipped, intersect2D(poli1, poli2, wind1, wind2));
+ } else if (crossType == ENTERING) {
+ appendVertexToPolygon(clipped, intersect2D(poli1, poli2, wind1, wind2));
+ }
+#else
+ appendVertexToPolygon(clipped, getWrappedPolygonVertex(oldClipped, j));
+#endif
+ }
+ }
+
+ // Copy the last version to the output
+ copyPolygon(result, clipped);
+}
+
+
+
+// Returns if a point is inside a polygon or not
+bool isPointInPolygon(vec2 point, Polygon polygon) {
+ // Don't evaluate empty polygons
+ if (polygon.vertexCount == 0) return false;
+ // Check against each edge of the polygon
+ bool rasterise = true;
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i < polygon.vertexCount) {
+#ifdef SOLUTION_RASTERIZATION
+ // TODO
+ Vertex a = getWrappedPolygonVertex(polygon, i);
+ Vertex b = getWrappedPolygonVertex(polygon, i+1);
+	// if the point lies on the outer side of any edge, reject it immediately
+ if (edge(point, a, b) == OUTER_SIDE) {
+ return false;
+ }
+#else
+ rasterise = false;
+#endif
+ }
+ }
+ return rasterise;
+}
+
+bool isPointOnPolygonVertex(vec2 point, Polygon polygon) {
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i < polygon.vertexCount) {
+ ivec2 pixelDifference = ivec2(abs(polygon.vertices[i].position.xy - point) * vec2(viewport));
+ int pointSize = viewport.x / 200;
+ if( pixelDifference.x <= pointSize && pixelDifference.y <= pointSize) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+float triangleArea(vec2 a, vec2 b, vec2 c) {
+ // https://en.wikipedia.org/wiki/Heron%27s_formula
+ float ab = length(a - b);
+ float bc = length(b - c);
+ float ca = length(c - a);
+ float s = (ab + bc + ca) / 2.0;
+ return sqrt(max(0.0, s * (s - ab) * (s - bc) * (s - ca)));
+}
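+
+// Sanity check for triangleArea (illustrative values): for a = (0,0), b = (1,0), c = (0,1)
+//   ab = 1, bc = sqrt(2), ca = 1, s = (2 + sqrt(2)) / 2 ~= 1.707
+//   area = sqrt(1.707 * 0.707 * 0.293 * 0.707) ~= sqrt(0.25) = 0.5
+// which matches the expected area of half a unit square.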
+
+Vertex interpolateVertex(vec2 point, Polygon polygon) {
+ vec3 colorSum = vec3(0.0);
+ vec4 positionSum = vec4(0.0);
+ float weight_sum = 0.0;
+ float weight_corr_sum = 0.0;
+
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i < polygon.vertexCount) {
+#if defined(SOLUTION_INTERPOLATION) || defined(SOLUTION_ZBUFFERING)
+ // TODO
+ Vertex v = getWrappedPolygonVertex(polygon, i);
+ Vertex v1 = getWrappedPolygonVertex(polygon, i+1);
+ Vertex v2 = getWrappedPolygonVertex(polygon, i+2);
+ float weight = triangleArea(point, v1.position.xy, v2.position.xy);
+ float area = triangleArea(v.position.xy, v1.position.xy, v2.position.xy);
+ weight_sum += weight;
+ weight_corr_sum += weight / v.position.z;
+#endif
+
+#ifdef SOLUTION_ZBUFFERING
+ // TODO
+
+	// not dividing by z matches the PDF reference more closely, but the perspective-correct divide (commented out below) should probably be used
+ //positionSum += v.position * weight / v.position.z;
+ positionSum += v.position * weight;
+#endif
+
+#ifdef SOLUTION_INTERPOLATION
+ // TODO
+ colorSum += v.color * weight / v.position.z;
+#endif
+ }
+ }
+ Vertex result = polygon.vertices[0];
+
+#ifdef SOLUTION_INTERPOLATION
+ // TODO
+ result.color = colorSum / weight_corr_sum;
+#endif
+
+#ifdef SOLUTION_ZBUFFERING
+ // TODO
+
+	// dividing by weight_sum matches the PDF reference more closely, but the perspective-corrected weight_corr_sum should probably be used
+ result.position = positionSum / weight_sum;
+#endif
+
+ return result;
+}
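+
+// For reference, the interpolation above follows the perspective-correct barycentric form
+// (each weight w_i is the sub-triangle area opposite vertex i):
+//   color(p) = sum_i(w_i * c_i / z_i) / sum_i(w_i / z_i)
+// while the position uses the plain area-weighted average sum_i(w_i * p_i) / sum_i(w_i), as noted above.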
+
+// Projection part
+
+// Used to generate a projection matrix.
+mat4 computeProjectionMatrix() {
+ mat4 projectionMatrix = mat4(1);
+
+ float aspect = float(viewport.x) / float(viewport.y);
+ float imageDistance = 2.0;
+
+ float xMin = -0.5;
+ float yMin = -0.5;
+ float xMax = 0.5;
+ float yMax = 0.5;
+
+#ifdef SOLUTION_PROJECTION
+
+ float w = xMax - xMin;
+ float h = yMax - yMin;
+ float x = xMax + xMin;
+ float y = yMax + yMin;
+ float d = imageDistance;
+
+ // scale to match aspect
+ w *= aspect;
+
+	// the matrices below are written transposed because GLSL constructors are column-major (vertices are multiplied on the left as row vectors)
+
+ // step 1 & 2
+ mat4 translate = mat4(
+ 1, 0, 0, -x,
+ 0, 1, 0, -y,
+ 0, 0, 1, -d,
+ 0, 0, 0, 1
+ );
+
+ // step 3
+	mat4 shear = mat4(
+ d/w, 0,-x/w, 0,
+ 0, d/h,-y/h, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1
+ );
+
+ // step 4
+ mat4 scale = mat4(
+ 1.0/d, 0, 0, 0,
+ 0, 1.0/d, 0, 0,
+ 0, 0, 1.0/d, 0,
+ 0, 0, 0, 1
+ );
+
+ // perspective
+ mat4 perspective = mat4(
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0,1.0/d, 1
+ );
+
+	// with row vectors, the transforms apply from left to right
+	projectionMatrix = translate * shear * scale * perspective;
+#endif
+
+ return projectionMatrix;
+}
+
+// Used to generate a simple "look-at" camera.
+mat4 computeViewMatrix(vec3 VRP, vec3 TP, vec3 VUV) {
+ mat4 viewMatrix = mat4(1);
+
+#ifdef SOLUTION_PROJECTION
+ // TODO
+
+ // find the "look at" vector
+ vec3 VPN = TP - VRP;
+
+ // compute n, u, v that define the camera position
+ vec3 n = normalize(VPN);
+ vec3 u = normalize(cross(VUV, n));
+ vec3 v = cross(n, u);
+ float qu = dot(VRP, u);
+ float qv = dot(VRP, v);
+ float qn = dot(VRP, n);
+
+ viewMatrix = mat4(
+ u[0], u[1], u[2],-qu,
+ v[0], v[1], v[2],-qv,
+ n[0], n[1], n[2],-qn,
+ 0, 0, 0, 1
+ );
+
+#endif
+ return viewMatrix;
+}
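+
+// Since the matrices above are stored transposed, vertices are multiplied on the left as row vectors,
+// e.g. (sketch, matching the usage in projectVertexPosition below):
+//   vec4 cameraSpace = worldSpace * viewMatrix;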
+
+vec3 getCameraPosition() {
+ //return 10.0 * vec3(sin(time * 1.3), 0, cos(time * 1.3));
+ return 10.0 * vec3(sin(0.0), 0, cos(0.0));
+}
+
+// Takes a single input vertex and projects it using the input view and projection matrices
+vec4 projectVertexPosition(vec4 position) {
+
+ // Set the parameters for the look-at camera.
+ vec3 TP = vec3(0, 0, 0);
+ vec3 VRP = getCameraPosition();
+ vec3 VUV = vec3(0, 1, 0);
+
+ // Compute the view matrix.
+ mat4 viewMatrix = computeViewMatrix(VRP, TP, VUV);
+
+ // Compute the projection matrix.
+ mat4 projectionMatrix = computeProjectionMatrix();
+
+#ifdef SOLUTION_PROJECTION
+ // TODO
+
+ // view first then projection
+ position = position * viewMatrix * projectionMatrix;
+
+ // perspective division
+ position = position / position.w;
+
+ return position;
+#else
+ return position;
+#endif
+}
+
+// Projects all the vertices of a polygon
+void projectPolygon(inout Polygon projectedPolygon, Polygon polygon) {
+ copyPolygon(projectedPolygon, polygon);
+ for (int i = 0; i < MAX_VERTEX_COUNT; ++i) {
+ if (i < polygon.vertexCount) {
+ projectedPolygon.vertices[i].position = projectVertexPosition(polygon.vertices[i].position);
+ }
+ }
+}
+
+// Draws a polygon by projecting, clipping, rasterizing and interpolating it
+void drawPolygon(
+ vec2 point,
+ Polygon clipWindow,
+ Polygon oldPolygon,
+ inout vec3 color,
+ inout float depth)
+{
+ Polygon projectedPolygon;
+ projectPolygon(projectedPolygon, oldPolygon);
+
+ Polygon clippedPolygon;
+ sutherlandHodgmanClip(projectedPolygon, clipWindow, clippedPolygon);
+
+ if (isPointInPolygon(point, clippedPolygon)) {
+
+ Vertex interpolatedVertex = interpolateVertex(point, projectedPolygon);
+
+#if defined(SOLUTION_ZBUFFERING)
+ // TODO: Put your code to handle z buffering here
+ if (depth > interpolatedVertex.position.z) {
+	// we found an interpolated vertex nearer to the camera, so overwrite the color and depth
+ color = interpolatedVertex.color;
+ depth = interpolatedVertex.position.z;
+ }
+#else
+ color = interpolatedVertex.color;
+ depth = interpolatedVertex.position.z;
+#endif
+ }
+
+ if (isPointOnPolygonVertex(point, clippedPolygon)) {
+ color = vec3(1);
+ }
+}
+
+// Main function calls
+
+void drawScene(vec2 pixelCoord, inout vec3 color) {
+ color = vec3(0.3, 0.3, 0.3);
+
+ // Convert from GL pixel coordinates 0..N-1 to our screen coordinates -1..1
+ vec2 point = 2.0 * pixelCoord / vec2(viewport) - vec2(1.0);
+
+ Polygon clipWindow;
+ clipWindow.vertices[0].position = vec4(-0.65, 0.95, 1.0, 1.0);
+ clipWindow.vertices[1].position = vec4( 0.65, 0.75, 1.0, 1.0);
+ clipWindow.vertices[2].position = vec4( 0.75, -0.65, 1.0, 1.0);
+ clipWindow.vertices[3].position = vec4(-0.75, -0.85, 1.0, 1.0);
+ clipWindow.vertexCount = 4;
+
+ // Draw the area outside the clip region to be dark
+ color = isPointInPolygon(point, clipWindow) ? vec3(0.5) : color;
+
+ const int triangleCount = 2;
+ Polygon triangles[triangleCount];
+
+ triangles[0].vertices[0].position = vec4(-3, -2, 0.0, 1.0);
+ triangles[0].vertices[1].position = vec4(4, 0, 3.0, 1.0);
+ triangles[0].vertices[2].position = vec4(-1, 2, 0.0, 1.0);
+ triangles[0].vertices[0].color = vec3(1.0, 1.0, 0.2);
+ triangles[0].vertices[1].color = vec3(0.8, 0.8, 0.8);
+ triangles[0].vertices[2].color = vec3(0.5, 0.2, 0.5);
+ triangles[0].vertexCount = 3;
+
+ triangles[1].vertices[0].position = vec4(3.0, 2.0, -2.0, 1.0);
+ triangles[1].vertices[2].position = vec4(0.0, -2.0, 3.0, 1.0);
+ triangles[1].vertices[1].position = vec4(-1.0, 2.0, 4.0, 1.0);
+ triangles[1].vertices[1].color = vec3(0.2, 1.0, 0.1);
+ triangles[1].vertices[2].color = vec3(1.0, 1.0, 1.0);
+ triangles[1].vertices[0].color = vec3(0.1, 0.2, 1.0);
+ triangles[1].vertexCount = 3;
+
+ float depth = 10000.0;
+ // Project and draw all the triangles
+ for (int i = 0; i < triangleCount; i++) {
+ drawPolygon(point, clipWindow, triangles[i], color, depth);
+ }
+}
+
+vec3 drawOffsetedPixel(in float xOffset, in float yOffset) {
+ vec2 coord = gl_FragCoord.xy;
+ coord.x += xOffset;
+ coord.y += yOffset;
+ vec3 color;
+ drawScene(coord, color);
+ return color;
+}
+
+void main() {
+
+ vec3 color = vec3(0);
+
+#ifdef SOLUTION_AALIAS
+
+	// This is the "sample rate" for each side of the pixel,
+	// as the samples form a square grid. Use this to trade quality against speed.
+	// samplesPerSide = 1: no antialiasing
+	// samplesPerSide < 1: undefined
+	// samplesPerSide > 1: antialiasing with samplesPerSide samples per side, so the actual number of samples is samplesPerSide * samplesPerSide
+ const int samplesPerSide = 3;
+
+	// average samples taken on a regular sub-pixel grid using the code below
+ float stepSize = 1.0 / float(samplesPerSide);
+
+ vec3 colorSum = vec3(0);
+ for (int i = 0; i < samplesPerSide; i++) {
+ float xLocal = float(i) * stepSize + stepSize / 2.0;
+ for (int j = 0; j < samplesPerSide; j++) {
+ float yLocal = float(j) * stepSize + stepSize / 2.0;
+ colorSum += drawOffsetedPixel(xLocal-0.5, yLocal-0.5);
+ }
+ }
+
+ color = colorSum / (float(samplesPerSide) * float(samplesPerSide));
+
+
+#else
+ drawScene(gl_FragCoord.xy, color);
+#endif
+
+ gl_FragColor.rgb = color;
+ gl_FragColor.a = 1.0;
+}
\ No newline at end of file
diff --git a/src/public/shaders/cwk2/vertexShader.glsl b/src/public/shaders/cwk2/vertexShader.glsl
new file mode 100644
index 0000000..fbf9c48
--- /dev/null
+++ b/src/public/shaders/cwk2/vertexShader.glsl
@@ -0,0 +1,8 @@
+attribute vec3 position;
+
+uniform mat4 modelViewMatrix;
+uniform mat4 projectionMatrix;
+
+void main(void) {
+ gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
+}
\ No newline at end of file
diff --git a/src/public/shaders/cwk3/fragmentShader.glsl b/src/public/shaders/cwk3/fragmentShader.glsl
new file mode 100644
index 0000000..92003d1
--- /dev/null
+++ b/src/public/shaders/cwk3/fragmentShader.glsl
@@ -0,0 +1,904 @@
+// #define SOLUTION_LIGHT
+// #define SOLUTION_BOUNCE
+// #define SOLUTION_THROUGHPUT
+// #define SOLUTION_HALTON
+// #define SOLUTION_AA
+// #define SOLUTION_IS
+
+// this is a flag to turn on the unit test
+// the pixels on screen should converge to (0.5, 0.5, 0.5), which is a gray color.
+// This is because directions are converted to colors by remapping their range from (-1, 1) to (0, 1), so the convergence
+// can be observed smoothly (colors have no negative values, but directions do) and the expectation of each channel becomes 0.5.
+// It also tests that the sampled direction has a length of 1.0, using the lengthSquared method (the third method); if this fails the pixel is black.
+// The pdf is very ambiguous about which test it refers to, so both of the above tests are implemented.
+// Note that a unit test is meant to be deterministic, self-contained, quickly executed and automatable, which is not quite satisfied here.
+//#define TEST_RANDOM_DIRECTION
+
+// enables direct light sampling importance sampling (lights weighted by intensity/emissiveness in the pdf)
+// this method extends to multiple light sources and was verified with the spheres at very different positions;
+// you can use the flags below to test it.
+// #define LIGHT_INTENSITY_WEIGHTED
+
+// uncomment to return the direction of the first bounce's sample instead.
+// use this to test whether the direct light sampling importance sampling is really sampling the correct directions
+// some color references:
+// red: left
+// green: upwards
+// blue: backwards
+// black: downwards
+//#define LIGHT_DIRECTION_TEST
+
+// uncomment to change the light positions to verify the implementation;
+// this swaps the y coordinates of the two lights
+//#define CHANGE_LIGHT_POSITION
+
+// uncomment to change the light intensity to verify the implementation;
+// this reduces the intensity of the small ball (the left one),
+// an obvious observation being that it converges to black more slowly when LIGHT_DIRECTION_TEST is also turned on
+//#define CHANGE_LIGHT_INTENSITY
+
+precision highp float;
+
+#define M_PI 3.1415
+
+struct Material {
+#ifdef SOLUTION_LIGHT
+ float intensity;
+ vec3 emissiveness;
+#endif
+ vec3 diffuse;
+ vec3 specular;
+ float glossiness;
+};
+
+struct Sphere {
+ vec3 position;
+ float radius;
+ Material material;
+};
+
+struct Plane {
+ vec3 normal;
+ float d;
+ Material material;
+};
+
+const int sphereCount = 4;
+const int planeCount = 4;
+const int emittingSphereCount = 2;
+#ifdef SOLUTION_BOUNCE
+const int maxPathLength = 3;
+#else
+const int maxPathLength = 1;
+#endif
+
+struct Scene {
+ Sphere[sphereCount] spheres;
+ Plane[planeCount] planes;
+};
+
+struct Ray {
+ vec3 origin;
+ vec3 direction;
+};
+
+// Contains all information pertaining to a ray/object intersection
+struct HitInfo {
+ bool hit;
+ float t;
+ vec3 position;
+ vec3 normal;
+ Material material;
+};
+
+// Contains info to sample a direction and this directions probability
+struct DirectionSample {
+ vec3 direction;
+ float probability;
+};
+
+HitInfo getEmptyHit() {
+ Material emptyMaterial;
+#ifdef SOLUTION_LIGHT
+ emptyMaterial.emissiveness = vec3(0.0);
+#endif
+ emptyMaterial.diffuse = vec3(0.0);
+ emptyMaterial.specular = vec3(0.0);
+ emptyMaterial.glossiness = 0.0;
+ return HitInfo(false, 0.0, vec3(0.0), vec3(0.0), emptyMaterial);
+}
+
+// Sorts the two t values such that t1 is smaller than t2
+void sortT(inout float t1, inout float t2) {
+ // Make t1 the smaller t
+ if(t2 < t1) {
+ float temp = t1;
+ t1 = t2;
+ t2 = temp;
+ }
+}
+
+// Tests if t is in an interval
+bool isTInInterval(const float t, const float tMin, const float tMax) {
+ return t > tMin && t < tMax;
+}
+
+// Get the smallest t in an interval
+bool getSmallestTInInterval(float t0, float t1, const float tMin, const float tMax, inout float smallestTInInterval) {
+
+ sortT(t0, t1);
+
+ // As t0 is smaller, test this first
+ if(isTInInterval(t0, tMin, tMax)) {
+ smallestTInInterval = t0;
+ return true;
+ }
+
+ // If t0 was not in the interval, still t1 could be
+ if(isTInInterval(t1, tMin, tMax)) {
+ smallestTInInterval = t1;
+ return true;
+ }
+
+ // None was
+ return false;
+}
+
+HitInfo intersectSphere(const Ray ray, const Sphere sphere, const float tMin, const float tMax) {
+
+ vec3 to_sphere = ray.origin - sphere.position;
+
+ float a = dot(ray.direction, ray.direction);
+ float b = 2.0 * dot(ray.direction, to_sphere);
+ float c = dot(to_sphere, to_sphere) - sphere.radius * sphere.radius;
+ float D = b * b - 4.0 * a * c;
+ if (D > 0.0)
+ {
+ float t0 = (-b - sqrt(D)) / (2.0 * a);
+ float t1 = (-b + sqrt(D)) / (2.0 * a);
+
+ float smallestTInInterval;
+ if(!getSmallestTInInterval(t0, t1, tMin, tMax, smallestTInInterval)) {
+ return getEmptyHit();
+ }
+
+ vec3 hitPosition = ray.origin + smallestTInInterval * ray.direction;
+
+ vec3 normal =
+ length(ray.origin - sphere.position) < sphere.radius + 0.001?
+ -normalize(hitPosition - sphere.position) :
+ normalize(hitPosition - sphere.position);
+
+ return HitInfo(
+ true,
+ smallestTInInterval,
+ hitPosition,
+ normal,
+ sphere.material);
+ }
+ return getEmptyHit();
+}
+
+HitInfo intersectPlane(Ray ray, Plane plane) {
+ float t = -(dot(ray.origin, plane.normal) + plane.d) / dot(ray.direction, plane.normal);
+ vec3 hitPosition = ray.origin + t * ray.direction;
+ return HitInfo(
+ true,
+ t,
+ hitPosition,
+ normalize(plane.normal),
+ plane.material);
+ return getEmptyHit();
+}
+
+float lengthSquared(const vec3 x) {
+ return dot(x, x);
+}
+
+HitInfo intersectScene(Scene scene, Ray ray, const float tMin, const float tMax)
+{
+ HitInfo best_hit_info;
+ best_hit_info.t = tMax;
+ best_hit_info.hit = false;
+
+ for (int i = 0; i < sphereCount; ++i) {
+ Sphere sphere = scene.spheres[i];
+ HitInfo hit_info = intersectSphere(ray, sphere, tMin, tMax);
+
+ if( hit_info.hit &&
+ hit_info.t < best_hit_info.t &&
+ hit_info.t > tMin)
+ {
+ best_hit_info = hit_info;
+ }
+ }
+
+ for (int i = 0; i < planeCount; ++i) {
+ Plane plane = scene.planes[i];
+ HitInfo hit_info = intersectPlane(ray, plane);
+
+ if( hit_info.hit &&
+ hit_info.t < best_hit_info.t &&
+ hit_info.t > tMin)
+ {
+ best_hit_info = hit_info;
+ }
+ }
+
+ return best_hit_info;
+}
+
+// Converts a random integer in 15 bits to a float in (0, 1)
+float randomInetegerToRandomFloat(int i) {
+ return float(i) / 32768.0;
+}
+
+// Returns a random integer for every pixel and dimension that remains the same in all iterations
+int pixelIntegerSeed(const int dimensionIndex) {
+ vec3 p = vec3(gl_FragCoord.xy, dimensionIndex);
+ vec3 r = vec3(23.14069263277926, 2.665144142690225,7.358926345 );
+ return int(32768.0 * fract(cos(dot(p,r)) * 123456.0));
+}
+
+// Returns a random float for every pixel that remains the same in all iterations
+float pixelSeed(const int dimensionIndex) {
+ return randomInetegerToRandomFloat(pixelIntegerSeed(dimensionIndex));
+}
+
+// The global random seed of this iteration
+// It will be set to a new random value in each step
+uniform int globalSeed;
+int randomSeed;
+void initRandomSequence() {
+ randomSeed = globalSeed + pixelIntegerSeed(0);
+}
+
+// Computes integer x modulo y not available in most WEBGL SL implementations
+int mod(const int x, const int y) {
+ return int(float(x) - floor(float(x) / float(y)) * float(y));
+}
+
+// Returns the next integer in a pseudo-random sequence
+int rand() {
+ randomSeed = randomSeed * 1103515245 + 12345;
+ return mod(randomSeed / 65536, 32768);
+}
+
+// Returns the next float in this pixel's pseudo-random sequence
+float uniformRandom() {
+ return randomInetegerToRandomFloat(rand());
+}
+
+// Returns the ith prime number (the first 16 are supported; anything beyond falls back to 2)
+const int maxDimensionCount = 10;
+int prime(const int index) {
+ if(index == 0) return 2;
+ if(index == 1) return 3;
+ if(index == 2) return 5;
+ if(index == 3) return 7;
+ if(index == 4) return 11;
+ if(index == 5) return 13;
+ if(index == 6) return 17;
+ if(index == 7) return 19;
+ if(index == 8) return 23;
+ if(index == 9) return 29;
+ if(index == 10) return 31;
+ if(index == 11) return 37;
+ if(index == 12) return 41;
+ if(index == 13) return 43;
+ if(index == 14) return 47;
+ if(index == 15) return 53;
+ return 2;
+}
+
+#ifdef SOLUTION_HALTON
+#endif
+
+float halton(const int sampleIndex, const int dimensionIndex) {
+#ifdef SOLUTION_HALTON
+ // https://en.wikipedia.org/wiki/Halton_sequence
+ float b = float(prime(dimensionIndex));
+ float i = float(sampleIndex);
+
+ float f = 1.0;
+ float r = 0.0;
+
+	for(int _ = 0; _ >= 0; _++) { // effectively a while(true) loop
+		// like a while(true) loop, the termination condition is checked with a break at the top
+ if (i <= 0.0) {
+ break;
+ }
+		f /= b;             // move one digit further into the fractional part
+		r += f * mod(i, b); // take the current base-b digit of i and place it at that fractional position
+		i = floor(i / b);   // move to the next digit; this also guarantees the loop terminates, because i is divided by b > 1 every iteration
+ }
+
+	// The Halton sequence is a great way to generate quasi-random numbers: it covers the space evenly and avoids unlucky clustering.
+	// However, since the algorithm is deterministic per pixel,
+	// using the same sequence everywhere generates images with an undesired regular pattern.
+	// To avoid this, we simply add an offset that is unique to each individual pixel,
+	// so that every pixel has a different sample sequence.
+	// To do this, I use the pixelSeed method, which returns a random float for every pixel
+	// that remains the same in all iterations for a given dimension, so the sequence is only shifted by some amount
+	// without changing its pattern.
+	// Only the fractional part of the offset result is kept, because the sample should remain in (0, 1);
+	// values that overflow simply wrap around starting from zero, so always adding (rather than sometimes subtracting) the offset does not matter.
+ float offset = pixelSeed(dimensionIndex);
+ r = fract(r + offset);
+ return r;
+#else
+ // Put your implementation of halton in the #ifdef above
+ return 0.0;
+#endif
+}
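+
+// Worked example (illustrative): halton(5, 0) with base b = 2 and the per-pixel offset ignored.
+// 5 in base 2 is 101, and the loop mirrors those digits around the radix point:
+//   i = 5: f = 0.5,   r = 0.5 * 1 = 0.5,     i -> 2
+//   i = 2: f = 0.25,  r += 0.25 * 0 = 0.5,   i -> 1
+//   i = 1: f = 0.125, r += 0.125 * 1 = 0.625, i -> 0 (stop)
+// giving the radical inverse 0.101 (base 2) = 0.625 before the pixel offset is applied.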
+
+// This is the index of the sample controlled by the framework.
+// It increments by one in every call of this shader
+uniform int baseSampleIndex;
+
+// Returns a well-distributed number in (0,1) for the dimension dimensionIndex
+float sample(const int dimensionIndex) {
+#ifdef SOLUTION_HALTON
+ return halton(baseSampleIndex, dimensionIndex);
+#else
+ // Use the Halton sequence for variance reduction in the #ifdef above
+ return uniformRandom();
+#endif
+}
+
+// This is a helper function to sample two-dimensionally starting at dimension dimensionIndex
+vec2 sample2(const int dimensionIndex) {
+ return vec2(sample(dimensionIndex + 0), sample(dimensionIndex + 1));
+}
+
+vec3 sample3(const int dimensionIndex) {
+ return vec3(sample(dimensionIndex + 0), sample(dimensionIndex + 1), sample(dimensionIndex + 2));
+}
+
+// This is a register of all dimensions that we will want to sample.
+// Thanks to Iliyan Georgiev from Solid Angle for explaining proper housekeeping of sample dimensions in randomized Quasi-Monte Carlo
+//
+// So if we want to use lens sampling, we call sample(LENS_SAMPLE_DIMENSION).
+//
+// There are infinitely many path sampling dimensions.
+// These start at PATH_SAMPLE_DIMENSION.
+// The 2D sample pair for vertex i is at PATH_SAMPLE_DIMENSION + PATH_SAMPLE_DIMENSION_MULTIPLIER * i + 0
+#define ANTI_ALIAS_SAMPLE_DIMENSION 0
+#define LENS_SAMPLE_DIMENSION 2
+#define PATH_SAMPLE_DIMENSION 4
+
+// This is 2 for two dimensions and 2 as we use it for two purposes: NEE and path connection
+#define PATH_SAMPLE_DIMENSION_MULTIPLIER (2 * 2)
+
+vec3 getEmission(const Material material, const vec3 normal) {
+#ifdef SOLUTION_LIGHT
+
+ // Gamma:
+	// Gamma correction maps physical brightness to perceived brightness.
+	// The reason for it is that the human eye has greater sensitivity to differences in dark tones than in bright ones,
+	// and in our coursework it is defined by the power-law expression output = A * pow(input, gamma),
+	// where A is a constant defined by the luminance and gamma is an exponent less than 1 (the encoding gamma).
+
+ return material.emissiveness * material.intensity;
+#else
+ // This is wrong. It just returns the diffuse color so that you see something to be sure it is working.
+ return material.diffuse;
+#endif
+}
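+
+// Note: the gamma encoding described above is not applied here; it happens at display time in
+// tonemapShader.glsl, which maps the accumulated radiance through pow(scale * color, vec3(1.0 / gamma)) with gamma = 1.6.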
+
+vec3 getReflectance(const Material material, const vec3 normal, const vec3 inDirection, const vec3 outDirection) {
+#ifdef SOLUTION_THROUGHPUT
+	// physically correct (normalized) Phong model
+ float n = material.glossiness;
+ vec3 reflectDirection = normalize(reflect(inDirection, normal));
+
+ // for energy conservation
+ float normFactor = (n + 2.0) / (2.0 * M_PI);
+	// the reflected contribution falls off according to the Phong exponent n,
+	// depending on the angle between the scattered (outgoing) direction and the mirror reflection direction
+ float weight = pow(max(0.0, dot(outDirection, reflectDirection)), n);
+
+ return material.specular * normFactor * weight;
+#else
+ return vec3(1.0);
+#endif
+}
+
+vec3 getGeometricTerm(const Material material, const vec3 normal, const vec3 inDirection, const vec3 outDirection) {
+#ifdef SOLUTION_THROUGHPUT
+	// the geometric term is the cosine of the angle between the normal and the outgoing direction
+	// there is no need to check whether it is negative because that should not happen (apart from possible floating point error near the horizon, which was not observed), though it could be clamped if desired
+ return vec3(dot(normal, outDirection));
+#else
+ return vec3(1.0);
+#endif
+}
+
+vec3 sphericalToEuclidean(float theta, float phi) {
+ float x = sin(theta) * cos(phi);
+ float y = sin(theta) * sin(phi);
+ float z = cos(theta);
+ return vec3(x, y, z);
+}
+
+// returns true if the two given float numbers are equal within a threshold
+bool floatEqual(const in float value1, const in float value2) {
+ const float threshold = 0.0001;
+
+ return abs(value1 - value2) < threshold;
+}
+
+
+vec3 getRandomDirection(const int dimensionIndex) {
+
+#ifdef SOLUTION_BOUNCE
+
+ // The two logical parts are the sample2 and sphericalToEuclidean functions
+
+ vec2 xi = sample2(dimensionIndex);
+
+ float theta_polar = acos(2.0 * xi[0] - 1.0);
+ float phi_azimuthal = xi[1] * 2.0 * M_PI;
+
+ vec3 euclideanDirection = sphericalToEuclidean(theta_polar, phi_azimuthal);
+
+#ifdef TEST_RANDOM_DIRECTION
+ // also check out my test here: https://github.com/Redcxx/3d_sphere_random_sampling
+ if (!floatEqual(lengthSquared(euclideanDirection), 1.0)) {
+ // if we did not receive a unit vector, then give it black color,
+ return vec3(0.0);
+ }
+#endif
+
+ return euclideanDirection;
+#else
+ // Put your code to compute a random direction in 3D in the #ifdef above
+ return vec3(0);
+#endif
+}
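+
+// Why acos(2u - 1) gives a uniform direction on the sphere (derivation sketch):
+// the area element is proportional to sin(theta) d(theta) d(phi), so cos(theta) must be uniform in [-1, 1]
+// and phi uniform in [0, 2*pi). Setting cos(theta) = 2u - 1 with u uniform in (0, 1) achieves exactly that.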
+
+mat3 transpose(mat3 m) {
+ return mat3(
+ m[0][0], m[1][0], m[2][0],
+ m[0][1], m[1][1], m[2][1],
+ m[0][2], m[1][2], m[2][2]
+ );
+}
+
+// This function creates a matrix to transform from global space into a local space oriented around the normal.
+// Might be useful for importance sampling BRDF / the geometric term.
+mat3 makeLocalFrame(const vec3 normal) {
+#ifdef SOLUTION_IS
+
+	// find any axis for the local coordinate system that is perpendicular to the normal
+ vec3 perpendicular;
+ if (abs(normal.x) > abs(normal.y)) { // whether to use x or y to construct perpendicular axis
+ perpendicular = vec3(normal.z, 0, -normal.x) / sqrt(normal.x * normal.x + normal.z * normal.z);
+ } else {
+ perpendicular = vec3(0, -normal.z, normal.y) / sqrt(normal.y * normal.y + normal.z * normal.z);
+ }
+
+ // find the third axis by cross product
+ vec3 thirdAxis = cross(perpendicular, normal);
+
+	// use these three axes as the basis of a change-of-coordinates matrix between world space and the local frame oriented around the normal
+ return mat3(perpendicular, thirdAxis, normal);
+
+#else
+ return mat3(1.0);
+#endif
+}
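+
+// Usage sketch (matching sampleDirection below): a direction sampled around the +z axis
+// is rotated so that +z aligns with the surface normal:
+//   vec3 worldDirection = makeLocalFrame(normal) * localDirection;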
+
+// for importance sampling, we have 3 distributions: 2 spherical light sources + 1 cosine-weighted hemisphere around the normal
+#define N_DISTS 3
+
+// this function normalizes the given weights, so that they keep their relative ratios and sum to 1.0
+// effectively same as: weights /= sum(weights)
+void normalizeWeights(inout float[N_DISTS] weights) {
+ float sum = 0.0;
+ for (int i = 0; i < N_DISTS; i++) {
+ sum += weights[i];
+ }
+ for (int i = 0; i < N_DISTS; i++) {
+ weights[i] /= sum;
+ }
+}
+
+// this function samples an index according to the given weights, returning the chosen index
+int sampleByWeights(float[N_DISTS] weights, int dimensionIndex) {
+ float r = sample(dimensionIndex);
+
+	// find which weight interval the random number falls into
+ float accum = 0.0;
+ for (int i = 0; i < N_DISTS; i++) {
+ accum += weights[i];
+ if (r < accum) {
+ // it falls into this range
+ return i;
+ }
+ }
+
+	// should not happen: reaching here means the weights sum to less than 1.0, but they should sum to exactly 1.0
+ return -1;
+}
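+
+// Worked example (illustrative values): with weights = {0.5, 0.3, 0.2} and r = 0.6,
+// accum grows 0.5 -> 0.8, and r < accum first holds at i = 1, so index 1 is chosen;
+// each index is therefore picked with probability equal to its (normalized) weight.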
+
+// square the given number, for readability
+float square(float x) {
+ return x * x;
+}
+
+// sample a direction from samplePosition towards the sphere (uniformly over the cone the sphere subtends)
+vec3 sampleSphericalLight(const int dimensionIndex, const vec3 samplePosition, const Sphere sphere) {
+ vec2 xi = sample2(dimensionIndex);
+
+ float cosMaxAngle = sqrt(1.0 - square(sphere.radius / length(sphere.position - samplePosition)));
+ float theta = acos(1.0 - xi[0] + xi[0] * cosMaxAngle);
+ float phi = 2.0 * M_PI * xi[1];
+
+ return sphericalToEuclidean(theta, phi);
+}
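+
+// Derivation sketch: the sphere subtends a cone with cos(angle) in [cosMaxAngle, 1].
+// Sampling cos(theta) uniformly in that interval, cos(theta) = 1 - xi0 + xi0 * cosMaxAngle,
+// gives theta = acos(1 - xi0 + xi0 * cosMaxAngle), with constant pdf
+// 1 / (2 * pi * (1 - cosMaxAngle)) over the cone, matching sphereLightProbability below.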
+
+// sample a direction from the cosine-weighted hemisphere distribution around the +z axis
+vec3 sampleCosine(const int dimensionIndex) {
+ vec2 xi = sample2(dimensionIndex);
+
+ float theta = asin(sqrt(xi[0]));
+ float phi = 2.0 * M_PI * xi[1];
+
+ return sphericalToEuclidean(theta, phi);
+}
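+
+// Derivation sketch: for a cosine-weighted hemisphere the pdf is cos(theta) / pi,
+// so the marginal CDF of theta is sin^2(theta); inverting with a uniform xi0 gives
+// theta = asin(sqrt(xi0)), which is what is used above (and matches cosineProbability below).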
+
+// return the probability of the sampled direction under the spherical light sampling distribution,
+// or zero if the direction does not point towards the sphere
+float sphereLightProbability(vec3 sampledDirection, Sphere sphere, vec3 samplePosition) {
+
+	// tolerance: the sampled direction should stay within the cone of directions towards the light
+	const float tolerance = 1e-3;
+
+ float cosAngleMax = sqrt(1.0 - square(sphere.radius / length(samplePosition - sphere.position)));
+ vec3 lightNormal = normalize(sphere.position - samplePosition);
+
+	if (dot(lightNormal, sampledDirection) < (cosAngleMax - tolerance)) {
+ // not within direction towards the spherical light
+ return 0.0;
+ }
+
+ return 1.0 / (2.0 * M_PI * (1.0 - cosAngleMax));
+}
+
+// return the probability of the sampled direction under the cosine distribution around the given normal
+float cosineProbability(vec3 sampledDirection, vec3 normal) {
+ float cosTheta = dot(sampledDirection, normal);
+ return max(0.0, 1.0 / M_PI * cosTheta); // if less than 0.0, then it is not within the hemisphere
+}
+
+// return the mixture probability of the sampled direction (weighted so that it still sums to 1.0)
+float lightIntensityProbability(vec3 sampledDirection, Sphere sphere1, Sphere sphere2, float[3] weights, vec3 samplePosition, vec3 normal) {
+
+ float light1Prob = sphereLightProbability(sampledDirection, sphere1, samplePosition);
+ float light2Prob = sphereLightProbability(sampledDirection, sphere2, samplePosition);
+ float cosineProb = cosineProbability(sampledDirection, normal);
+
+ return light1Prob * weights[0] + light2Prob * weights[1] + cosineProb * weights[2];
+}
+
+DirectionSample sampleDirection(const vec3 normal, const int dimensionIndex, const vec3 hitPosition, const Scene scene) {
+
+ DirectionSample result;
+
+#ifdef SOLUTION_IS
+	// The most basic job expected from importance sampling is quicker convergence: the sampling distribution is closer to the actual distribution of the integrand,
+	// so more attention is paid to directions that contribute more light; we also divide by the pdf so the estimator keeps the same expectation and thus converges to the same result.
+	// With infinitely many samples there should not be any difference with or without importance sampling, because both have the same expectation.
+
+ #ifdef LIGHT_INTENSITY_WEIGHTED
+	// here I implement multiple-light importance sampling, integrated with the original cosine importance sampling to ensure completeness and convergence to the same result.
+
+ // first define the weights and normals for each light source
+ // weighted by intensity
+ float weights[3];
+ weights[0] = scene.spheres[0].material.intensity;
+ weights[1] = scene.spheres[1].material.intensity;
+	// dividing by 2 would make all three distributions equally weighted, but since the lights are more important than the cosine term, we divide by something larger than 2
+ weights[2] = (weights[0] + weights[1]) / 5.0;
+ normalizeWeights(weights); // ensure weights sum to 1.0 while keeping relative difference
+
+	// next pick one of the distributions according to the weights
+	// and set the frame normal towards the chosen light (or to the surface normal)
+ vec3 sampleNormal;
+ vec3 sampleDirection;
+
+	const int WEIGHT_SAMPLE_INDEX = maxPathLength * 2 + 6; // a sample dimension that is not used anywhere else: beyond the largest possible path sample dimension
+
+ // choose a distribution and sample accordingly
+ int sampledIndex = sampleByWeights(weights, WEIGHT_SAMPLE_INDEX);
+ if (sampledIndex == 0) {
+ sampleDirection = sampleSphericalLight(dimensionIndex, hitPosition, scene.spheres[0]);
+ sampleNormal = normalize(scene.spheres[0].position - hitPosition);
+
+ } else if(sampledIndex == 1) {
+ sampleDirection = sampleSphericalLight(dimensionIndex, hitPosition, scene.spheres[1]);
+ sampleNormal = normalize(scene.spheres[1].position - hitPosition);
+
+ } else {
+ sampleDirection = sampleCosine(dimensionIndex);
+ sampleNormal = normal;
+ }
+
+ result.direction = makeLocalFrame(sampleNormal) * sampleDirection;
+ result.probability = lightIntensityProbability(result.direction, scene.spheres[0], scene.spheres[1], weights, hitPosition, normal);
+
+ #else
+	// sample a cosine-weighted direction around +z and rotate it into the frame of the surface normal
+ result.direction = makeLocalFrame(normal) * sampleCosine(dimensionIndex);
+	// the pdf is cos(theta) / pi: 1/pi is the normalization constant of the probability integral over the hemisphere,
+	// and cos(theta) appears because the sampling is cosine weighted
+ result.probability = cosineProbability(result.direction, normal);
+ #endif
+
+#else
+	// Put your code to compute Importance Sampling in the #ifdef above
+ result.direction = getRandomDirection(dimensionIndex);
+ result.probability = 1.0;
+#endif
+ return result;
+}
+
+
+vec3 directionToColor(vec3 direction) {
+ return abs(direction);
+}
+
+vec3 samplePath(const Scene scene, const Ray initialRay) {
+
+#ifdef TEST_RANDOM_DIRECTION
+	// in this case the function under test is directionToColor: the direction is projected to a screen color so it can be checked visually
+ return directionToColor(getRandomDirection(0));
+#endif
+
+ // Initial result is black
+ vec3 result = vec3(0);
+
+ Ray incomingRay = initialRay;
+ vec3 throughput = vec3(1.0);
+ for(int i = 0; i < maxPathLength; i++) {
+ HitInfo hitInfo = intersectScene(scene, incomingRay, 0.001, 10000.0);
+
+ if(!hitInfo.hit) return result;
+
+ result += throughput * getEmission(hitInfo.material, hitInfo.normal);
+
+ Ray outgoingRay;
+ DirectionSample directionSample;
+#ifdef SOLUTION_BOUNCE
+ outgoingRay.origin = hitInfo.position;
+
+ int dimensionIndex = PATH_SAMPLE_DIMENSION + 2 * i;
+ directionSample = sampleDirection(hitInfo.normal, dimensionIndex, hitInfo.position, scene);
+ #ifdef LIGHT_DIRECTION_TEST
+ return directionSample.direction;
+ #endif
+ outgoingRay.direction = directionSample.direction;
+
+#else
+ // Put your code to compute the next ray in the #ifdef above
+#endif
+
+#ifdef SOLUTION_THROUGHPUT
+ vec3 geometryTerm = getGeometricTerm(hitInfo.material, hitInfo.normal, incomingRay.direction, outgoingRay.direction);
+ vec3 reflectance = getReflectance(hitInfo.material, hitInfo.normal, incomingRay.direction, outgoingRay.direction);
+
+ // we need to multiply by the geometry term due to the weakening of irradiance
+ vec3 specularTerm = reflectance * geometryTerm;
+	vec3 diffuseTerm = hitInfo.material.diffuse * geometryTerm / M_PI; // divide by pi to normalize the Lambertian diffuse term (as explained by Tobias)
+
+ // throughput is scaled by the sum of specular and diffuse term
+ throughput *= specularTerm + diffuseTerm;
+
+#else
+ // Compute the proper throughput in the #ifdef above
+ throughput *= 0.1;
+#endif
+
+#ifdef SOLUTION_IS
+	// divide by the probability of this sample so the estimator keeps the same expectation and converges to the same result;
+	// multiply by an extra 4*pi to match the framework, because it does not divide by the 1/(4*pi) pdf of the uniform distribution (which it really should)
+ throughput /= directionSample.probability * 4.0 * M_PI;
+#else
+ // Without Importance Sampling, there is nothing to do here.
+ // Put your Importance Sampling code in the #ifdef above
+#endif
+
+#ifdef SOLUTION_BOUNCE
+ incomingRay = outgoingRay;
+#else
+ // Put some handling of the next and the current ray in the #ifdef above
+#endif
+ }
+ return result;
+}
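+
+// In summary, the loop above accumulates the standard path tracing estimator:
+// at each bounce k the emission found there is weighted by the running throughput
+//   throughput_k = product over previous bounces of (BRDF * geometric term / pdf),
+// so result = sum_k throughput_k * emission_k.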
+
+uniform ivec2 resolution;
+Ray getFragCoordRay(const vec2 fragCoord) {
+
+ float sensorDistance = 1.0;
+ vec3 origin = vec3(0, 0, sensorDistance);
+ vec2 sensorMin = vec2(-1, -0.5);
+ vec2 sensorMax = vec2(1, 0.5);
+ vec2 pixelSize = (sensorMax - sensorMin) / vec2(resolution);
+ vec3 direction = normalize(vec3(sensorMin + pixelSize * fragCoord, -sensorDistance));
+
+ float apertureSize = 0.0;
+ float focalPlane = 100.0;
+ vec3 sensorPosition = origin + focalPlane * direction;
+ origin.xy += apertureSize * (sample2(LENS_SAMPLE_DIMENSION) - vec2(0.5));
+ direction = normalize(sensorPosition - origin);
+
+ return Ray(origin, direction);
+}
+
+vec3 colorForFragment(const Scene scene, const vec2 fragCoord) {
+ initRandomSequence();
+
+#ifdef SOLUTION_AA
+
+ #define USE_INFINITE_PRECISION
+ //#define USE_NxN_BOX_FILTER
+
+ #ifdef USE_INFINITE_PRECISION
+	// The framework averages the iterations for us, so we can just sample a random point within the current pixel area using a uniform offset:
+	// draw a random offset in the range (-0.5, 0.5) per axis, which over many iterations yields the average color over the pixel (as if rendered at a higher resolution).
+ vec2 offset = sample2(ANTI_ALIAS_SAMPLE_DIMENSION) - 0.5;
+ vec2 sampleCoord = fragCoord + offset;
+ #endif
+
+ #ifdef USE_NxN_BOX_FILTER
+	// alternatively, divide the current pixel into a (samplesPerSide x samplesPerSide)
+	// grid and choose one cell of the grid with equal probability (the definition of a box filter).
+
+	// This specifies the number of grid cells per side of the box filter around fragCoord.
+	// It should be a positive integer; samplesPerSide <= 0 is undefined.
+	// e.g.: 3 = 3x3 box filter
+ const int samplesPerSide = 3;
+
+	// first randomly pick a cell of the grid per axis; index is the normalized top-left coordinate of that cell,
+	// i.e. a value in [0, 1) in steps of 1 / samplesPerSide
+	vec2 index = floor(sample2(ANTI_ALIAS_SAMPLE_DIMENSION) * float(samplesPerSide)) / float(samplesPerSide);
+ // to find the offset needed to move from top left coordinate to middle coordinate,
+ // we first calculate the side length of each sample
+ float sideLengthPerSample = 1.0 / float(samplesPerSide);
+ // then we move the top left coordinate to middle by adding half of side length
+ vec2 coord = index + (sideLengthPerSample / 2.0);
+ // then we find the offset needed to move fragCoord to the sample coordinate,
+ // we have assumed the fragCoord is at the middle of a 1x1 pixels (0.5, 0.5)
+ vec2 offset = coord - 0.5;
+ // apply to offset to the sample coordinate
+ vec2 sampleCoord = fragCoord + offset;
+ #endif
+
+#else
+ // Put your anti-aliasing code in the #ifdef above
+ vec2 sampleCoord = fragCoord;
+#endif
+ return samplePath(scene, getFragCoordRay(sampleCoord));
+}
+
+
+void loadScene1(inout Scene scene) {
+
+#ifdef CHANGE_LIGHT_POSITION
+ scene.spheres[0].position = vec3(7, 3, -12);
+#else
+ scene.spheres[0].position = vec3(7, -2, -12);
+#endif
+ scene.spheres[0].radius = 2.0;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.spheres[0].material.emissiveness = vec3(0.9, 0.9, 0.5);
+ scene.spheres[0].material.intensity = 150.0;
+
+#endif
+
+ scene.spheres[0].material.diffuse = vec3(0.0);
+ scene.spheres[0].material.specular = vec3(0.0);
+ scene.spheres[0].material.glossiness = 10.0;
+
+#ifdef CHANGE_LIGHT_POSITION
+ scene.spheres[1].position = vec3(-8, -2, -13);
+#else
+ scene.spheres[1].position = vec3(-8, 4, -13);
+#endif
+ scene.spheres[1].radius = 1.0;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.spheres[1].material.emissiveness = vec3(0.8, 0.3, 0.1);
+
+#ifdef CHANGE_LIGHT_INTENSITY
+ scene.spheres[1].material.intensity = 30.0;
+#else
+ scene.spheres[1].material.intensity = 150.0;
+#endif
+#endif
+ scene.spheres[1].material.diffuse = vec3(0.0);
+ scene.spheres[1].material.specular = vec3(0.0);
+ scene.spheres[1].material.glossiness = 10.0;
+
+ scene.spheres[2].position = vec3(-2, -2, -12);
+ scene.spheres[2].radius = 3.0;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.spheres[2].material.emissiveness = vec3(0.0);
+ scene.spheres[2].material.intensity = 0.0;
+#endif
+ scene.spheres[2].material.diffuse = vec3(0.2, 0.5, 0.8);
+ scene.spheres[2].material.specular = vec3(0.8);
+ scene.spheres[2].material.glossiness = 40.0;
+
+ scene.spheres[3].position = vec3(3, -3.5, -14);
+ scene.spheres[3].radius = 1.0;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.spheres[3].material.emissiveness = vec3(0.0);
+ scene.spheres[3].material.intensity = 0.0;
+#endif
+ scene.spheres[3].material.diffuse = vec3(0.9, 0.8, 0.8);
+ scene.spheres[3].material.specular = vec3(1.0);
+ scene.spheres[3].material.glossiness = 10.0;
+
+ scene.planes[0].normal = vec3(0, 1, 0);
+ scene.planes[0].d = 4.5;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.planes[0].material.emissiveness = vec3(0.0);
+ scene.planes[0].material.intensity = 0.0;
+#endif
+ scene.planes[0].material.diffuse = vec3(0.8);
+ scene.planes[0].material.specular = vec3(0);
+ scene.planes[0].material.glossiness = 50.0;
+
+ scene.planes[1].normal = vec3(0, 0, 1);
+ scene.planes[1].d = 18.5;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.planes[1].material.emissiveness = vec3(0.0);
+ scene.planes[1].material.intensity = 0.0;
+#endif
+ scene.planes[1].material.diffuse = vec3(0.9, 0.6, 0.3);
+ scene.planes[1].material.specular = vec3(0.02);
+ scene.planes[1].material.glossiness = 3000.0;
+
+ scene.planes[2].normal = vec3(1, 0,0);
+ scene.planes[2].d = 10.0;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.planes[2].material.emissiveness = vec3(0.0);
+ scene.planes[2].material.intensity = 0.0;
+#endif
+ scene.planes[2].material.diffuse = vec3(0.2);
+ scene.planes[2].material.specular = vec3(0.1);
+ scene.planes[2].material.glossiness = 100.0;
+
+ scene.planes[3].normal = vec3(-1, 0,0);
+ scene.planes[3].d = 10.0;
+#ifdef SOLUTION_LIGHT
+ // Set the value of the missing property
+ scene.planes[3].material.emissiveness = vec3(0.0);
+ scene.planes[3].material.intensity = 0.0;
+#endif
+ scene.planes[3].material.diffuse = vec3(0.2);
+ scene.planes[3].material.specular = vec3(0.1);
+ scene.planes[3].material.glossiness = 100.0;
+}
+
+
+void main() {
+ // Setup scene
+ Scene scene;
+ loadScene1(scene);
+
+ // compute color for fragment
+ gl_FragColor.rgb = colorForFragment(scene, gl_FragCoord.xy);
+ gl_FragColor.a = 1.0;
+}
diff --git a/src/public/shaders/cwk3/tonemapShader.glsl b/src/public/shaders/cwk3/tonemapShader.glsl
new file mode 100644
index 0000000..92c79ee
--- /dev/null
+++ b/src/public/shaders/cwk3/tonemapShader.glsl
@@ -0,0 +1,20 @@
+precision highp float;
+
+uniform sampler2D radianceTexture;
+uniform int sampleCount;
+uniform ivec2 resolution;
+
+vec3 tonemap(vec3 color, float maxLuminance, float gamma) {
+ float luminance = length(color);
+ //float scale = luminance / maxLuminance;
+ float scale = luminance / (maxLuminance * luminance + 0.0000001);
+ return max(vec3(0.0), pow(scale * color, vec3(1.0 / gamma)));
+}
+
+void main(void) {
+ vec3 texel = texture2D(radianceTexture, gl_FragCoord.xy / vec2(resolution)).rgb;
+ vec3 radiance = texel / float(sampleCount);
+ gl_FragColor.rgb = tonemap(radiance, 1.0, 1.6);
+// gl_FragColor.rgb = radiance;
+ gl_FragColor.a = 1.0;
+}
\ No newline at end of file
diff --git a/src/public/shaders/cwk3/vertexShader.glsl b/src/public/shaders/cwk3/vertexShader.glsl
new file mode 100644
index 0000000..0f0347e
--- /dev/null
+++ b/src/public/shaders/cwk3/vertexShader.glsl
@@ -0,0 +1,5 @@
+attribute vec3 position;
+
+void main(void) {
+ gl_Position = vec4(position, 1.0);
+}
\ No newline at end of file
diff --git a/src/shared/i18n/type.ts b/src/shared/i18n/type.ts
index 1d54f15..8d135e2 100644
--- a/src/shared/i18n/type.ts
+++ b/src/shared/i18n/type.ts
@@ -70,6 +70,7 @@ export interface Messages {
"header.home": string;
"header.more": string;
"header.rss": string;
+ "header.shader": string;
"header.source": string;
"header.theme.dark": string;
"header.theme.light": string;