diff --git a/.all-contributorsrc b/.all-contributorsrc index dfe053dedf..3249483545 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -5351,6 +5351,16 @@ "bug", "doc" ] + }, + { + "login": "acamposuribe", + "name": "Alejandro", + "avatar_url": "https://avatars.githubusercontent.com/u/121937906?v=4", + "profile": "http://art.arqtistic.com", + "contributions": [ + "bug", + "code" + ] } ], "repoType": "github", diff --git a/README.md b/README.md index 7f7b73c7e9..1b7b3e9c14 100644 --- a/README.md +++ b/README.md @@ -915,6 +915,7 @@ We recognize all types of contributions. This project follows the [all-contribut perminder-17
[README contributors table (HTML markup stripped): perminder-17 (💻), Yash Pandey (🐛 💻), Aditya Deshpande (🐛 📖), and the newly added row for Alejandro (🐛 💻).]
diff --git a/contributor_docs/project_wrapups/README.md b/contributor_docs/project_wrapups/README.md
index b9e4aff5c7..d046bb8965 100644
--- a/contributor_docs/project_wrapups/README.md
+++ b/contributor_docs/project_wrapups/README.md
@@ -6,6 +6,7 @@ This folder contains wrap-up reports from p5.js related [Google Summer of Code](
 ## Google Summer of Code
 ### Google Summer of Code 2023
+* [Improving p5.js WebGL/3D functionality (ongoing)](https://github.com/processing/p5.js/blob/main/contributor_docs/project_wrapups/aryan_koundal_gsoc_2023.md) by Aryan Koundal, 2023
 * [Mobile/Responsive Design Implementation of p5.js Web Editor](https://github.com/processing/p5.js/blob/main/contributor_docs/project_wrapups/dewansDT_gsoc_2023.md) by Dewansh Thakur, 2023
 * [Friendly Error System(FES) and Documentation](https://github.com/processing/p5.js/blob/main/contributor_docs/project_wrapups/ayush23dash_gsoc_2023.md) by Ayush Shankar, 2023
 * [Supporting shader-based filters in p5js](https://github.com/processing/p5.js/blob/main/contributor_docs/project_wrapups/wong_gsoc_2023.md) by Justin Wong, 2023
diff --git a/contributor_docs/project_wrapups/aryan_koundal_gsoc_2023.md b/contributor_docs/project_wrapups/aryan_koundal_gsoc_2023.md
new file mode 100644
index 0000000000..8194878721
--- /dev/null
+++ b/contributor_docs/project_wrapups/aryan_koundal_gsoc_2023.md
@@ -0,0 +1,149 @@
+# Improving p5.js WebGL/3D functionality
+
+#### By Aryan Koundal (@AryanKoundal)
+#### Mentored by: Dave Pagurek (@davepagurek), Tanvi Kumar (@TanviKumar)
+
+## Overview
+
+In p5.js there are two render modes: P2D (the default renderer) and WEBGL, which
+enables the user to draw in 3D. p5.js currently implements eight ways to light a
+3D object with direct light sources. There is, however, another technique that
+lights objects not with direct lights but with the surrounding environment used
+as a single light source, generally called image-based lighting (IBL). This
+project adds that technique to p5.js: in simple words, one can drop in an image
+from real life to use as the surrounding light, rather than continuously tweaking
+the colors and positions of ambient, point, and other lights.
+
+The implementation breaks down into the following steps:
+
+1. Diffused IBL
+    1. Create the scene's irradiance.
+    1. Calculate the scene's irradiance for any direction.
+    1. Sample the scene's radiance from all possible directions within the
+       hemisphere, for each fragment.
+    1. Take a finite number of samples of the scene's irradiance and pre-compute them.
+    1. Create a pre-computed irradiance map.
+    1. Sample the pre-computed irradiance map to retrieve diffuse irradiance.
+1. Specular IBL
+    1. Pre-convolute the environment map.
+    1. Use the split sum approximation to divide the specular part into two further parts.
+    1. The first part is a prefiltered environment map.
+    1. Finalize the prefilter convolution using the low-discrepancy Hammersley
+       sequence and the sample generation defined.
+    1. Capture prefiltered mipmap levels, storing the results in a prefiltered
+       environment cubemap.
+    1. The second part is the BRDF.
+    1. Pre-calculate the BRDF's response given roughness and the angle between
+       n and wi as inputs. Precalculations are done using the BRDF's geometry
+       function and the Fresnel-Schlick approximation.
+    1. Store it in a 2D lookup texture known as the BRDF integration map.
+    1. Combine the prefiltered environment map and the BRDF integration map.
+1. Combine the diffused and specular parts.
+
+## Current State of the Project
+
+The project is completed, and here are some screenshots and videos demonstrating the work.
+
+#### Image of Example 1
+![example 1](https://github.com/processing/p5.js/assets/77334487/8d818df0-17a8-4332-b369-bcb79a5afc67)
+
+#### Video of Example 1
+https://github.com/processing/p5.js/assets/77334487/44b30c77-33c1-41d0-ada5-282424978832
+
+#### Image of Example 2
+![example 2](https://github.com/processing/p5.js/assets/77334487/e46f24b8-2713-4d2b-8392-744585da6a5b)
+
+#### Video of Example 2
+https://github.com/processing/p5.js/assets/77334487/a0a6b3f9-b25b-451f-961e-b2970cb9e907
+
+## Pull Request
+
+- Pull Request: https://github.com/processing/p5.js/pull/6255
+- Work Reports:
+  - [p5js Week 1 Report | GSoC’23 Processing Foundation](https://aryankoundal.medium.com/p5js-week-1-report-gsoc23-processing-foundation-9910934112e5)
+  - [p5js Week 2 Report | GSoC’23 Processing Foundation](https://aryankoundal.medium.com/p5js-week-2-report-gsoc23-processing-foundation-c8a36f5cf34)
+  - [p5js Week 3 Report | GSoC’23 Processing Foundation](https://aryankoundal.medium.com/p5js-week-3-report-gsoc23-processing-foundation-39043d0363e2)
+  - and more...
+
+## Work Done
+
+### src/webgl/light.js
+1. This includes the function `imageLight()`, which provides the user-facing functionality (a minimal usage sketch follows at the end of this report).
+1. Two examples for `imageLight()` are also included in this file.
+
+### src/webgl/p5.RendererGL.js
+1. This includes two new maps, `diffusedTextures` and `specularTextures`, for storing the generated p5.Graphics objects.
+1. The function `getDiffusedTexture()`, which gets called from `_setImageLightUniforms()`. Its job is to create a blurry image from the input non-blurry image, if it doesn't already exist.
+1. The function `getSpecularTexture()`, which also gets called from `_setImageLightUniforms()`. Its job is to create a prefiltered texture from the input non-blurry image, if it doesn't already exist.
+1. The function `_setImageLightUniforms()`, which creates the textures and sets them on the shader being processed.
+
+### src/webgl/p5.Texture.js
+1. This includes the `MipmapTexture` class.
+
+### src/webgl/shaders/imageLight.vert
+1. A vertex shader used only for the image light feature.
+
+### src/webgl/shaders/imageLightDiffused.frag
+1. The fragment shader for the calculations of the diffused part of the image light.
+
+### src/webgl/shaders/imageLightSpecular.frag
+1. The fragment shader for the calculations of the specular part of the image light.
+1. It has the functions `HammersleyNoBitOps()`, `VanDerCorput()` and `ImportanceSampleGGX()`, which are referenced from https://learnopengl.com/PBR/IBL/Specular-IBL.
+
+### src/webgl/shaders/lighting.glsl
+1. This includes `calculateImageDiffuse()` and `calculateImageSpecular()`, which actually calculate the diffuse and specular contributions from the image light. These are calculated only when `imageLight()` is called.
+
+### List of all shaders modified to improve WebGL compatibility with newer versions
+1. src/webgl/shaders/basic.frag
+1. src/webgl/shaders/immediate.vert
+1. src/webgl/shaders/light.vert
+1. src/webgl/shaders/light_texture.frag
+1. src/webgl/shaders/line.frag
+1. src/webgl/shaders/line.vert
+1. src/webgl/shaders/normal.frag
+1. src/webgl/shaders/normal.vert
+1. src/webgl/shaders/phong.frag
+1. src/webgl/shaders/phong.vert
+1. src/webgl/shaders/point.frag
+1. src/webgl/shaders/point.vert
+1. src/webgl/shaders/vertexColor.frag
+1. src/webgl/shaders/vertexColor.vert
+
+### Sketches I made, which might be useful
+1. Example 1 in imageLight: https://editor.p5js.org/aryan_koundal/sketches/OEsyk6uZI
+1. Example 2 in imageLight: https://editor.p5js.org/aryan_koundal/sketches/XjalPP7s4
+1. Final prototype, diffused + roughness (variable roughness): https://editor.p5js.org/aryan_koundal/sketches/4V790dB9Z
+1. Diffuse light final prototype: https://editor.p5js.org/aryan_koundal/sketches/q_Zg37OB2
+1. Mipmap specular roughness prototype (visible variation): https://editor.p5js.org/aryan_koundal/sketches/Bi2BN7zjK
+1. Specular prefilter convolution with chessboard: https://editor.p5js.org/aryan_koundal/sketches/qaIhxRZHI
+1. Prefilter convolution cubemap prototypes, 8 levels: https://editor.p5js.org/aryan_koundal/sketches/YJTSFKhqt
+1. Specular prefilter convolution with image: https://editor.p5js.org/aryan_koundal/sketches/8divJgdQxO
+1. Specular mipmaps prototype with colors: https://editor.p5js.org/aryan_koundal/sketches/3V9iDH8ax
+1. Sphere with point lights: https://editor.p5js.org/aryan_koundal/sketches/9NdeCtfdW
+1. Diffused texture shader prototype: https://editor.p5js.org/aryan_koundal/sketches/DcFDcOFUn
+1. Resolution pixelwise: https://editor.p5js.org/aryan_koundal/sketches/5RfDIy2I9
+1. Cube reflecting mountain spheremap: https://editor.p5js.org/aryan_koundal/sketches/2QS5-Fy0V
+1. Cube reflecting sky spheremap: https://editor.p5js.org/aryan_koundal/sketches/O1NwI4ufp
+
+## Further Improvement
+
+While working on this project, I realised there is still scope for improvement, such as increasing efficiency by using cubemaps instead of equirectangular environment maps for the diffused lighting. Using framebuffers instead of p5.Graphics would also improve efficiency and reduce the time taken to render the lights.
+
+## Conclusion
+
+My GSoC experience was genuinely transformative, leading to significant personal and professional
+development within the open-source domain. I successfully overcame initial obstacles related to
+setting up the project and navigating the codebase, steadily advancing through my contributions.
+Throughout this summer, I actively participated in the p5.js open-source community, surpassing my
+initial expectations for my first major open-source venture and further igniting my enthusiasm. I
+want to express my heartfelt appreciation to my mentors, Dave Pagurek (@davepagurek) and
+Tanvi Kumar (@TanviKumar), for their invaluable guidance in navigating coding challenges.
+Effective teamwork, collaboration, and communication proved essential in this open-source journey.
+I eagerly anticipate expanding my contributions and honing my skills. This summer has instilled
+a sense of purpose in me, and I'm profoundly grateful for the mentorship and knowledge gained.
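To make the workflow described in this report concrete, here is a minimal usage sketch. It is an illustration only, not part of the PR: it follows the pattern of the reference examples added to `src/webgl/light.js` below, and the asset path and geometry are placeholder choices.

```js
// Minimal image-based lighting sketch, assuming the imageLight() API added in
// this PR. The asset path and the torus geometry are illustrative choices.
let img;

function preload() {
  // Any spherical/equirectangular environment photo works here.
  img = loadImage('assets/outdoor_spheremap.jpg');
}

function setup() {
  createCanvas(200, 200, WEBGL);
}

function draw() {
  background(220);
  // Use the photo as the single surrounding light source.
  imageLight(img);
  // Diffuse response follows fill(); specular response follows
  // specularMaterial() and shininess().
  fill(255);
  specularMaterial(255);
  shininess(100);
  noStroke();
  rotateX(frameCount * 0.01);
  rotateY(frameCount * 0.01);
  torus(30, 10);
}
```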
diff --git a/docs/yuidoc-p5-theme/assets/outdoor_image.jpg b/docs/yuidoc-p5-theme/assets/outdoor_image.jpg
new file mode 100644
index 0000000000..3e60040595
Binary files /dev/null and b/docs/yuidoc-p5-theme/assets/outdoor_image.jpg differ
diff --git a/docs/yuidoc-p5-theme/assets/outdoor_spheremap.jpg b/docs/yuidoc-p5-theme/assets/outdoor_spheremap.jpg
new file mode 100644
index 0000000000..69b0bb17aa
Binary files /dev/null and b/docs/yuidoc-p5-theme/assets/outdoor_spheremap.jpg differ
diff --git a/package-lock.json b/package-lock.json
index 0d3ee5ac43..51095eb0b4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3915,9 +3915,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001399",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001399.tgz",
-      "integrity": "sha512-4vQ90tMKS+FkvuVWS5/QY1+d805ODxZiKFzsU8o/RsVJz49ZSRR8EjykLJbqhzdPgadbX6wB538wOzle3JniRA==",
+      "version": "1.0.30001549",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001549.tgz",
+      "integrity": "sha512-qRp48dPYSCYaP+KurZLhDYdVE+yEyht/3NlmcJgVQ2VMGt6JL36ndQ/7rgspdZsJuxDPFIo/OzBT2+GmIJ53BA==",
       "dev": true,
       "funding": [
         {
@@ -3927,6 +3927,10 @@
         {
           "type": "tidelift",
           "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
+        },
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/ai"
         }
       ]
     },
diff --git a/src/webgl/light.js b/src/webgl/light.js
index 3fbab91e7b..e8c8079248 100644
--- a/src/webgl/light.js
+++ b/src/webgl/light.js
@@ -494,6 +494,99 @@ p5.prototype.pointLight = function(v1, v2, v3, x, y, z) {
   return this;
 };
 
+/**
+ * Creates an image light with the given image.
+ *
+ * The image light simulates light from all directions. This is done by using
+ * the image as a texture for an infinitely large sphere light that contains,
+ * or encapsulates, the whole scene/drawing. Its effect varies with the
+ * shininess of the objects in the drawing.
+ * Under the hood it mainly does two types of calculations: the first creates
+ * an irradiance map from the environment map of the input image; the second
+ * manages reflections based on the shininess or roughness of the material
+ * used in the scene.
+ *
+ * Note: The image's diffuse light will be affected by fill()
+ * and the specular reflections will be affected by specularMaterial()
+ * and shininess().
+ *
+ * @method imageLight
+ * @param {p5.Image} img image for the background
+ * @example
+ * + * let img; + * function preload() { + * img = loadImage('assets/outdoor_image.jpg'); + * } + * function setup() { + * createCanvas(100, 100, WEBGL); + * } + * function draw() { + * background(220); + * imageMode(CENTER); + * push(); + * translate(0, 0, -200); + * scale(2); + * image(img, 0, 0, width, height); + * pop(); + * ambientLight(50); + * imageLight(img); + * specularMaterial(20); + * noStroke(); + * scale(2); + * rotateX(frameCount * 0.005); + * rotateY(frameCount * 0.005); + * box(25); + * } + * + *
+ * @alt + * image light example + * @example + *
+ * + * let img; + * let slider; + * function preload() { + * img = loadImage('assets/outdoor_spheremap.jpg'); + * } + * function setup() { + * createCanvas(100, 100, WEBGL); + * slider = createSlider(0, 400, 100, 1); + * slider.position(0, height); + * } + * function draw() { + * background(220); + * imageMode(CENTER); + * push(); + * translate(0, 0, -200); + * scale(2); + * image(img, 0, 0, width, height); + * pop(); + * ambientLight(50); + * imageLight(img); + * specularMaterial(20); + * shininess(slider.value()); + * noStroke(); + * scale(2); + * sphere(15); + * } + * + *
+ * @alt + * light with slider having a slider for varying roughness + */ +p5.prototype.imageLight = function(img){ + // activeImageLight property is checked by _setFillUniforms + // for sending uniforms to the fillshader + this._renderer.activeImageLight = img; + this._renderer._enableLighting = true; +}; + /** * Places an ambient and directional light in the scene. * The lights are set to ambientLight(128, 128, 128) and diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 90162f00bc..485d62038d 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -9,6 +9,7 @@ import './p5.Matrix'; import './p5.Framebuffer'; import { readFileSync } from 'fs'; import { join } from 'path'; +import { MipmapTexture } from './p5.Texture'; const STROKE_CAP_ENUM = {}; const STROKE_JOIN_ENUM = {}; @@ -67,17 +68,21 @@ const defaultShaders = { phongFrag: lightingShader + readFileSync(join(__dirname, '/shaders/phong.frag'), 'utf-8'), - fontVert: webgl2CompatibilityShader + - readFileSync(join(__dirname, '/shaders/font.vert'), 'utf-8'), - fontFrag: webgl2CompatibilityShader + - readFileSync(join(__dirname, '/shaders/font.frag'), 'utf-8'), + fontVert: readFileSync(join(__dirname, '/shaders/font.vert'), 'utf-8'), + fontFrag: readFileSync(join(__dirname, '/shaders/font.frag'), 'utf-8'), lineVert: lineDefs + readFileSync(join(__dirname, '/shaders/line.vert'), 'utf-8'), lineFrag: lineDefs + readFileSync(join(__dirname, '/shaders/line.frag'), 'utf-8'), pointVert: readFileSync(join(__dirname, '/shaders/point.vert'), 'utf-8'), - pointFrag: readFileSync(join(__dirname, '/shaders/point.frag'), 'utf-8') + pointFrag: readFileSync(join(__dirname, '/shaders/point.frag'), 'utf-8'), + imageLightVert : readFileSync(join(__dirname, '/shaders/imageLight.vert'), 'utf-8'), + imageLightDiffusedFrag : readFileSync(join(__dirname, '/shaders/imageLightDiffused.frag'), 'utf-8'), + imageLightSpecularFrag : readFileSync(join(__dirname, '/shaders/imageLightSpecular.frag'), 'utf-8') }; +for (const key in defaultShaders) { + defaultShaders[key] = webgl2CompatibilityShader + defaultShaders[key]; +} const filterShaderFrags = { [constants.GRAY]: @@ -468,6 +473,19 @@ p5.RendererGL = class RendererGL extends p5.Renderer { this.spotLightAngle = []; this.spotLightConc = []; + // This property contains the input image if imageLight function + // is called. + // activeImageLight is checked by _setFillUniforms + // for sending uniforms to the fillshader + this.activeImageLight = null; + // If activeImageLight property is Null, diffusedTextures, + // specularTextures are Empty. + // Else, it maps a p5.Image used by imageLight() to a p5.Graphics. 
+ // p5.Graphics for this are calculated in getDiffusedTexture function + this.diffusedTextures = new Map(); + // p5.Graphics for this are calculated in getSpecularTexture function + this.specularTextures = new Map(); + this.drawMode = constants.FILL; this.curFillColor = this._cachedFillStyle = [1, 1, 1, 1]; @@ -1165,12 +1183,11 @@ p5.RendererGL = class RendererGL extends p5.Renderer { erase(opacityFill, opacityStroke) { if (!this._isErasing) { - this._applyBlendMode(constants.REMOVE); + this._cachedBlendMode = this.curBlendMode; this._isErasing = true; - + this.blendMode(constants.REMOVE); this._cachedFillStyle = this.curFillColor.slice(); this.curFillColor = [1, 1, 1, opacityFill / 255]; - this._cachedStrokeStyle = this.curStrokeColor.slice(); this.curStrokeColor = [1, 1, 1, opacityStroke / 255]; } @@ -1178,10 +1195,14 @@ p5.RendererGL = class RendererGL extends p5.Renderer { noErase() { if (this._isErasing) { - this._isErasing = false; this.curFillColor = this._cachedFillStyle.slice(); this.curStrokeColor = this._cachedStrokeStyle.slice(); + // It's necessary to restore post-erase state. Needs rework + let temp = this.curBlendMode; this.blendMode(this._cachedBlendMode); + this._cachedBlendMode = temp; // If we don't do this, applyBlendMode() returns null + this._isErasing = false; + this._applyBlendMode(); // This sets _cachedBlendMode back to the original blendmode } } @@ -1606,6 +1627,9 @@ p5.RendererGL = class RendererGL extends p5.Renderer { properties._currentNormal = this._currentNormal; properties.curBlendMode = this.curBlendMode; + // So that the activeImageLight gets reset in push/pop + properties.activeImageLight = this.activeImageLight; + return style; } pop(...args) { @@ -1743,14 +1767,18 @@ p5.RendererGL = class RendererGL extends p5.Renderer { if (this._pInst._glAttributes.perPixelLighting) { this._defaultLightShader = new p5.Shader( this, - defaultShaders.phongVert, - defaultShaders.phongFrag + this._webGL2CompatibilityPrefix('vert', 'highp') + + defaultShaders.phongVert, + this._webGL2CompatibilityPrefix('frag', 'highp') + + defaultShaders.phongFrag ); } else { this._defaultLightShader = new p5.Shader( this, - defaultShaders.lightVert, - defaultShaders.lightTextureFrag + this._webGL2CompatibilityPrefix('vert', 'highp') + + defaultShaders.lightVert, + this._webGL2CompatibilityPrefix('frag', 'highp') + + defaultShaders.lightTextureFrag ); } } @@ -1762,8 +1790,10 @@ p5.RendererGL = class RendererGL extends p5.Renderer { if (!this._defaultImmediateModeShader) { this._defaultImmediateModeShader = new p5.Shader( this, - defaultShaders.immediateVert, - defaultShaders.vertexColorFrag + this._webGL2CompatibilityPrefix('vert', 'mediump') + + defaultShaders.immediateVert, + this._webGL2CompatibilityPrefix('frag', 'mediump') + + defaultShaders.vertexColorFrag ); } @@ -1774,8 +1804,10 @@ p5.RendererGL = class RendererGL extends p5.Renderer { if (!this._defaultNormalShader) { this._defaultNormalShader = new p5.Shader( this, - defaultShaders.normalVert, - defaultShaders.normalFrag + this._webGL2CompatibilityPrefix('vert', 'mediump') + + defaultShaders.normalVert, + this._webGL2CompatibilityPrefix('frag', 'mediump') + + defaultShaders.normalFrag ); } @@ -1786,8 +1818,10 @@ p5.RendererGL = class RendererGL extends p5.Renderer { if (!this._defaultColorShader) { this._defaultColorShader = new p5.Shader( this, - defaultShaders.normalVert, - defaultShaders.basicFrag + this._webGL2CompatibilityPrefix('vert', 'mediump') + + defaultShaders.normalVert, + this._webGL2CompatibilityPrefix('frag', 
'mediump') +
+          defaultShaders.basicFrag
       );
     }
@@ -1798,8 +1832,10 @@ p5.RendererGL = class RendererGL extends p5.Renderer {
     if (!this._defaultPointShader) {
       this._defaultPointShader = new p5.Shader(
         this,
-        defaultShaders.pointVert,
-        defaultShaders.pointFrag
+        this._webGL2CompatibilityPrefix('vert', 'mediump') +
+          defaultShaders.pointVert,
+        this._webGL2CompatibilityPrefix('frag', 'mediump') +
+          defaultShaders.pointFrag
       );
     }
     return this._defaultPointShader;
   }
@@ -1809,8 +1845,10 @@ p5.RendererGL = class RendererGL extends p5.Renderer {
     if (!this._defaultLineShader) {
       this._defaultLineShader = new p5.Shader(
         this,
-        defaultShaders.lineVert,
-        defaultShaders.lineFrag
+        this._webGL2CompatibilityPrefix('vert', 'mediump') +
+          defaultShaders.lineVert,
+        this._webGL2CompatibilityPrefix('frag', 'mediump') +
+          defaultShaders.lineFrag
       );
     }
@@ -1877,6 +1915,88 @@ p5.RendererGL = class RendererGL extends p5.Renderer {
     this.textures.set(src, tex);
     return tex;
   }
+  /*
+   * Used in imageLight:
+   * creates a blurry (irradiance) p5.Graphics from the input image if one
+   * doesn't already exist, adds it to the diffusedTextures map, and returns it.
+   * Maps a p5.Image used by imageLight() to a p5.Graphics.
+   */
+  getDiffusedTexture(input){
+    // if one already exists for a given input image
+    if(this.diffusedTextures.get(input)!=null){
+      return this.diffusedTextures.get(input);
+    }
+    // if not, only then create one
+    let newGraphic; // maybe switch to framebuffer
+    // hardcoded to 200px, because it's going to be blurry and smooth
+    let smallWidth = 200;
+    let width = smallWidth;
+    let height = Math.floor(smallWidth * (input.height / input.width));
+    newGraphic = this._pInst.createGraphics(width, height, constants.WEBGL);
+    // createGraphics is like making a new sketch: everything available on the
+    // main sketch is also available on the graphics object
+    let irradiance = newGraphic.createShader(
+      defaultShaders.imageLightVert,
+      defaultShaders.imageLightDiffusedFrag
+    );
+    newGraphic.shader(irradiance);
+    irradiance.setUniform('environmentMap', input);
+    newGraphic.noStroke();
+    newGraphic.rectMode(newGraphic.CENTER);
+    newGraphic.rect(0, 0, newGraphic.width, newGraphic.height);
+    this.diffusedTextures.set(input, newGraphic);
+    return newGraphic;
+  }
+
+  /*
+   * Used in imageLight:
+   * creates a prefiltered specular texture from the input image if one doesn't
+   * already exist. It renders 8 levels of the texture at decreasing sizes,
+   * storing them in the `levels` array, builds a new MipmapTexture from that
+   * array, and caches it in the `specularTextures` map.
+   * Maps the input p5.Image to a p5.MipmapTexture.
+   */
+  getSpecularTexture(input){
+    // check if one already exists (all resolution levels for an input are
+    // cached together, so the whole levels array is looked up at once)
+    if(this.specularTextures.get(input)!=null){
+      return this.specularTextures.get(input);
+    }
+    // Hardcoded size
+    const size = 512;
+    let tex;
+    const levels = [];
+    const graphic = this._pInst.createGraphics(size, size, constants.WEBGL);
+    let count = Math.log(size)/Math.log(2);
+    graphic.pixelDensity(1);
+    // currently only 8 levels
+    // This loop renders 8 graphics with a shrinking canvas size
+    // and correspondingly increasing roughness levels.
+    // Roughness increases as the canvas size decreases,
+    // because rougher surfaces have less detailed/more blurry reflections.
+ for (let w = size; w >= 1; w /= 2) { + graphic.resizeCanvas(w, w); + let currCount = Math.log(w)/Math.log(2); + let roughness = 1-currCount/count; + let myShader = graphic.createShader( + defaultShaders.imageLightVert, + defaultShaders.imageLightSpecularFrag + ); + graphic.shader(myShader); + graphic.clear(); + myShader.setUniform('environmentMap', input); + myShader.setUniform('roughness', roughness); + graphic.noStroke(); + graphic.plane(w, w); + levels.push(graphic.get().drawingContext.getImageData(0, 0, w, w)); + } + graphic.remove(); + tex = new MipmapTexture(this, levels, {}); + this.specularTextures.set(input,tex); + return tex; + } /** * @method activeFramebuffer @@ -1923,6 +2043,8 @@ p5.RendererGL = class RendererGL extends p5.Renderer { fillShader.setUniform('uEmissive', this._useEmissiveMaterial); fillShader.setUniform('uShininess', this._useShininess); + this._setImageLightUniforms(fillShader); + fillShader.setUniform('uUseLighting', this._enableLighting); const pointLightCount = this.pointLightDiffuseColors.length / 3; @@ -1973,6 +2095,29 @@ p5.RendererGL = class RendererGL extends p5.Renderer { fillShader.bindTextures(); } + // getting called from _setFillUniforms + _setImageLightUniforms(shader){ + //set uniform values + shader.setUniform('uUseImageLight', this.activeImageLight != null ); + // true + if (this.activeImageLight) { + // this.activeImageLight has image as a key + // look up the texture from the diffusedTexture map + let diffusedLight = this.getDiffusedTexture(this.activeImageLight); + shader.setUniform('environmentMapDiffused', diffusedLight); + let specularLight = this.getSpecularTexture(this.activeImageLight); + // In p5js the range of shininess is >= 1, + // Therefore roughness range will be ([0,1]*8)*20 or [0, 160] + // The factor of 8 is because currently the getSpecularTexture + // only calculated 8 different levels of roughness + // The factor of 20 is just to spread up this range so that, + // [1, max] of shininess is converted to [0,160] of roughness + let roughness = 20/this._useShininess; + shader.setUniform('levelOfDetail', roughness*8); + shader.setUniform('environmentMapSpecular', specularLight); + } + } + _setPointUniforms(pointShader) { pointShader.bindShader(); diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index f9dbe31439..55a4d3ed82 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -337,17 +337,8 @@ p5.Texture = class Texture { setInterpolation (downScale, upScale) { const gl = this._renderer.GL; - if (downScale === constants.NEAREST) { - this.glMinFilter = gl.NEAREST; - } else { - this.glMinFilter = gl.LINEAR; - } - - if (upScale === constants.NEAREST) { - this.glMagFilter = gl.NEAREST; - } else { - this.glMagFilter = gl.LINEAR; - } + this.glMinFilter = this.glFilter(downScale); + this.glMagFilter = this.glFilter(upScale); this.bindTexture(); gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this.glMinFilter); @@ -355,6 +346,15 @@ p5.Texture = class Texture { this.unbindTexture(); } + glFilter(filter) { + const gl = this._renderer.GL; + if (filter === constants.NEAREST) { + return gl.NEAREST; + } else { + return gl.LINEAR; + } + } + /** * Sets the texture wrapping mode. This controls how textures behave * when their uv's go outside of the 0 - 1 range. 
There are three options: @@ -452,6 +452,51 @@ p5.Texture = class Texture { } }; +export class MipmapTexture extends p5.Texture { + constructor(renderer, levels, settings) { + super(renderer, levels, settings); + const gl = this._renderer.GL; + if (this.glMinFilter === gl.LINEAR) { + this.glMinFilter = gl.LINEAR_MIPMAP_LINEAR; + } + } + + glFilter(_filter) { + const gl = this._renderer.GL; + // TODO: support others + return gl.LINEAR_MIPMAP_LINEAR; + } + + _getTextureDataFromSource() { + return this.src; + } + + init(levels) { + const gl = this._renderer.GL; + this.glTex = gl.createTexture(); + + this.bindTexture(); + for (let level = 0; level < levels.length; level++) { + gl.texImage2D( + this.glTarget, + level, + this.glFormat, + this.glFormat, + this.glDataType, + levels[level] + ); + } + + this.glMinFilter = gl.LINEAR_MIPMAP_LINEAR; + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, this.glMagFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this.glMinFilter); + + this.unbindTexture(); + } + + update() {} +} + export function checkWebGLCapabilities({ GL, webglVersion }) { const gl = GL; const supportsFloat = webglVersion === constants.WEBGL2 diff --git a/src/webgl/shaders/basic.frag b/src/webgl/shaders/basic.frag index be191e1c34..11b14ea09c 100644 --- a/src/webgl/shaders/basic.frag +++ b/src/webgl/shaders/basic.frag @@ -1,5 +1,4 @@ -precision mediump float; -varying vec4 vColor; +IN vec4 vColor; void main(void) { - gl_FragColor = vec4(vColor.rgb, 1.) * vColor.a; + OUT_COLOR = vec4(vColor.rgb, 1.) * vColor.a; } diff --git a/src/webgl/shaders/imageLight.vert b/src/webgl/shaders/imageLight.vert new file mode 100644 index 0000000000..6f68e6d092 --- /dev/null +++ b/src/webgl/shaders/imageLight.vert @@ -0,0 +1,33 @@ +precision highp float; +attribute vec3 aPosition; +attribute vec3 aNormal; +attribute vec2 aTexCoord; + +varying vec3 localPos; +varying vec3 vWorldNormal; +varying vec3 vWorldPosition; +varying vec2 vTexCoord; + +uniform mat4 uModelViewMatrix; +uniform mat4 uProjectionMatrix; +uniform mat3 uNormalMatrix; + +void main() { + // Multiply the position by the matrix. + vec4 viewModelPosition = uModelViewMatrix * vec4(aPosition, 1.0); + gl_Position = uProjectionMatrix * viewModelPosition; + + // orient the normals and pass to the fragment shader + vWorldNormal = uNormalMatrix * aNormal; + + // send the view position to the fragment shader + vWorldPosition = (uModelViewMatrix * vec4(aPosition, 1.0)).xyz; + + localPos = vWorldPosition; + vTexCoord = aTexCoord; +} + + +/* +in the vertex shader we'll compute the world position and world oriented normal of the vertices and pass those to the fragment shader as varyings. +*/ diff --git a/src/webgl/shaders/imageLightDiffused.frag b/src/webgl/shaders/imageLightDiffused.frag new file mode 100644 index 0000000000..60867f81ac --- /dev/null +++ b/src/webgl/shaders/imageLightDiffused.frag @@ -0,0 +1,74 @@ +precision highp float; +varying vec3 localPos; + +// the HDR cubemap converted (can be from an equirectangular environment map.) 
+uniform sampler2D environmentMap; +varying vec2 vTexCoord; + +const float PI = 3.14159265359; + +vec2 nTOE( vec3 v ){ + // x = r sin(phi) cos(theta) + // y = r cos(phi) + // z = r sin(phi) sin(theta) + float phi = acos( v.y ); + // if phi is 0, then there are no x, z components + float theta = 0.0; + // else + theta = acos(v.x / sin(phi)); + float sinTheta = v.z / sin(phi); + if (sinTheta < 0.0) { + // Turn it into -theta, but in the 0-2PI range + theta = 2.0 * PI - theta; + } + theta = theta / (2.0 * 3.14159); + phi = phi / 3.14159 ; + + vec2 angles = vec2( phi, theta ); + return angles; +} + +void main() +{ + // the sample direction equals the hemisphere's orientation + float phi = vTexCoord.x * 2.0 * PI; + float theta = vTexCoord.y * PI; + float x = sin(theta) * cos(phi); + float y = sin(theta) * sin(phi); + float z = cos(theta); + vec3 normal = vec3( x, y, z); + + // Discretely sampling the hemisphere given the integral's + // spherical coordinates translates to the following fragment code: + vec3 irradiance = vec3(0.0); + vec3 up = vec3(0.0, 1.0, 0.0); + vec3 right = normalize(cross(up, normal)); + up = normalize(cross(normal, right)); + + // We specify a fixed sampleDelta delta value to traverse + // the hemisphere; decreasing or increasing the sample delta + // will increase or decrease the accuracy respectively. + const float sampleDelta = 0.025; + float nrSamples = 0.0; + + for(float phi = 0.0; phi < 2.0 * PI; phi += sampleDelta) + { + for(float theta = 0.0; theta < ( 0.5 ) * PI; theta += sampleDelta) + { + // spherical to cartesian (in tangent space) // tangent space to world // add each sample result to irradiance + float x = sin(theta) * cos(phi); + float y = sin(theta) * sin(phi); + float z = cos(theta); + vec3 tangentSample = vec3( x, y, z); + + vec3 sampleVec = tangentSample.x * right + tangentSample.y * up + tangentSample.z * normal; + irradiance += (texture2D(environmentMap, nTOE(sampleVec)).xyz) * cos(theta) * sin(theta); + nrSamples++; + } + } + // divide by the total number of samples taken, giving us the average sampled irradiance. 
+ irradiance = PI * irradiance * (1.0 / float(nrSamples )) ; + + + gl_FragColor = vec4(irradiance, 1.0); +} \ No newline at end of file diff --git a/src/webgl/shaders/imageLightSpecular.frag b/src/webgl/shaders/imageLightSpecular.frag new file mode 100644 index 0000000000..3c4ab9f316 --- /dev/null +++ b/src/webgl/shaders/imageLightSpecular.frag @@ -0,0 +1,112 @@ +precision highp float; +varying vec3 localPos; +varying vec2 vTexCoord; + +// our texture +uniform sampler2D environmentMap; +uniform float roughness; + +const float PI = 3.14159265359; + +float VanDerCorput(int bits); +vec2 HammersleyNoBitOps(int i, int N); +vec3 ImportanceSampleGGX(vec2 Xi, vec3 N, float roughness); + + +vec2 nTOE( vec3 v ){ + // x = r sin(phi) cos(theta) + // y = r cos(phi) + // z = r sin(phi) sin(theta) + float phi = acos( v.y ); + // if phi is 0, then there are no x, z components + float theta = 0.0; + // else + theta = acos(v.x / sin(phi)); + float sinTheta = v.z / sin(phi); + if (sinTheta < 0.0) { + // Turn it into -theta, but in the 0-2PI range + theta = 2.0 * PI - theta; + } + theta = theta / (2.0 * 3.14159); + phi = phi / 3.14159 ; + + vec2 angles = vec2( phi, theta ); + return angles; +} + + +void main(){ + const int SAMPLE_COUNT = 1024; // 4096 + float totalWeight = 0.0; + vec3 prefilteredColor = vec3(0.0); + float phi = vTexCoord.x * 2.0 * PI; + float theta = vTexCoord.y * PI; + float x = sin(theta) * cos(phi); + float y = sin(theta) * sin(phi); + float z = cos(theta); + vec3 N = vec3(x,y,z); + vec3 V = N; + for (int i = 0; i < SAMPLE_COUNT; ++i) + { + vec2 Xi = HammersleyNoBitOps(i, SAMPLE_COUNT); + vec3 H = ImportanceSampleGGX(Xi, N, roughness); + vec3 L = normalize(2.0 * dot(V, H) * H - V); + + float NdotL = max(dot(N, L), 0.0); + if (NdotL > 0.0) + { + prefilteredColor += texture2D(environmentMap, nTOE(L)).xyz * NdotL; + totalWeight += NdotL; + } + } + prefilteredColor = prefilteredColor / totalWeight; + + gl_FragColor = vec4(prefilteredColor, 1.0); +} + +vec3 ImportanceSampleGGX(vec2 Xi, vec3 N, float roughness){ + float a = roughness * roughness; + + float phi = 2.0 * PI * Xi.x; + float cosTheta = sqrt((1.0 - Xi.y) / (1.0 + (a * a - 1.0) * Xi.y)); + float sinTheta = sqrt(1.0 - cosTheta * cosTheta); + // from spherical coordinates to cartesian coordinates + vec3 H; + H.x = cos(phi) * sinTheta; + H.y = sin(phi) * sinTheta; + H.z = cosTheta; + + // from tangent-space vector to world-space sample vector + vec3 up = abs(N.z) < 0.999 ? 
vec3(0.0, 0.0, 1.0) : vec3(1.0, 0.0, 0.0); + vec3 tangent = normalize(cross(up, N)); + vec3 bitangent = cross(N, tangent); + + vec3 sampleVec = tangent * H.x + bitangent * H.y + N * H.z; + return normalize(sampleVec); +} + + +float VanDerCorput(int n, int base) +{ + float invBase = 1.0 / float(base); + float denom = 1.0; + float result = 0.0; + + for (int i = 0; i < 32; ++i) + { + if (n > 0) + { + denom = mod(float(n), 2.0); + result += denom * invBase; + invBase = invBase / 2.0; + n = int(float(n) / 2.0); + } + } + + return result; +} + +vec2 HammersleyNoBitOps(int i, int N) +{ + return vec2(float(i) / float(N), VanDerCorput(i, 2)); +} diff --git a/src/webgl/shaders/immediate.vert b/src/webgl/shaders/immediate.vert index 396e69eca2..d430e60302 100644 --- a/src/webgl/shaders/immediate.vert +++ b/src/webgl/shaders/immediate.vert @@ -1,12 +1,12 @@ -attribute vec3 aPosition; -attribute vec4 aVertexColor; +IN vec3 aPosition; +IN vec4 aVertexColor; uniform mat4 uModelViewMatrix; uniform mat4 uProjectionMatrix; uniform float uResolution; uniform float uPointSize; -varying vec4 vColor; +OUT vec4 vColor; void main(void) { vec4 positionVec4 = vec4(aPosition, 1.0); gl_Position = uProjectionMatrix * uModelViewMatrix * positionVec4; diff --git a/src/webgl/shaders/light.vert b/src/webgl/shaders/light.vert index b79713cd26..c0d3b25939 100644 --- a/src/webgl/shaders/light.vert +++ b/src/webgl/shaders/light.vert @@ -1,9 +1,9 @@ // include lighting.glgl -attribute vec3 aPosition; -attribute vec3 aNormal; -attribute vec2 aTexCoord; -attribute vec4 aVertexColor; +IN vec3 aPosition; +IN vec3 aNormal; +IN vec2 aTexCoord; +IN vec4 aVertexColor; uniform mat4 uModelViewMatrix; uniform mat4 uProjectionMatrix; @@ -12,10 +12,10 @@ uniform mat3 uNormalMatrix; uniform bool uUseVertexColor; uniform vec4 uMaterialColor; -varying highp vec2 vVertTexCoord; -varying vec3 vDiffuseColor; -varying vec3 vSpecularColor; -varying vec4 vColor; +OUT highp vec2 vVertTexCoord; +OUT vec3 vDiffuseColor; +OUT vec3 vSpecularColor; +OUT vec4 vColor; void main(void) { diff --git a/src/webgl/shaders/light_texture.frag b/src/webgl/shaders/light_texture.frag index 5db6a60e84..e02083b97b 100644 --- a/src/webgl/shaders/light_texture.frag +++ b/src/webgl/shaders/light_texture.frag @@ -1,28 +1,26 @@ -precision highp float; - uniform vec4 uTint; uniform sampler2D uSampler; uniform bool isTexture; uniform bool uEmissive; -varying highp vec2 vVertTexCoord; -varying vec3 vDiffuseColor; -varying vec3 vSpecularColor; -varying vec4 vColor; +IN highp vec2 vVertTexCoord; +IN vec3 vDiffuseColor; +IN vec3 vSpecularColor; +IN vec4 vColor; void main(void) { if(uEmissive && !isTexture) { - gl_FragColor = vColor; + OUT_COLOR = vColor; } else { vec4 baseColor = isTexture // Textures come in with premultiplied alpha. To apply tint and still have // premultiplied alpha output, we need to multiply the RGB channels by the // tint RGB, and all channels by the tint alpha. - ? texture2D(uSampler, vVertTexCoord) * vec4(uTint.rgb/255., 1.) * (uTint.a/255.) + ? TEXTURE(uSampler, vVertTexCoord) * vec4(uTint.rgb/255., 1.) * (uTint.a/255.) // Colors come in with unmultiplied alpha, so we need to multiply the RGB // channels by alpha to convert it to premultiplied alpha. 
: vec4(vColor.rgb * vColor.a, vColor.a); - gl_FragColor = vec4(baseColor.rgb * vDiffuseColor + vSpecularColor, baseColor.a); + OUT_COLOR = vec4(baseColor.rgb * vDiffuseColor + vSpecularColor, baseColor.a); } } diff --git a/src/webgl/shaders/lighting.glsl b/src/webgl/shaders/lighting.glsl index e2eb47563a..131a0767e1 100644 --- a/src/webgl/shaders/lighting.glsl +++ b/src/webgl/shaders/lighting.glsl @@ -1,3 +1,5 @@ +#define PI 3.141592 + precision highp float; precision highp int; @@ -33,6 +35,16 @@ uniform float uConstantAttenuation; uniform float uLinearAttenuation; uniform float uQuadraticAttenuation; +// setting from _setImageLightUniforms() +// boolean to initiate the calculateImageDiffuse and calculateImageSpecular +uniform bool uUseImageLight; +// texture for use in calculateImageDiffuse +uniform sampler2D environmentMapDiffused; +// texture for use in calculateImageSpecular +uniform sampler2D environmentMapSpecular; +// roughness for use in calculateImageSpecular +uniform float levelOfDetail; + const float specularFactor = 2.0; const float diffuseFactor = 0.73; @@ -67,6 +79,60 @@ LightResult _light(vec3 viewDirection, vec3 normal, vec3 lightVector) { return lr; } +// converts the range of "value" from [min1 to max1] to [min2 to max2] +float map(float value, float min1, float max1, float min2, float max2) { + return min2 + (value - min1) * (max2 - min2) / (max1 - min1); +} + +vec2 mapTextureToNormal( vec3 v ){ + // x = r sin(phi) cos(theta) + // y = r cos(phi) + // z = r sin(phi) sin(theta) + float phi = acos( v.y ); + // if phi is 0, then there are no x, z components + float theta = 0.0; + // else + theta = acos(v.x / sin(phi)); + float sinTheta = v.z / sin(phi); + if (sinTheta < 0.0) { + // Turn it into -theta, but in the 0-2PI range + theta = 2.0 * PI - theta; + } + theta = theta / (2.0 * 3.14159); + phi = phi / 3.14159 ; + + vec2 angles = vec2( fract(theta + 0.25), 1.0 - phi ); + return angles; +} + + +vec3 calculateImageDiffuse( vec3 vNormal, vec3 vViewPosition ){ + // make 2 seperate builds + vec3 worldCameraPosition = vec3(0.0, 0.0, 0.0); // hardcoded world camera position + vec3 worldNormal = normalize(vNormal); + vec2 newTexCoor = mapTextureToNormal( worldNormal ); + vec4 texture = TEXTURE( environmentMapDiffused, newTexCoor ); + // this is to make the darker sections more dark + // png and jpg usually flatten the brightness so it is to reverse that + return smoothstep(vec3(0.0), vec3(0.8), texture.xyz); +} + +vec3 calculateImageSpecular( vec3 vNormal, vec3 vViewPosition ){ + vec3 worldCameraPosition = vec3(0.0, 0.0, 0.0); + vec3 worldNormal = normalize(vNormal); + vec3 lightDirection = normalize( vViewPosition - worldCameraPosition ); + vec3 R = reflect(lightDirection, worldNormal); + vec2 newTexCoor = mapTextureToNormal( R ); +#ifdef WEBGL2 + vec4 outColor = textureLod(environmentMapSpecular, newTexCoor, levelOfDetail); +#else + vec4 outColor = TEXTURE(environmentMapSpecular, newTexCoor); +#endif + // this is to make the darker sections more dark + // png and jpg usually flatten the brightness so it is to reverse that + return pow(outColor.xyz, vec3(10.0)); +} + void totalLight( vec3 modelPosition, vec3 normal, @@ -138,6 +204,11 @@ void totalLight( } } + if( uUseImageLight ){ + totalDiffuse += calculateImageDiffuse(normal, modelPosition); + totalSpecular += calculateImageSpecular(normal, modelPosition); + } + totalDiffuse *= diffuseFactor; totalSpecular *= specularFactor; } diff --git a/src/webgl/shaders/line.frag b/src/webgl/shaders/line.frag index e38e9b66ee..42c24edcff 
100644 --- a/src/webgl/shaders/line.frag +++ b/src/webgl/shaders/line.frag @@ -1,4 +1,3 @@ -precision mediump float; precision mediump int; uniform vec4 uMaterialColor; @@ -6,13 +5,13 @@ uniform int uStrokeCap; uniform int uStrokeJoin; uniform float uStrokeWeight; -varying vec4 vColor; -varying vec2 vTangent; -varying vec2 vCenter; -varying vec2 vPosition; -varying float vMaxDist; -varying float vCap; -varying float vJoin; +IN vec4 vColor; +IN vec2 vTangent; +IN vec2 vCenter; +IN vec2 vPosition; +IN float vMaxDist; +IN float vCap; +IN float vJoin; float distSquared(vec2 a, vec2 b) { vec2 aToB = b - a; @@ -47,5 +46,5 @@ void main() { } // Use full area for MITER } - gl_FragColor = vec4(vColor.rgb, 1.) * vColor.a; + OUT_COLOR = vec4(vColor.rgb, 1.) * vColor.a; } diff --git a/src/webgl/shaders/line.vert b/src/webgl/shaders/line.vert index 4b6d032a1f..bd608181a9 100644 --- a/src/webgl/shaders/line.vert +++ b/src/webgl/shaders/line.vert @@ -18,7 +18,6 @@ #define PROCESSING_LINE_SHADER -precision mediump float; precision mediump int; uniform mat4 uModelViewMatrix; @@ -32,19 +31,19 @@ uniform vec4 uViewport; uniform int uPerspective; uniform int uStrokeJoin; -attribute vec4 aPosition; -attribute vec3 aTangentIn; -attribute vec3 aTangentOut; -attribute float aSide; -attribute vec4 aVertexColor; - -varying vec4 vColor; -varying vec2 vTangent; -varying vec2 vCenter; -varying vec2 vPosition; -varying float vMaxDist; -varying float vCap; -varying float vJoin; +IN vec4 aPosition; +IN vec3 aTangentIn; +IN vec3 aTangentOut; +IN float aSide; +IN vec4 aVertexColor; + +OUT vec4 vColor; +OUT vec2 vTangent; +OUT vec2 vCenter; +OUT vec2 vPosition; +OUT float vMaxDist; +OUT float vCap; +OUT float vJoin; vec2 lineIntersection(vec2 aPoint, vec2 aDir, vec2 bPoint, vec2 bDir) { // Rotate and translate so a starts at the origin and goes out to the right diff --git a/src/webgl/shaders/normal.frag b/src/webgl/shaders/normal.frag index d9613af243..6b0e370158 100644 --- a/src/webgl/shaders/normal.frag +++ b/src/webgl/shaders/normal.frag @@ -1,5 +1,4 @@ -precision mediump float; -varying vec3 vVertexNormal; +IN vec3 vVertexNormal; void main(void) { - gl_FragColor = vec4(vVertexNormal, 1.0); -} \ No newline at end of file + OUT_COLOR = vec4(vVertexNormal, 1.0); +} diff --git a/src/webgl/shaders/normal.vert b/src/webgl/shaders/normal.vert index 8a94e83ed1..a428dbdd27 100644 --- a/src/webgl/shaders/normal.vert +++ b/src/webgl/shaders/normal.vert @@ -1,7 +1,7 @@ -attribute vec3 aPosition; -attribute vec3 aNormal; -attribute vec2 aTexCoord; -attribute vec4 aVertexColor; +IN vec3 aPosition; +IN vec3 aNormal; +IN vec2 aTexCoord; +IN vec4 aVertexColor; uniform mat4 uModelViewMatrix; uniform mat4 uProjectionMatrix; @@ -10,9 +10,9 @@ uniform mat3 uNormalMatrix; uniform vec4 uMaterialColor; uniform bool uUseVertexColor; -varying vec3 vVertexNormal; -varying highp vec2 vVertTexCoord; -varying vec4 vColor; +OUT vec3 vVertexNormal; +OUT highp vec2 vVertTexCoord; +OUT vec4 vColor; void main(void) { vec4 positionVec4 = vec4(aPosition, 1.0); diff --git a/src/webgl/shaders/phong.frag b/src/webgl/shaders/phong.frag index fa80efa267..c22531087d 100644 --- a/src/webgl/shaders/phong.frag +++ b/src/webgl/shaders/phong.frag @@ -1,5 +1,4 @@ // include lighting.glsl -precision highp float; precision highp int; uniform bool uHasSetAmbient; @@ -11,11 +10,11 @@ uniform vec4 uTint; uniform sampler2D uSampler; uniform bool isTexture; -varying vec3 vNormal; -varying vec2 vTexCoord; -varying vec3 vViewPosition; -varying vec3 vAmbientColor; -varying vec4 
vColor; +IN vec3 vNormal; +IN vec2 vTexCoord; +IN vec3 vViewPosition; +IN vec3 vAmbientColor; +IN vec4 vColor; void main(void) { @@ -29,11 +28,11 @@ void main(void) { // Textures come in with premultiplied alpha. To apply tint and still have // premultiplied alpha output, we need to multiply the RGB channels by the // tint RGB, and all channels by the tint alpha. - ? texture2D(uSampler, vTexCoord) * vec4(uTint.rgb/255., 1.) * (uTint.a/255.) + ? TEXTURE(uSampler, vTexCoord) * vec4(uTint.rgb/255., 1.) * (uTint.a/255.) // Colors come in with unmultiplied alpha, so we need to multiply the RGB // channels by alpha to convert it to premultiplied alpha. : vec4(vColor.rgb * vColor.a, vColor.a); - gl_FragColor = vec4(diffuse * baseColor.rgb + + OUT_COLOR = vec4(diffuse * baseColor.rgb + vAmbientColor * ( uHasSetAmbient ? uAmbientMatColor.rgb : baseColor.rgb ) + diff --git a/src/webgl/shaders/phong.vert b/src/webgl/shaders/phong.vert index 4a6a7f1400..5667709162 100644 --- a/src/webgl/shaders/phong.vert +++ b/src/webgl/shaders/phong.vert @@ -1,10 +1,9 @@ -precision highp float; precision highp int; -attribute vec3 aPosition; -attribute vec3 aNormal; -attribute vec2 aTexCoord; -attribute vec4 aVertexColor; +IN vec3 aPosition; +IN vec3 aNormal; +IN vec2 aTexCoord; +IN vec4 aVertexColor; uniform vec3 uAmbientColor[5]; @@ -16,11 +15,11 @@ uniform int uAmbientLightCount; uniform bool uUseVertexColor; uniform vec4 uMaterialColor; -varying vec3 vNormal; -varying vec2 vTexCoord; -varying vec3 vViewPosition; -varying vec3 vAmbientColor; -varying vec4 vColor; +OUT vec3 vNormal; +OUT vec2 vTexCoord; +OUT vec3 vViewPosition; +OUT vec3 vAmbientColor; +OUT vec4 vColor; void main(void) { diff --git a/src/webgl/shaders/point.frag b/src/webgl/shaders/point.frag index eeafb4ef83..5185794d37 100644 --- a/src/webgl/shaders/point.frag +++ b/src/webgl/shaders/point.frag @@ -1,7 +1,6 @@ -precision mediump float; precision mediump int; uniform vec4 uMaterialColor; -varying float vStrokeWeight; +IN float vStrokeWeight; void main(){ float mask = 0.0; @@ -24,5 +23,5 @@ void main(){ discard; } - gl_FragColor = vec4(uMaterialColor.rgb, 1.) * uMaterialColor.a; + OUT_COLOR = vec4(uMaterialColor.rgb, 1.) * uMaterialColor.a; } diff --git a/src/webgl/shaders/point.vert b/src/webgl/shaders/point.vert index 24d9a405b7..9df67d1588 100644 --- a/src/webgl/shaders/point.vert +++ b/src/webgl/shaders/point.vert @@ -1,6 +1,6 @@ -attribute vec3 aPosition; +IN vec3 aPosition; uniform float uPointSize; -varying float vStrokeWeight; +OUT float vStrokeWeight; uniform mat4 uModelViewMatrix; uniform mat4 uProjectionMatrix; void main() { @@ -8,4 +8,4 @@ void main() { gl_Position = uProjectionMatrix * uModelViewMatrix * positionVec4; gl_PointSize = uPointSize; vStrokeWeight = uPointSize; -} \ No newline at end of file +} diff --git a/src/webgl/shaders/vertexColor.frag b/src/webgl/shaders/vertexColor.frag index be191e1c34..11b14ea09c 100644 --- a/src/webgl/shaders/vertexColor.frag +++ b/src/webgl/shaders/vertexColor.frag @@ -1,5 +1,4 @@ -precision mediump float; -varying vec4 vColor; +IN vec4 vColor; void main(void) { - gl_FragColor = vec4(vColor.rgb, 1.) * vColor.a; + OUT_COLOR = vec4(vColor.rgb, 1.) 
* vColor.a; } diff --git a/src/webgl/shaders/vertexColor.vert b/src/webgl/shaders/vertexColor.vert index b91c914bf4..3dc3df2434 100644 --- a/src/webgl/shaders/vertexColor.vert +++ b/src/webgl/shaders/vertexColor.vert @@ -1,10 +1,10 @@ -attribute vec3 aPosition; -attribute vec4 aVertexColor; +IN vec3 aPosition; +IN vec4 aVertexColor; uniform mat4 uModelViewMatrix; uniform mat4 uProjectionMatrix; -varying vec4 vColor; +OUT vec4 vColor; void main(void) { vec4 positionVec4 = vec4(aPosition, 1.0);
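A note on the shader refactor that runs through the diffs above: `attribute`/`varying`, `gl_FragColor`, and `texture2D` are replaced by `IN`/`OUT`, `OUT_COLOR`, and `TEXTURE` macros, and the shaders are assembled with a compatibility prefix (`webgl2CompatibilityShader`, `_webGL2CompatibilityPrefix()`) whose definition is not included in this diff. As a rough sketch of the idea only, an assumption for illustration rather than p5.js's actual helper, such a prefix could be built like this:

```js
// Hypothetical sketch (not the real p5.js helper): build a prefix that maps the
// IN/OUT/OUT_COLOR/TEXTURE macros used by the shaders above onto the keywords
// of GLSL ES 1.00 (WebGL1) or GLSL ES 3.00 (WebGL2).
function webgl2CompatibilityPrefix(webglVersion, shaderType, floatPrecision) {
  let prefix = '';
  if (webglVersion === 'webgl2') {
    // #version must be the very first line of the assembled shader source.
    // WEBGL2 is also defined so shaders can branch, e.g. the #ifdef WEBGL2
    // textureLod() path in lighting.glsl.
    prefix += '#version 300 es\n#define WEBGL2\n';
  }
  prefix += `precision ${floatPrecision} float;\n`;
  if (webglVersion === 'webgl2') {
    if (shaderType === 'vert') {
      prefix += '#define IN in\n#define OUT out\n';
    } else {
      prefix += '#define IN in\nout vec4 outColor;\n#define OUT_COLOR outColor\n';
    }
    prefix += '#define TEXTURE texture\n';
  } else {
    if (shaderType === 'vert') {
      prefix += '#define IN attribute\n#define OUT varying\n';
    } else {
      prefix += '#define IN varying\n#define OUT_COLOR gl_FragColor\n';
    }
    prefix += '#define TEXTURE texture2D\n';
  }
  return prefix;
}

// Example: prepend to a fragment shader before compiling under WebGL1.
// const src = webgl2CompatibilityPrefix('webgl1', 'frag', 'mediump') + basicFragSource;
```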