[Computer Graphics] Shader Monthly Notes
聪头 · game development beginner

GSN Composer:Shader Monthly

Date: 2023-08-31 14:01:48

Link:

P1 What are Shaders?

Shader: a computer program that tells the computer how to render a scene

  • Usually executed in parallel
  • Runs on the GPU (Graphics Processing Unit)
image image

Practice

Green nodes: data

Blue nodes: computation nodes

image

P2 Perspective Projection in GLSL

URL:https://www.youtube.com/watch?v=_pGik1nuZbw

Overview of this lesson:

  1. Explain the pinhole camera model
  2. Derive the perspective projection matrix

Perspective projection

image image

The camera looks along the -z axis, so for points in front of the camera -z > 0

image

Euclidean and projective space

image

The OpenGL pipeline

image

OpenGL perspective projection

Points on the far plane are mapped to NDC z = 1; points on the near plane are mapped to NDC z = -1 (the matrix that realizes this mapping is shown below)

image image image
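
The standard OpenGL projection matrix that realizes this mapping (with n and f the positive distances to the near and far planes, and l, r, b, t the extents of the near plane) is:

$$
P = \begin{pmatrix}
\frac{2n}{r-l} & 0 & \frac{r+l}{r-l} & 0 \\
0 & \frac{2n}{t-b} & \frac{t+b}{t-b} & 0 \\
0 & 0 & -\frac{f+n}{f-n} & -\frac{2fn}{f-n} \\
0 & 0 & -1 & 0
\end{pmatrix}
$$

After the perspective divide by $w_{clip} = -z_{eye}$, a point at $z_{eye} = -n$ ends up at NDC z = -1 and a point at $z_{eye} = -f$ at NDC z = +1.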

P7 Procedural Textures

URL:https://www.youtube.com/watch?v=6VCSaOYEcOo

Procedural textures:

  • Use no texture memory
  • Are resolution independent
image

SDF

image

Aliasing

image

SDF anti-aliasing (I could not follow the derivation, so just memorize the result)

Red line:

  • Measures how much the pixel lies inside the circle and is used to smooth the boundary.
  • Returns 1 when the sample is entirely inside the circle, 0 when entirely outside, and an interpolated value in between.
  • This is the unknown we need to compute.
image

SDF: computed in texture-coordinate space (continuous)

Anti-aliased fill function: computed in pixel-coordinate space (discrete)

image

Yellow line: obtained by evaluating the SDF in pixel space, effectively using the discrete domain to smooth the continuous one.

  • Flip it and add 0.5 to get the red line
image image

fwidth investigation

ddx(v) = value at the pixel to the right - value at this pixel (dFdx in GLSL)
ddy(v) = value at the pixel below - value at this pixel (dFdy in GLSL)
fwidth(v) = abs(ddx(v)) + abs(ddy(v)) // approximate derivative between neighboring pixels
image

fwidth visualization: the sum of absolute differences is largest along 45° edges and smallest along horizontal and vertical edges. A sketch that puts the SDF and fwidth together follows below.

image
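
Putting the pieces together, a minimal sketch (my own code, not the video's) of an anti-aliased circle fill that combines the SDF with fwidth:

// Minimal sketch: anti-aliased fill of a circle defined by an SDF.
// sdf < 0 inside the circle, sdf > 0 outside.
// fwidth(sdf) estimates how much the SDF changes across one pixel,
// so the smoothstep blends over roughly a one-pixel-wide band.
float circleFillAA(vec2 uv, vec2 center, float radius) {
  float sdf = length(uv - center) - radius;        // SDF in texture-coordinate space
  float w = fwidth(sdf);                           // approx. per-pixel change of the SDF
  return 1.0 - smoothstep(-0.5 * w, 0.5 * w, sdf); // 1 inside, 0 outside, smooth edge
}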

P8 Procedural Noise

URL:https://www.youtube.com/watch?v=jkYIOu8HddA

Bilinear interpolation between neighboring random values

image image

Value Noise

Vertex shader

#version 300 es
precision highp float;
in vec3 position; // input vertex position from mesh
in vec2 texcoord; // input texture coordinate from mesh

out vec2 interpolatedTexCoord;

void main(){
interpolatedTexCoord = texcoord;
gl_Position = vec4(position, 1.0);
}

Fragment shader

#version 300 es
precision highp float;
precision highp int;
out vec4 outColor; // name of the output variable

in vec2 interpolatedTexCoord;
uniform int width;
uniform int height;
uniform int graphTime;

// Hash Functions for GPU Rendering, Jarzynski et al.
// http://www.jcgt.org/published/0009/03/02/
vec3 random_pcg3d(uvec3 v) {
v = v * 1664525u + 1013904223u;
v.x += v.y*v.z; v.y += v.z*v.x; v.z += v.x*v.y;
v ^= v >> 16u;
v.x += v.y*v.z; v.y += v.z*v.x; v.z += v.x*v.y;
return vec3(v) * (1.0/float(0xffffffffu));
}

vec3 valueNoise(vec2 pos, float gridSize) {
// return vec4(pos.x, pos.y, 0.0, 1.0);

vec2 gridPos = pos * gridSize;
uvec2 i = uvec2(gridPos);
vec2 f = fract(gridPos);
//return vec4(f.x, f.y, 0.0, 1.0);

vec3 f11 = random_pcg3d(uvec3(i.x, i.y, 0));
vec3 f12 = random_pcg3d(uvec3(i.x + 1u, i.y, 0));
vec3 f21 = random_pcg3d(uvec3(i.x, i.y + 1u, 0));
vec3 f22 = random_pcg3d(uvec3(i.x + 1u, i.y + 1u, 0));

f = smoothstep(0.0, 1.0, f);

vec3 q1 = mix(f11, f12, vec3(f.x));
vec3 q2 = mix(f21, f22, vec3(f.x));
vec3 p = mix(q1, q2, vec3(f.y));

return p;
}

vec3 fractalValueNoise(vec2 pos) {
vec3 n = vec3(0.0);
n += 0.500 * valueNoise(pos, 4.0);
n += 0.250 * valueNoise(pos, 8.0);
n += 0.125 * valueNoise(pos, 16.0);
return n;
}

void main() {
vec2 pos = interpolatedTexCoord + 0.002 * float(graphTime);
//vec3 texColor = fractalValueNoise(pos);
vec3 texColor = valueNoise(pos, 4.0);
outColor = vec4(texColor, 1.0);
}

image

Gradient Noise

Interpolates between random gradients (dot products of the gradients with the offset vectors)

image image image

Vertex shader

#version 300 es
precision highp float;
in vec3 position; // input vertex position from mesh
in vec2 texcoord; // input texture coordinate from mesh

out vec2 interpolatedTexCoord;

void main(){
interpolatedTexCoord = texcoord;
gl_Position = vec4(position, 1.0);
}

Fragment shader

#version 300 es
precision highp float;
precision highp int;
out vec4 outColor; // name of the output variable

in vec2 interpolatedTexCoord;
uniform int width;
uniform int height;
uniform int graphTime;

// Hash Functions for GPU Rendering, Jarzynski et al.
// http://www.jcgt.org/published/0009/03/02/
vec3 random_pcg3d(uvec3 v) {
v = v * 1664525u + 1013904223u;
v.x += v.y*v.z; v.y += v.z*v.x; v.z += v.x*v.y;
v ^= v >> 16u;
v.x += v.y*v.z; v.y += v.z*v.x; v.z += v.x*v.y;
return vec3(v) * (1.0/float(0xffffffffu));
}

#define M_PI 3.1415926535897932384626433832795
vec2 randomGradient(uvec3 p) {
vec3 uv = random_pcg3d(p);
float r = sqrt(uv[0]);
float phi = 2.0 * M_PI * uv[1];
return vec2(r * cos(phi), r * sin(phi));
}


vec3 gradientNoise(vec2 pos, float gridSize) {
// return vec4(pos.x, pos.y, 0.0, 1.0);

vec2 gridPos = pos * gridSize;
uvec2 i = uvec2(gridPos);
vec2 f = fract(gridPos);
//return vec4(f.x, f.y, 0.0, 1.0);

vec2 g11 = randomGradient(uvec3(i.x, i.y, 1));
vec2 g12 = randomGradient(uvec3(i.x + 1u, i.y, 1));
vec2 g21 = randomGradient(uvec3(i.x, i.y + 1u, 1));
vec2 g22 = randomGradient(uvec3(i.x + 1u, i.y + 1u, 1));

float d11 = dot(g11, f);
float d12 = dot(g12, f - vec2(1.0, 0.0));
float d21 = dot(g21, f - vec2(0.0, 1.0));
float d22 = dot(g22, f - vec2(1.0, 1.0));

// Gradient noise
/*vec3 f11 = vec3(d11);
vec3 f12 = vec3(d12);
vec3 f21 = vec3(d21);
vec3 f22 = vec3(d22);*/

// Colored gradient noise
vec3 f11 = random_pcg3d(uvec3(i.x, i.y, 0)) * (d11 + 1.0);
vec3 f12 = random_pcg3d(uvec3(i.x + 1u, i.y, 0)) * (d12 + 1.0);
vec3 f21 = random_pcg3d(uvec3(i.x, i.y + 1u, 0)) * (d21 + 1.0);
vec3 f22 = random_pcg3d(uvec3(i.x + 1u, i.y + 1u, 0)) * (d22 + 1.0);

f = smoothstep(0.0, 1.0, f);

vec3 q1 = mix(f11, f12, vec3(f.x));
vec3 q2 = mix(f21, f22, vec3(f.x));
vec3 p = mix(q1, q2, vec3(f.y));

return p;
}

vec3 fractalGradientNoise(vec2 pos) {
vec3 n = vec3(0.0);
n += 0.500 * gradientNoise(pos, 4.0);
n += 0.250 * gradientNoise(pos, 8.0);
n += 0.125 * gradientNoise(pos, 16.0);
return n;
}

void main() {
vec2 pos = interpolatedTexCoord + 0.002 * float(graphTime);
//vec3 texColor = fractalGradientNoise(pos);
vec3 texColor = gradientNoise(pos, 4.0);
outColor = vec4(texColor, 1.0);
}

P9 Microfacet BRDF

Microfacet BRDF: Theory and Implementation of Basic PBR Materials [Shaders Monthly #9]:https://www.youtube.com/watch?v=gya7x9H3mV0

Other useful links

image image

Fresnel reflectance

Macroscopic Fresnel

image image image image

Example: when light travels from air into glass at perpendicular (normal) incidence, about 4% of it is reflected and 96% is refracted

The Fresnel reflectance differs between wavelengths

image
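
The 4% figure follows from the normal-incidence reflectance F0; the shaders later in these notes use the Schlick approximation (fresnelSchlick), which interpolates from this F0:

$$
F_0 = \left(\frac{n_1 - n_2}{n_1 + n_2}\right)^2 \approx \left(\frac{1.0 - 1.5}{1.0 + 1.5}\right)^2 = 0.04,
\qquad
F(\theta) \approx F_0 + (1 - F_0)\,(1 - \cos\theta)^5
$$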

Microscopic Fresnel

The main difference is how θ1 is determined (for a microfacet, the relevant normal is the half vector h)

image image

Normal distribution function

image

Observe that the normal distribution function depends on the ==normal n== and the ==half vector h==

image image
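
The concrete NDF used in the shaders below is GGX; with $\alpha = \text{roughness}^2$, the D_GGX function implements

$$
D_{GGX}(\mathbf{n}, \mathbf{h}) = \frac{\alpha^2}{\pi\left((\mathbf{n}\cdot\mathbf{h})^2\,(\alpha^2 - 1) + 1\right)^2}
$$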

Geometry term

image image image
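
The shaders below use the Smith form with a Schlick-GGX approximation per direction (functions G_Smith and G1_GGX_Schlick, with $k = \alpha/2$):

$$
G(\mathbf{n},\mathbf{l},\mathbf{v}) = G_1(\mathbf{n}\cdot\mathbf{l})\,G_1(\mathbf{n}\cdot\mathbf{v}),
\qquad
G_1(x) = \frac{x}{x\,(1-k)+k},\quad k = \frac{\alpha}{2}
$$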

Cook-Torrance Microfacet BRDF

image
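
Written out the way brdfMicrofacet below evaluates it (m = metallic, $\rho_d$ = base color, with the optional $(1-F)$ factor on the diffuse term):

$$
f_r = (1-m)\,(1-F)\,\frac{\rho_d}{\pi} \;+\; \frac{F\,D\,G}{4\,(\mathbf{n}\cdot\mathbf{v})(\mathbf{n}\cdot\mathbf{l})}
$$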

Practice

Vertex shader

#version 300 es
precision highp float;
in vec3 position; // input vertex position from mesh
in vec2 texcoord; // input vertex texture coordinate from mesh
in vec3 normal; // input vertex normal from mesh

uniform mat4 cameraLookAt; //camera look at matrix
uniform mat4 cameraProjection; //camera projection matrix
uniform mat4 meshTransform; // mesh transformation
uniform mat4 meshTransformTransposedInverse; // transposed inverse of meshTransform

out vec2 tc; // output texture coordinate of vertex
out vec3 wfn; // output fragment normal of vertex in world space
out vec3 vertPos; // output 3D position in world space

void main(){
tc = texcoord;
wfn = vec3(meshTransformTransposedInverse * vec4(normal, 0.0));
vec4 vertPos4 = meshTransform * vec4(position, 1.0);
vertPos = vec3(vertPos4) / vertPos4.w;
gl_Position = cameraProjection * cameraLookAt * vertPos4;
}

Fragment shader

#version 300 es
precision highp float;
out vec4 outColor;

in vec2 tc; // texture coordinate of pixel (interpolated)
in vec3 wfn; // fragment normal of pixel in world space (interpolated)
in vec3 vertPos; // fragment vertex position in world space (interpolated)

uniform sampler2D baseColorTexture; // description="albedo for dielectrics or F0 for metals"
uniform sampler2D roughnessTexture; // description="roughness texture"
uniform sampler2D normalTexture; // description="normal texture"
uniform sampler2D emissionTexture; // description="emission texture"
uniform float metallic; // description="metallic parameter, 0.0 for dielectrics, 1.0 for metals" defaultval ="0.0"
uniform float reflectance; // description="Fresnel reflectance for dielectrics in the range [0.0, 1.0]" defaultval ="0.5"
uniform vec4 lightColor; // description="color of light" defaultval="1.0, 1.0, 1.0, 1.0"
uniform float irradiPerp; // description="irradiance in perpendicular direction" defaultval="10.0"
uniform vec3 lightDirection; // light direction in world space
uniform vec3 cameraPosition; // camera position in world space

vec3 rgb2lin(vec3 rgb) { // sRGB to linear approximation
return pow(rgb, vec3(2.2));
}

vec3 lin2rgb(vec3 lin) { // linear to sRGB approximation
return pow(lin, vec3(1.0 / 2.2));
}

#define RECIPROCAL_PI 0.3183098861837907
#define RECIPROCAL_2PI 0.15915494309189535

vec3 modifiedPhongBRDF(vec3 lightDir, vec3 viewDir, vec3 normal,
vec3 phongDiffuseCol, vec3 phongSpecularCol, float phongShininess) {
vec3 color = phongDiffuseCol * RECIPROCAL_PI;
vec3 reflectDir = reflect(-lightDir, normal);
float specDot = max(dot(reflectDir, viewDir), 0.001);
float normalization = (phongShininess + 2.0) * RECIPROCAL_2PI;
color += pow(specDot, phongShininess) * normalization * phongSpecularCol;
return color;
}

float roughnessToShininess( const in float roughness ) {
return pow(1000.0, 1.0-roughness);
}

// from http://www.thetenthplanet.de/archives/1180
mat3 cotangentFrame(in vec3 N, in vec3 p, in vec2 uv)
{
// get edge vectors of the pixel triangle
vec3 dp1 = dFdx( p );
vec3 dp2 = dFdy( p );
vec2 duv1 = dFdx( uv );
vec2 duv2 = dFdy( uv );

// solve the linear system
vec3 dp2perp = cross( dp2, N );
vec3 dp1perp = cross( N, dp1 );
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;

// construct a scale-invariant frame
float invmax = inversesqrt( max( dot(T,T), dot(B,B) ) );
return mat3( T * invmax, B * invmax, N );
}

vec3 applyNormalMap(in vec3 normal, in vec3 viewVec, in vec2 texcoord)
{
vec3 highResNormal = texture(normalTexture, texcoord).xyz;
highResNormal = normalize(highResNormal * 2.0 - 1.0);
mat3 TBN = cotangentFrame(normal, -viewVec, texcoord);
return normalize(TBN * highResNormal);
}


vec3 fresnelSchlick(float cosTheta, vec3 F0) {
return F0 + (1.0 - F0) * pow(1.0 - cosTheta, 5.0);
}

float D_GGX(float NoH, float roughness) {
float alpha = roughness * roughness;
float alpha2 = alpha * alpha;
float NoH2 = NoH * NoH;
float b = (NoH2 * (alpha2 - 1.0) + 1.0);
return alpha2 * RECIPROCAL_PI / (b * b);
}

float G1_GGX_Schlick(float NoV, float roughness) {
float alpha = roughness * roughness;
float k = alpha / 2.0;
return max(NoV, 0.001) / (NoV * (1.0 - k) + k);
}

float G_Smith(float NoV, float NoL, float roughness) {
return G1_GGX_Schlick(NoL, roughness) * G1_GGX_Schlick(NoV, roughness);
}

float fresnelSchlick90(float cosTheta, float F0, float F90) {
return F0 + (F90 - F0) * pow(1.0 - cosTheta, 5.0);
}

float disneyDiffuseFactor(float NoV, float NoL, float VoH, float roughness) {
float alpha = roughness * roughness;
float F90 = 0.5 + 2.0 * alpha * VoH * VoH;
float F_in = fresnelSchlick90(NoL, 1.0, F90);
float F_out = fresnelSchlick90(NoV, 1.0, F90);
return F_in * F_out;
}

vec3 brdfMicrofacet(in vec3 L, in vec3 V, in vec3 N,
in float metallic, in float roughness, in vec3 baseColor, in float reflectance) {

vec3 H = normalize(V + L);

float NoV = clamp(dot(N, V), 0.0, 1.0);
float NoL = clamp(dot(N, L), 0.0, 1.0);
float NoH = clamp(dot(N, H), 0.0, 1.0);
float VoH = clamp(dot(V, H), 0.0, 1.0);

vec3 f0 = vec3(0.16 * (reflectance * reflectance));
f0 = mix(f0, baseColor, metallic);

vec3 F = fresnelSchlick(VoH, f0);
float D = D_GGX(NoH, roughness);
float G = G_Smith(NoV, NoL, roughness);

vec3 spec = (F * D * G) / (4.0 * max(NoV, 0.001) * max(NoL, 0.001));

vec3 rhoD = baseColor;

// optionally
rhoD *= vec3(1.0) - F;
// rhoD *= disneyDiffuseFactor(NoV, NoL, VoH, roughness);

rhoD *= (1.0 - metallic);

vec3 diff = rhoD * RECIPROCAL_PI;

return diff + spec;


}


void main() {
vec3 baseCol = rgb2lin(texture(baseColorTexture, tc).rgb);
float roughness = texture(roughnessTexture, tc).r;
vec4 emission = texture(emissionTexture, tc);
vec3 lightDir = normalize(-lightDirection); // towards light
vec3 viewDir = normalize(cameraPosition - vertPos);
vec3 n = normalize(wfn);

n = applyNormalMap(n, viewDir, tc);

vec3 radiance = rgb2lin(emission.rgb);

float irradiance = max(dot(lightDir, n), 0.0) * irradiPerp;
if(irradiance > 0.0) { // if receives light
vec3 brdf = brdfMicrofacet(lightDir, viewDir, n, metallic, roughness, baseCol, reflectance);
// irradiance contribution from directional light
radiance += brdf * irradiance * lightColor.rgb;
}

outColor.rgb = lin2rgb(radiance);
outColor.a = 1.0;
}

P10 Importance Sampling

This lesson introduces importance sampling already in the indirect diffuse part (in essence it is still sampling over the hemisphere, only the sample directions are generated randomly by the computer instead of being laid out on a regular grid).

  • The LearnOpenGL tutorial defines the sample directions for indirect diffuse by hand, covering the hemisphere uniformly, which is why that chapter does not need importance sampling.

Introduction

image

For IBL (image-based lighting), the incoming radiance Li is stored in a 2D environment image

image

This lesson uses a spherical environment map. We need to integrate the incoming radiance over the hemisphere to obtain the indirect diffuse lighting, which is where importance sampling comes in.

image

Importance sampling

Riemann sum

image

Monte Carlo integration is used together with importance sampling (the general estimator is written out below)

  • Note that if the sampling pdf is the uniform distribution, the result is essentially a Riemann sum with randomly placed sample points
image
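
For reference, the importance-sampled Monte Carlo estimator in standard notation (not copied from the slides):

$$
\int f(x)\,dx \;\approx\; \frac{1}{N}\sum_{i=1}^{N} \frac{f(X_i)}{p(X_i)}, \qquad X_i \sim p(x)
$$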

When p(x) is not uniform but follows some chosen distribution, this is importance sampling. The ideal p(x) has the same shape as the integrand, i.e. p(x) should be large wherever f(x) is large, because the regions where the function is large contribute the most to the integral.

Next question: how do we actually draw samples from such a p(x)?

image

Inverse transform sampling

Converts uniformly distributed random numbers into samples from another distribution

image

Goal: we can only sample from the uniform distribution, but we need samples that follow the distribution p(x)

image

u follows the uniform distribution, x follows the distribution p(x)

Where the pdf is largest, the cdf is steepest. After inverting, the relation flips: the inverse cdf is flattest there, so a wide range of u values maps into that neighbourhood of x and samples concentrate where the pdf is large.

image
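
A small worked example (my own, not from the video): take $p(x) = 2x$ on $[0, 1]$. Then

$$
P(x) = \int_0^x 2t\,dt = x^2, \qquad x = P^{-1}(u) = \sqrt{u}, \quad u \sim \mathcal{U}(0,1),
$$

so pushing uniform $u$ through $\sqrt{u}$ yields samples whose density grows linearly in $x$, which is exactly $p(x)$.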

IBL Lambert Diffuse

Write out the integral using the BRDF

image

Convert the integral into a Monte Carlo estimator

  • The pdf p(x) in the denominator should match f(x) as closely as possible, but Li has no analytic form, so p(x) is set to c·cosθ·sinθ
image

From the constraint that p(x) integrates to 1, solve for c to obtain the expression for p

image

Because the joint distribution does not depend on φ, φ is simply sampled uniformly, while θ is drawn from p(θ) via inverse transform sampling

image

Indirect diffuse conclusion (remember this!)

Indirect diffuse lighting does not depend on the view direction, so the generated IBL map is later ==sampled with the normal direction==

image

where un and vn are uniformly distributed in [0, 1] (a GLSL sketch of this sampling step follows below)

image
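
A minimal GLSL sketch of this sampling step (the helper name is mine; the formula in the slide above is an equivalent form):

#define PI 3.1415926535897932384626433832795

// Cosine-weighted sample direction in the local frame (z axis = surface normal),
// built from two uniform random numbers u, v in [0, 1] by inverse transform sampling:
// phi is uniform, theta follows p(theta) proportional to cos(theta)*sin(theta).
vec3 cosineSampleHemisphere(float u, float v) {
  float phi = 2.0 * PI * v;
  float theta = acos(sqrt(1.0 - u)); // equivalently asin(sqrt(u))
  return vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta));
}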

Practice

Mapping texture coordinates to spherical coordinates

image

Mapping spherical coordinates to Cartesian coordinates (a combined sketch of both mappings follows)

image
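
A sketch of both mappings in GLSL, consistent with the directionToSphericalEnvmap function in the P11 fragment shader below (the actual GSN project code may differ slightly):

#define PI 3.1415926535897932384626433832795

// Texture coordinates -> direction on the unit sphere
// (inverse of directionToSphericalEnvmap: s = 0.5 - phi/(2*PI), t = 1 - theta/PI).
vec3 sphericalEnvmapToDirection(vec2 tex) {
  float theta = PI * (1.0 - tex.t);
  float phi = 2.0 * PI * (0.5 - tex.s);
  return vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta));
}

// Direction on the unit sphere -> texture coordinates of the spherical envmap.
vec2 directionToSphericalEnvmap(vec3 dir) {
  float phi = atan(dir.y, dir.x);
  float theta = acos(dir.z);
  return vec2(0.5 - phi / (2.0 * PI), 1.0 - theta / PI);
}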

Code: https://www.gsn-lib.org/index.html#projectName=ShadersMonthly10&graphName=PrefilterDiffuse

P11 Image-based Lighting (IBL) of PBR Materials

Review

image image image image image

Introduction

The IBL map generated for the indirect specular part is ==sampled with the half vector H==. Because the NDF D appears in the integrand, the importance sampling should be driven by D

image

The reflection equation

image

The closer h is to n, the stronger the specular reflection; the farther h is from n, the weaker it becomes

image image

GGX importance sampling

image

Verify that the pdf integrates to 1

image image image image image

Sampling visualization

  • The pdf depends on D, which means the randomly generated h vectors stay close to the normal; the incoming and outgoing directions are mirrored about H
  • Ignore the sun light source in the figure below; the only known inputs to this sampling are v and n, and its outputs are h and l (see the conclusion for the reason)
image

where u, v are uniformly distributed in (0, 1); the explicit sampling formulas are written out below
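
Explicitly, these are the formulas used in the prefilterEnvMapSpecular and integrateBRDF listings further below (with $\alpha = \text{roughness}^2$ and $u_1, u_2$ the two uniform random numbers):

$$
\phi = 2\pi u_1, \qquad
\theta_h = \arccos\sqrt{\frac{1-u_2}{1+(\alpha^2-1)\,u_2}}
$$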

Increasing the roughness makes the sampled directions spread out more

image

Change of integration variable

θ is the angle between L and N; we now want to rewrite the integral in terms of the angle between H and the normal

image image image

Indirect specular conclusion (remember this!)

image

Both v and n must be known before sampling can produce Lo, so this cannot be precomputed directly for real-time use.

Split-sum approximation

image

1) Pre-Filtered Envmap

Randomly sample H vectors around the normal, take the normal direction as the view direction V, compute the reflection direction L, and sample the environment map along L; accumulating these samples (weighted by N·L, as in the code below) yields the Pre-Filtered Envmap

image

image
vec3 prefilterEnvMapSpecular(in sampler2D envmapSampler, in vec2 tex) {
float px = t2p(tex.x, width);
float py = t2p(tex.y, height);

vec3 normal = sphericalEnvmapToDirection(tex);
mat3 normalTransform = getNormalFrame(normal);
vec3 V = normal;
vec3 result = vec3(0.0);
float totalWeight = 0.0;
uint N = uint(samples);
for(uint n = 0u; n < N; n++) {
vec3 random = random_pcg3d(uvec3(px, py, n));
float phi = 2.0 * PI * random.x;
float u = random.y;
float alpha = roughness * roughness;
float theta = acos(sqrt((1.0 - u) / (1.0 + (alpha * alpha - 1.0) * u)));
vec3 posLocal = vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta));
vec3 H = normalTransform * posLocal;
vec3 L = 2.0 * dot(V, H) * H - V; // or use L = reflect(-V, H);
float NoL = dot(normal, L);
if(NoL > 0.0) {
vec2 uv = directionToSphericalEnvmap(L);
vec3 radiance = textureLod(envmapSampler, uv, mipmapLevel).rgb;
result += radiance * NoL;
totalWeight += NoL;
}
}
result = result / totalWeight;
return result;
}

2) BRDF Integration Map

Work in the normal's local frame with the normal as the z axis; the view direction V lies at $\phi=0$ (by isotropy, $\phi$ does not matter). Randomly sample H vectors around the normal and plug them into the formula

image image
// adapted from "Real Shading in Unreal Engine 4", Brian Karis, Epic Games
vec2 integrateBRDF(float roughness, float NoV) {
float px = t2p(interpolatedTexCoord.x, width);
float py = t2p(interpolatedTexCoord.y, height);

// view direction in normal space from spherical coordinates
float thetaView = acos(NoV);
vec3 V = vec3(sin(thetaView), 0.0, cos(thetaView)); // with phiView = 0.0

vec2 result = vec2(0.0);
uint N = uint(samples);
for(uint n = 0u; n < N; n++) {
vec3 random = random_pcg3d(uvec3(px, py, n));
float phi = 2.0 * PI * random.x;
float u = random.y;
float alpha = roughness * roughness;
float theta = acos(sqrt((1.0 - u) / (1.0 + (alpha * alpha - 1.0) * u)));
vec3 H = vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta));
vec3 L = 2.0 * dot(V, H) * H - V; // or use L = reflect(-V, H);
float NoL = clamp(L.z, 0.0, 1.0); // in tangent space N = (0, 0, 1)
float NoH = clamp(H.z, 0.0, 1.0);
float VoH = clamp(dot(V, H), 0.0, 1.0);
if(NoL > 0.0) {
float G = G_Smith(NoV, NoL, roughness);
float G_Vis = G * VoH / (NoH * NoV);
float Fc = pow(1.0 - VoH, 5.0);
result.x += (1.0 - Fc) * G_Vis;
result.y += Fc * G_Vis;
}
}
result = result / float(N);
return result;
}

Full precomputation code

Diffuse prefiltered environment map: sampled with the normal n

Specular prefiltered environment map: sampled with multiple reflection vectors (rv)

image

Vertex shader

#version 300 es
precision highp float;
precision highp int;
in vec3 position; // input vertex position from mesh
in vec2 texcoord; // input vertex texture coordinate from mesh
in vec3 normal; // input vertex normal from mesh

uniform mat4 cameraLookAt; //camera look at matrix
uniform mat4 cameraProjection; //camera projection matrix
uniform mat4 meshTransform; // mesh transformation
uniform mat4 meshTransformTransposedInverse; // transposed inverse of meshTransform
uniform int gsnMeshGroup;

out vec2 tc; // output texture coordinate of vertex
out vec3 wfn; // output fragment normal of vertex in world space
out vec3 vertPos; // output 3D position in world space

void main(){
if(gsnMeshGroup == 0) { // Stone Demon
tc = texcoord;
wfn = vec3(meshTransformTransposedInverse * vec4(normal, 0.0));
vec4 vertPos4 = meshTransform * vec4(position, 1.0);
vertPos = vec3(vertPos4) / vertPos4.w;
gl_Position = cameraProjection * cameraLookAt * vertPos4;
} else { // EnvMap Sphere
tc = texcoord;
wfn = normal;
const float sphereSize = 75.0;
vec4 vertPos4 = vec4(sphereSize * position, 1.0);
vertPos = vec3(vertPos4) / vertPos4.w;
gl_Position = cameraProjection * cameraLookAt * vertPos4;
}
}

Fragment shader

#version 300 es
precision highp float;
precision highp int;
out vec4 outColor;

in vec2 tc; // texture coordinate of pixel (interpolated)
in vec3 wfn; // fragment normal of pixel in world space (interpolated)
in vec3 vertPos; // fragment vertex position in world space (interpolated)

uniform sampler2D baseColorTexture; // description="albedo for dielectrics or F0 for metals"
uniform sampler2D roughnessTexture; // description="roughness texture"
uniform sampler2D normalTexture; // description="normal texture"
uniform sampler2D emissionTexture; // description="emission texture"
uniform sampler2D envMap; // min_filter="LINEAR" mag_filter="LINEAR" wrap_s="REPEAT" wrap_t="REPEAT"
uniform sampler2D envmapDiffuse; // min_filter="LINEAR" mag_filter="LINEAR"
uniform sampler2D brdfIntegrationMap; // min_filter="LINEAR" mag_filter="LINEAR"
uniform sampler2D envmapSpecular; // min_filter="LINEAR_MIPMAP_LINEAR" mag_filter="LINEAR"
uniform int mipLevelCount; // description="number of usable mipmap levels in envmapSpecular" defaultval ="5"
uniform float metallic; // description="metallic parameter, 0.0 for dielectrics, 1.0 for metals" defaultval ="0.0"
uniform float reflectance; // description="Fresnel reflectance for dielectrics in the range [0.0, 1.0]" defaultval ="0.5"
uniform vec4 lightColor; // description="color of light" defaultval="1.0, 1.0, 1.0, 1.0"
uniform float irradiPerp; // description="irradiance in perpendicular direction" defaultval="10.0"
uniform vec3 lightDirection; // light direction in world space
uniform vec3 cameraPosition; // camera position in world space
uniform int gsnMeshGroup;

vec3 rgb2lin(vec3 rgb) { // sRGB to linear approximation
return pow(rgb, vec3(2.2));
}

vec3 lin2rgb(vec3 lin) { // linear to sRGB approximation
return pow(lin, vec3(1.0 / 2.2));
}

#define RECIPROCAL_PI 0.3183098861837907
#define RECIPROCAL_2PI 0.15915494309189535

vec3 modifiedPhongBRDF(vec3 lightDir, vec3 viewDir, vec3 normal,
vec3 phongDiffuseCol, vec3 phongSpecularCol, float phongShininess) {
vec3 color = phongDiffuseCol * RECIPROCAL_PI;
vec3 reflectDir = reflect(-lightDir, normal);
float specDot = max(dot(reflectDir, viewDir), 0.001);
float normalization = (phongShininess + 2.0) * RECIPROCAL_2PI;
color += pow(specDot, phongShininess) * normalization * phongSpecularCol;
return color;
}

float roughnessToShininess( const in float roughness ) {
return pow(1000.0, 1.0-roughness);
}

// from http://www.thetenthplanet.de/archives/1180
mat3 cotangentFrame(in vec3 N, in vec3 p, in vec2 uv)
{
// get edge vectors of the pixel triangle
vec3 dp1 = dFdx( p );
vec3 dp2 = dFdy( p );
vec2 duv1 = dFdx( uv );
vec2 duv2 = dFdy( uv );

// solve the linear system
vec3 dp2perp = cross( dp2, N );
vec3 dp1perp = cross( N, dp1 );
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;

// construct a scale-invariant frame
float invmax = inversesqrt( max( dot(T,T), dot(B,B) ) );
return mat3( T * invmax, B * invmax, N );
}

vec3 applyNormalMap(in vec3 normal, in vec3 viewVec, in vec2 texcoord)
{
vec3 highResNormal = texture(normalTexture, texcoord).xyz;
highResNormal = normalize(highResNormal * 2.0 - 1.0);
mat3 TBN = cotangentFrame(normal, -viewVec, texcoord);
return normalize(TBN * highResNormal);
}


vec3 fresnelSchlick(float cosTheta, vec3 F0) {
return F0 + (1.0 - F0) * pow(1.0 - cosTheta, 5.0);
}

float D_GGX(float NoH, float roughness) {
float alpha = roughness * roughness;
float alpha2 = alpha * alpha;
float NoH2 = NoH * NoH;
float b = (NoH2 * (alpha2 - 1.0) + 1.0);
return alpha2 * RECIPROCAL_PI / (b * b);
}

float G1_GGX_Schlick(float NoV, float roughness) {
float alpha = roughness * roughness;
float k = alpha / 2.0;
return max(NoV, 0.001) / (NoV * (1.0 - k) + k);
}

float G_Smith(float NoV, float NoL, float roughness) {
return G1_GGX_Schlick(NoL, roughness) * G1_GGX_Schlick(NoV, roughness);
}

float fresnelSchlick90(float cosTheta, float F0, float F90) {
return F0 + (F90 - F0) * pow(1.0 - cosTheta, 5.0);
}

float disneyDiffuseFactor(float NoV, float NoL, float VoH, float roughness) {
float alpha = roughness * roughness;
float F90 = 0.5 + 2.0 * alpha * VoH * VoH;
float F_in = fresnelSchlick90(NoL, 1.0, F90);
float F_out = fresnelSchlick90(NoV, 1.0, F90);
return F_in * F_out;
}

vec3 brdfMicrofacet(in vec3 L, in vec3 V, in vec3 N,
in float metallic, in float roughness, in vec3 baseColor, in float reflectance) {

vec3 H = normalize(V + L);

float NoV = clamp(dot(N, V), 0.0, 1.0);
float NoL = clamp(dot(N, L), 0.0, 1.0);
float NoH = clamp(dot(N, H), 0.0, 1.0);
float VoH = clamp(dot(V, H), 0.0, 1.0);

vec3 f0 = vec3(0.16 * (reflectance * reflectance));
f0 = mix(f0, baseColor, metallic);

vec3 F = fresnelSchlick(VoH, f0);
float D = D_GGX(NoH, roughness);
float G = G_Smith(NoV, NoL, roughness);

vec3 spec = (F * D * G) / (4.0 * max(NoV, 0.001) * max(NoL, 0.001));

vec3 rhoD = baseColor;

// optionally
rhoD *= vec3(1.0) - F;
// rhoD *= disneyDiffuseFactor(NoV, NoL, VoH, roughness);

rhoD *= (1.0 - metallic);

vec3 diff = rhoD * RECIPROCAL_PI;

return diff + spec;


}


#define PI 3.1415926535897932384626433832795

vec2 directionToSphericalEnvmap(vec3 dir) {
float phi = atan(dir.y, dir.x);
float theta = acos(dir.z);
float s = 0.5 - phi / (2.0 * PI);
float t = 1.0 - theta / PI;
return vec2(s, t);
}

// adapted from "Real Shading in Unreal Engine 4", Brian Karis, Epic Games
vec3 specularIBL(vec3 F0, float roughness, vec3 N, vec3 V) {
float NoV = clamp(dot(N, V), 0.0, 1.0);
vec3 R = reflect(-V, N);
vec2 uv = directionToSphericalEnvmap(R);
vec3 T1 = textureLod(envmapSpecular, uv, roughness * float(mipLevelCount)).rgb;
vec4 brdfIntegration = texture(brdfIntegrationMap, vec2(NoV, roughness));
vec3 T2 = (F0 * brdfIntegration.x + brdfIntegration.y);
return T1 * T2;
}

vec3 renderMesh() {
vec3 baseCol = rgb2lin(texture(baseColorTexture, tc).rgb);
float roughness = texture(roughnessTexture, tc).r;
vec4 emission = texture(emissionTexture, tc);
vec3 lightDir = normalize(-lightDirection); // towards light
vec3 viewDir = normalize(cameraPosition - vertPos);
vec3 n = normalize(wfn);

n = applyNormalMap(n, viewDir, tc);

vec3 radiance = rgb2lin(emission.rgb);

// direct light computation
/*float irradiance = max(dot(lightDir, n), 0.0) * irradiPerp;
if(irradiance > 0.0) { // if receives light
vec3 brdf = brdfMicrofacet(lightDir, viewDir, n, metallic, roughness, baseCol, reflectance);
// irradiance contribution from directional light
radiance += brdf * irradiance * lightColor.rgb;
}*/

// compute F0
vec3 f0 = vec3(0.16 * (reflectance * reflectance));
f0 = mix(f0, baseCol, metallic);

// image-based lighting (diffuse part)
vec2 envUV = directionToSphericalEnvmap(n);
vec3 rhoD = (1.0 - metallic) * baseCol;
rhoD *= vec3(1.0) - f0; // optionally
radiance += rhoD * texture(envmapDiffuse, envUV).rgb;

// image-based lighting (specular part)
radiance += specularIBL(f0, roughness, n, viewDir);

return lin2rgb(radiance);
}

void main() {
if(gsnMeshGroup == 0) {
outColor.rgb = renderMesh();
}
if(gsnMeshGroup == 1) {
outColor.rgb = texture(envMap, vec2(0.5 - tc.x, tc.y)).rgb;
}
outColor.a = 1.0;

}
