Date: July to December 2020
Last revised: 2020-12-13
Disclaimer: these are personal study notes, written both for my own review and for discussion. If anything here is wrong, infringes on someone else's work, or you simply read it differently, please leave a comment and point it out; I will correct it promptly. My sincere thanks!
The PBR notes come in three parts; this is part three: an analysis of the BRDF algorithm in Unity URP.
Contents
Overview
URP Lit overview map
Vertex shader: LitPassVertex
Fragment shader: LitPassFragment
Summary
References
Overview: these notes skip most of the basic background and only dig into the pieces that show up most often in day-to-day effect development.
Environment: Unity 2019.4 with URP 7.3.1; the analysis targets the pipeline's standard Lit.shader.
Formulas: the fragment-side DirectBDRF function contains a compact fit of the Disney BRDF (annotated in detail further down).
Vertex shader: LitPassVertex
In day-to-day effect work, how normals are handled and how SH is evaluated have the biggest impact on the final look, and they are also where mistakes creep in most easily. Get these two right and the art quality takes a real step up; see points 6 and 8.
1锥债、GetVertexPositionInputs
VertexPositionInputs vertexInput = GetVertexPositionInputs(input.positionOS.xyz);
VertexPositionInputs GetVertexPositionInputs(float3 positionOS)
{
    VertexPositionInputs input;
    input.positionWS = TransformObjectToWorld(positionOS);
    input.positionVS = TransformWorldToView(input.positionWS);
    input.positionCS = TransformWorldToHClip(input.positionWS);
    float4 ndc = input.positionCS * 0.5f;
    input.positionNDC.xy = float2(ndc.x, ndc.y * _ProjectionParams.x) + ndc.w;
    input.positionNDC.zw = input.positionCS.zw;
    return input;
}
struct VertexPositionInputs
{
    float3 positionWS; // World space position
    float3 positionVS; // View space position
    float4 positionCS; // Homogeneous clip space position
    float4 positionNDC;// Homogeneous normalized device coordinates
};
2陡蝇、GetVertexNormalInputs
VertexNormalInputs normalInput = GetVertexNormalInputs(input.normalOS, input.tangentOS);
VertexNormalInputs GetVertexNormalInputs(float3 normalOS)
{
    VertexNormalInputs tbn;
    tbn.tangentWS = real3(1.0, 0.0, 0.0);
    tbn.bitangentWS = real3(0.0, 1.0, 0.0);
    tbn.normalWS = TransformObjectToWorldNormal(normalOS);
    return tbn;
}
VertexNormalInputs GetVertexNormalInputs(float3 normalOS, float4 tangentOS)
{
    VertexNormalInputs tbn;
    // mikkts space compliant. Only normalize when extracting the normal at the fragment stage.
    real sign = tangentOS.w * GetOddNegativeScale();
    tbn.normalWS = TransformObjectToWorldNormal(normalOS);
    tbn.tangentWS = TransformObjectToWorldDir(tangentOS.xyz);
    tbn.bitangentWS = cross(tbn.normalWS, tbn.tangentWS) * sign;
    return tbn;
}
struct VertexNormalInputs
{
    real3 tangentWS;
    real3 bitangentWS;
    float3 normalWS;
};
3毅整、GetCameraPositionWS
4、VertexLighting
half3 vertexLight = VertexLighting(vertexInput.positionWS, normalInput.normalWS);
half3 VertexLighting(float3 positionWS, half3 normalWS)
{
    half3 vertexLightColor = half3(0.0, 0.0, 0.0);
#ifdef _ADDITIONAL_LIGHTS_VERTEX
    uint lightsCount = GetAdditionalLightsCount();
    for (uint lightIndex = 0u; lightIndex < lightsCount; ++lightIndex)
    {
        Light light = GetAdditionalLight(lightIndex, positionWS);
        half3 lightColor = light.color * light.distanceAttenuation;
        vertexLightColor += LightingLambert(lightColor, light.direction, normalWS);
    }
#endif
    return vertexLightColor;
}
5绽左、ComputeFogFactor
half fogFactor = ComputeFogFactor(vertexInput.positionCS.z);
real ComputeFogFactor(float z)
{
    float clipZ_01 = UNITY_Z_0_FAR_FROM_CLIPSPACE(z);
#if defined(FOG_LINEAR)
    // factor = (end-z)/(end-start) = z * (-1/(end-start)) + (end/(end-start))
    float fogFactor = saturate(clipZ_01 * unity_FogParams.z + unity_FogParams.w);
    return real(fogFactor);
#elif defined(FOG_EXP) || defined(FOG_EXP2)
    // factor = exp(-(density*z)^2)
    // -density * z computed at vertex
    return real(unity_FogParams.x * clipZ_01);
#else
    return 0.0h;
#endif
}
6悼嫉、NormalizeNormalPerVertex 法線歸一化
output.normalWS = NormalizeNormalPerVertex(normalInput.normalWS);
real3 NormalizeNormalPerVertex(real3 normalWS)
{
#if defined(SHADER_QUALITY_LOW) && defined(_NORMALMAP)
    return normalWS;
#else
    return normalize(normalWS);
#endif
}
Why normal normalization matters: for best quality, normals should be normalized both before and after interpolation.
At the vertex stage, skinning or blend shapes can change the normal's length significantly.
At the pixel stage, even interpolating unit-length normals produces a non-unit result, and when a normal map is used mikktspace builds a non-orthonormal basis.
URP therefore balances performance against quality and lets the user pick a shader quality tier:
Low quality tier: normalize per vertex or per pixel, depending on whether a normal map is sampled.
Medium quality tier: always normalize per vertex; normalize per pixel only when a normal map is used.
High quality tier: normalize in both the vertex and the pixel shader.
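For reference, the per-pixel counterpart mirrors the per-vertex version. Quoted from memory of URP 7.x Lighting.hlsl, so treat it as a sketch and check the package source:
real3 NormalizeNormalPerPixel(real3 normalWS)
{
#if defined(SHADER_QUALITY_LOW) && defined(_NORMALMAP)
    // Low quality tier with a normal map: skip the per-pixel normalize.
    return normalWS;
#else
    return normalize(normalWS);
#endif
}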
7专酗、OUTPUT_LIGHTMAP_UV
OUTPUT_LIGHTMAP_UV(input.lightmapUV, unity_LightmapST, output.lightmapUV);
#ifdef LIGHTMAP_ON
#define DECLARE_LIGHTMAP_OR_SH(lmName, shName, index) float2 lmName : TEXCOORD##index
#define OUTPUT_LIGHTMAP_UV(lightmapUV, lightmapScaleOffset, OUT) OUT.xy = lightmapUV.xy * lightmapScaleOffset.xy + lightmapScaleOffset.zw;
#define OUTPUT_SH(normalWS, OUT)
#else
#define DECLARE_LIGHTMAP_OR_SH(lmName, shName, index) half3 shName : TEXCOORD##index
#define OUTPUT_LIGHTMAP_UV(lightmapUV, lightmapScaleOffset, OUT)
#define OUTPUT_SH(normalWS, OUT) OUT.xyz = SampleSHVertex(normalWS)
#endif
8睹逃、OUTPUT_SH
The vertex SH color is used constantly on characters and in scenes. A macro decides whether the plain sampled version or the linear version is used; detailed tests are further down in these notes.
OUTPUT_SH(output.normalWS.xyz, output.vertexSH);
#ifdef LIGHTMAP_ON
#define DECLARE_LIGHTMAP_OR_SH(lmName, shName, index) float2 lmName : TEXCOORD##index
#define OUTPUT_LIGHTMAP_UV(lightmapUV, lightmapScaleOffset, OUT) OUT.xy = lightmapUV.xy * lightmapScaleOffset.xy + lightmapScaleOffset.zw;
#define OUTPUT_SH(normalWS, OUT)
#else
#define DECLARE_LIGHTMAP_OR_SH(lmName, shName, index) half3 shName : TEXCOORD##index
#define OUTPUT_LIGHTMAP_UV(lightmapUV, lightmapScaleOffset, OUT)
#define OUTPUT_SH(normalWS, OUT) OUT.xyz = SampleSHVertex(normalWS)
#endif
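OUTPUT_SH bottoms out in SampleSHVertex, which the notes don't quote. A sketch from memory of URP 7.x Lighting.hlsl (verify against the package source):
// SH vertex evaluation. Depending on the target, SH is evaluated fully per vertex,
// mixed (L2 per vertex, L0/L1 per pixel), or not at all here (fully per pixel).
half3 SampleSHVertex(half3 normalWS)
{
#if defined(EVALUATE_SH_VERTEX)
    return max(half3(0, 0, 0), SampleSH(normalWS));
#elif defined(EVALUATE_SH_MIXED)
    // No max() here since this is only the L2 contribution.
    return SHEvalLinearL2(normalWS, unity_SHBr, unity_SHBg, unity_SHBb, unity_SHC);
#endif
    // Fully per pixel: nothing to compute in the vertex shader.
    return half3(0.0, 0.0, 0.0);
}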
9佑笋、GetShadowCoord
#if defined(REQUIRES_VERTEX_SHADOW_COORD_INTERPOLATOR)
output.shadowCoord = GetShadowCoord(vertexInput);
#endif
float4 GetShadowCoord(VertexPositionInputs vertexInput)
{
    return TransformWorldToShadowCoord(vertexInput.positionWS);
}
half ComputeCascadeIndex(float3 positionWS)
{
    float3 fromCenter0 = positionWS - _CascadeShadowSplitSpheres0.xyz;
    float3 fromCenter1 = positionWS - _CascadeShadowSplitSpheres1.xyz;
    float3 fromCenter2 = positionWS - _CascadeShadowSplitSpheres2.xyz;
    float3 fromCenter3 = positionWS - _CascadeShadowSplitSpheres3.xyz;
    float4 distances2 = float4(dot(fromCenter0, fromCenter0), dot(fromCenter1, fromCenter1), dot(fromCenter2, fromCenter2), dot(fromCenter3, fromCenter3));
    half4 weights = half4(distances2 < _CascadeShadowSplitSphereRadii);
    weights.yzw = saturate(weights.yzw - weights.xyz);
    return 4 - dot(weights, half4(4, 3, 2, 1));
}
float4 TransformWorldToShadowCoord(float3 positionWS)
{
#ifdef _MAIN_LIGHT_SHADOWS_CASCADE
    half cascadeIndex = ComputeCascadeIndex(positionWS);
#else
    half cascadeIndex = 0;
#endif
    return mul(_MainLightWorldToShadow[cascadeIndex], float4(positionWS, 1.0));
}
Fragment shader: LitPassFragment
The SurfaceData struct
1、InitializeStandardLitSurfaceData (packing the texture samples)
Input: the float2 UV passed down from the vertex stage
Output: the SurfaceData struct
Function: initialize the surface data by sampling and packing every texture. 1.1 Sample the albedo map first and take its alpha channel. 1.2 Use that alpha for the early clip (a performance win: discarded pixels skip everything that follows). 1.3 Depending on the workflow, fetch the specGloss data, either metallic plus smoothness or the metallic/gloss map, each with its own strength control. 1.4 Sample the normal map, unpack it and apply the scale control. 1.5 Sample the AO map (behind a graphics-quality check), with a strength control. 1.6 Sample the emission map; the HDR _EmissionColor carries its own intensity.
output.uv = TRANSFORM_TEX(input.texcoord, _BaseMap); // UV transform done in the vertex shader
InitializeStandardLitSurfaceData(input.uv, surfaceData); // sampling done in the fragment shader
inline void InitializeStandardLitSurfaceData(float2 uv, out SurfaceData outSurfaceData)
{
    half4 albedoAlpha = SampleAlbedoAlpha(uv, TEXTURE2D_ARGS(_BaseMap, sampler_BaseMap));
    outSurfaceData.alpha = Alpha(albedoAlpha.a, _BaseColor, _Cutoff);
    half4 specGloss = SampleMetallicSpecGloss(uv, albedoAlpha.a);
    outSurfaceData.albedo = albedoAlpha.rgb * _BaseColor.rgb;
#if _SPECULAR_SETUP
    outSurfaceData.metallic = 1.0h;
    outSurfaceData.specular = specGloss.rgb;
#else
    outSurfaceData.metallic = specGloss.r;
    outSurfaceData.specular = half3(0.0h, 0.0h, 0.0h);
#endif
    outSurfaceData.smoothness = specGloss.a;
    outSurfaceData.normalTS = SampleNormal(uv, TEXTURE2D_ARGS(_BumpMap, sampler_BumpMap), _BumpScale);
    outSurfaceData.occlusion = SampleOcclusion(uv);
    outSurfaceData.emission = SampleEmission(uv, _EmissionColor.rgb, TEXTURE2D_ARGS(_EmissionMap, sampler_EmissionMap));
}
1.1、SampleAlbedoAlpha
half4 albedoAlpha = SampleAlbedoAlpha(uv, TEXTURE2D_ARGS(_BaseMap, sampler_BaseMap));
half4 SampleAlbedoAlpha(float2 uv, TEXTURE2D_PARAM(albedoAlphaMap, sampler_albedoAlphaMap))
{
return SAMPLE_TEXTURE2D(albedoAlphaMap, sampler_albedoAlphaMap, uv);
}
1.2民珍、Alpha
outSurfaceData.alpha = Alpha(albedoAlpha.a, _BaseColor, _Cutoff);
half Alpha(half albedoAlpha, half4 color, half cutoff)
{
#if !defined(_SMOOTHNESS_TEXTURE_ALBEDO_CHANNEL_A) && !defined(_GLOSSINESS_FROM_BASE_ALPHA)
    half alpha = albedoAlpha * color.a;
#else
    half alpha = color.a;
#endif
#if defined(_ALPHATEST_ON)
    clip(alpha - cutoff);
#endif
    return alpha;
}
1.3襟士、SampleMetallicSpecGloss
half4 specGloss = SampleMetallicSpecGloss(uv, albedoAlpha.a);
#ifdef _SPECULAR_SETUP
    #define SAMPLE_METALLICSPECULAR(uv) SAMPLE_TEXTURE2D(_SpecGlossMap, sampler_SpecGlossMap, uv)
#else
    #define SAMPLE_METALLICSPECULAR(uv) SAMPLE_TEXTURE2D(_MetallicGlossMap, sampler_MetallicGlossMap, uv)
#endif
half4 SampleMetallicSpecGloss(float2 uv, half albedoAlpha)
{
    half4 specGloss;
#ifdef _METALLICSPECGLOSSMAP
    specGloss = SAMPLE_METALLICSPECULAR(uv);
    #ifdef _SMOOTHNESS_TEXTURE_ALBEDO_CHANNEL_A
        specGloss.a = albedoAlpha * _Smoothness;
    #else
        specGloss.a *= _Smoothness;
    #endif
#else // _METALLICSPECGLOSSMAP
    #if _SPECULAR_SETUP
        specGloss.rgb = _SpecColor.rgb;
    #else
        specGloss.rgb = _Metallic.rrr;
    #endif
    #ifdef _SMOOTHNESS_TEXTURE_ALBEDO_CHANNEL_A
        specGloss.a = albedoAlpha * _Smoothness;
    #else
        specGloss.a = _Smoothness;
    #endif
#endif
    return specGloss;
}
1.4、Normal handling
For a typical normal map, the call chain ultimately ends up in the UnpackNormalAG function.
outSurfaceData.normalTS = SampleNormal(uv, TEXTURE2D_ARGS(_BumpMap, sampler_BumpMap), _BumpScale);
half3 SampleNormal(float2 uv, TEXTURE2D_PARAM(bumpMap, sampler_bumpMap), half scale = 1.0h)
{
#ifdef _NORMALMAP
    half4 n = SAMPLE_TEXTURE2D(bumpMap, sampler_bumpMap, uv);
    #if BUMP_SCALE_NOT_SUPPORTED
        return UnpackNormal(n);
    #else
        return UnpackNormalScale(n, scale);
    #endif
#else
    return half3(0.0h, 0.0h, 1.0h);
#endif
}
1.5陋桂、SampleOcclusion
outSurfaceData.occlusion = SampleOcclusion(uv);
half SampleOcclusion(float2 uv)
{
#ifdef _OCCLUSIONMAP
    // TODO: Controls things like these by exposing SHADER_QUALITY levels (low, medium, high)
    #if defined(SHADER_API_GLES)
        return SAMPLE_TEXTURE2D(_OcclusionMap, sampler_OcclusionMap, uv).g;
    #else
        half occ = SAMPLE_TEXTURE2D(_OcclusionMap, sampler_OcclusionMap, uv).g;
        return LerpWhiteTo(occ, _OcclusionStrength);
    #endif
#else
    return 1.0;
#endif
}
1.6、SampleEmission
outSurfaceData.emission = SampleEmission(uv, _EmissionColor.rgb, TEXTURE2D_ARGS(_EmissionMap, sampler_EmissionMap));
half3 SampleEmission(float2 uv, half3 emissionColor, TEXTURE2D_PARAM(emissionMap, sampler_emissionMap))
{
#ifndef _EMISSION
return 0;
#else
return SAMPLE_TEXTURE2D(emissionMap, sampler_emissionMap, uv).rgb * emissionColor;
#endif
}
The InputData struct
2、InitializeInputData (initialize the PBR input data from what the vertex stage passed down)
Input: the Varyings struct and the tangent-space normal from the normal map
Output: the InputData struct
Function: initialize the PBR input data, processing the interpolated vertex data to get it ready for the PBR evaluation. 2.1 Transform the normal-map normal from tangent space to world space. 2.2 Renormalize the normal (see point 6 in the vertex section for why this matters). 2.3 Guard against a zero-length view vector. 2.4 The shadow term. 2.5 Simple global illumination: a macro picks either the lightmap or per-pixel SH (not the vertex SH); in practice, characters usually use SH and scenes use lightmaps.
InitializeInputData(input, surfaceData.normalTS, inputData);
void InitializeInputData(Varyings input, half3 normalTS, out InputData inputData)
{
    inputData = (InputData)0;
#if defined(REQUIRES_WORLD_SPACE_POS_INTERPOLATOR)
    inputData.positionWS = input.positionWS;
#endif
#ifdef _NORMALMAP
    half3 viewDirWS = half3(input.normalWS.w, input.tangentWS.w, input.bitangentWS.w);
    inputData.normalWS = TransformTangentToWorld(normalTS,
        half3x3(input.tangentWS.xyz, input.bitangentWS.xyz, input.normalWS.xyz));
#else
    half3 viewDirWS = input.viewDirWS;
    inputData.normalWS = input.normalWS;
#endif
    inputData.normalWS = NormalizeNormalPerPixel(inputData.normalWS);
    viewDirWS = SafeNormalize(viewDirWS);
    inputData.viewDirectionWS = viewDirWS;
#if defined(REQUIRES_VERTEX_SHADOW_COORD_INTERPOLATOR)
    inputData.shadowCoord = input.shadowCoord;
#elif defined(MAIN_LIGHT_CALCULATE_SHADOWS)
    inputData.shadowCoord = TransformWorldToShadowCoord(inputData.positionWS);
#else
    inputData.shadowCoord = float4(0, 0, 0, 0);
#endif
    inputData.fogCoord = input.fogFactorAndVertexLight.x;
    inputData.vertexLighting = input.fogFactorAndVertexLight.yzw;
    inputData.bakedGI = SAMPLE_GI(input.lightmapUV, input.vertexSH, inputData.normalWS);
}
2.1-2.4 are handled inline in the code above: the tangent-to-world transform, the per-pixel renormalization, SafeNormalize on the view direction, and the shadow coordinate.
2.5、SAMPLE_GI (the simple global illumination path)
inputData.bakedGI = SAMPLE_GI(input.lightmapUV, input.vertexSH, inputData.normalWS);
// We either sample GI from the baked lightmap or from probes.
// If lightmap: sampleData.xy = lightmapUV
// If probe:    sampleData.xyz = L2 SH terms
#ifdef LIGHTMAP_ON
#define SAMPLE_GI(lmName, shName, normalWSName) SampleLightmap(lmName, normalWSName)
#else
#define SAMPLE_GI(lmName, shName, normalWSName) SampleSHPixel(shName, normalWSName)
#endif
2.51疾层、SampleLightmap
//樣本烘焙的光照貼圖将饺。 非方向性和定向性(如果可用)。
//不支持實時GI。
half3 SampleLightmap(float2 lightmapUV, half3 normalWS)
{
#ifdef UNITY_LIGHTMAP_FULL_HDR
    bool encodedLightmap = false;
#else
    bool encodedLightmap = true;
#endif
    half4 decodeInstructions = half4(LIGHTMAP_HDR_MULTIPLIER, LIGHTMAP_HDR_EXPONENT, 0.0h, 0.0h);
    // The shader library's lightmap sampling functions transform the lightmap UVs to apply bias and scale.
    // However, the universal pipeline already transformed those coordinates in the vertex shader.
    // We pass half4(1, 1, 0, 0) and the compiler optimizes the transform away.
    half4 transformCoords = half4(1, 1, 0, 0);
#ifdef DIRLIGHTMAP_COMBINED
    return SampleDirectionalLightmap(TEXTURE2D_ARGS(unity_Lightmap, samplerunity_Lightmap),
        TEXTURE2D_ARGS(unity_LightmapInd, samplerunity_Lightmap),
        lightmapUV, transformCoords, normalWS, encodedLightmap, decodeInstructions);
#elif defined(LIGHTMAP_ON)
    return SampleSingleLightmap(TEXTURE2D_ARGS(unity_Lightmap, samplerunity_Lightmap), lightmapUV, transformCoords, encodedLightmap, decodeInstructions);
#else
    return half3(0.0, 0.0, 0.0);
#endif
}
2.52水醋、SampleSHPixel
第一排:頂點里面最簡單的SH,第二排:頂點里面的線性SH(這里需要跟片元里面的SH計算做混合)彪置,第三排:片元里面的像素SH拄踪;第四排:像素里的線性SH計算,并且跟頂點里面的線性SH結果混合(測試結果跟頂點SH效果相差不大)拳魁。從測試結果來看惶桐,單純的像素SH效果是最好的。SH靈活運用潘懊,用途廣泛姚糊。
// SH pixel evaluation. Depending on the target, SH sampling is either
// mixed or done fully per pixel. See SampleSHVertex.
half3 SampleSHPixel(half3 L2Term, half3 normalWS)
{
#if defined(EVALUATE_SH_VERTEX)
    return L2Term;
#elif defined(EVALUATE_SH_MIXED)
    half3 L0L1Term = SHEvalLinearL0L1(normalWS, unity_SHAr, unity_SHAg, unity_SHAb);
    return max(half3(0, 0, 0), L2Term + L0L1Term);
#endif
    // Default: evaluate SH fully per pixel.
    return SampleSH(normalWS);
}
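The default path above calls SampleSH, which simply gathers the global SH coefficients and forwards them to SampleSH9 below. A sketch from memory of the core shader library (verify against your package):
half3 SampleSH(half3 normalWS)
{
    // Gather the global SH coefficients and evaluate the L0, L1 and L2 terms.
    real4 SHCoefficients[7];
    SHCoefficients[0] = unity_SHAr;
    SHCoefficients[1] = unity_SHAg;
    SHCoefficients[2] = unity_SHAb;
    SHCoefficients[3] = unity_SHBr;
    SHCoefficients[4] = unity_SHBg;
    SHCoefficients[5] = unity_SHBb;
    SHCoefficients[6] = unity_SHC;
    return max(half3(0, 0, 0), SampleSH9(SHCoefficients, normalWS));
}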
#if HAS_HALF
half3 SampleSH9(half4 SHCoefficients[7], half3 N)
{
half4 shAr = SHCoefficients[0];
half4 shAg = SHCoefficients[1];
half4 shAb = SHCoefficients[2];
half4 shBr = SHCoefficients[3];
half4 shBg = SHCoefficients[4];
half4 shBb = SHCoefficients[5];
half4 shCr = SHCoefficients[6];
// Linear + constant polynomial terms
half3 res = SHEvalLinearL0L1(N, shAr, shAg, shAb);
// Quadratic polynomials
res += SHEvalLinearL2(N, shBr, shBg, shBb, shCr);
return res;
}
#endif
float3 SampleSH9(float4 SHCoefficients[7], float3 N)
{
float4 shAr = SHCoefficients[0];
float4 shAg = SHCoefficients[1];
float4 shAb = SHCoefficients[2];
float4 shBr = SHCoefficients[3];
float4 shBg = SHCoefficients[4];
float4 shBb = SHCoefficients[5];
float4 shCr = SHCoefficients[6];
// Linear + constant polynomial terms
float3 res = SHEvalLinearL0L1(N, shAr, shAg, shAb);
// Quadratic polynomials
res += SHEvalLinearL2(N, shBr, shBg, shBb, shCr);
return res;
}
3救恨、UniversalFragmentPBR BRDF-PBR計算
輸入:結構體 InputData,結構體 SurfaceData
輸出:最終BRDF顏色
功能:BRDF-PBR計算释树。3.1肠槽、初始化BRDF Data結構體。根據(jù)金屬貼圖計算diffuse顏色和specular顏色奢啥。計算掠射項grazingTerm秸仙。計算反向粗糙度perceptualRoughness。計算2次方粗糙度roughness桩盲。計算4次方粗糙度roughness2寂纪。計算歸一化的掠射項normalizationTerm。計算4次方粗糙度減1roughness2MinusOne正驻。3.2弊攘、初如化Light結構體。獲取到主方向光位置direction姑曙。獲取光探測器的遮擋數(shù)據(jù)distanceAttenuation襟交。計算陰影項shadowAttenuation。獲取主方向光顏色color伤靠。3.3捣域、實時光與非實時光混合啼染。計算輸出lightMap混合部分。3.4焕梅、全局光計算迹鹅。BRDF間接光計算。3.5贞言、物理燈光計算斜棚。BRDF直接光計算。
當函數(shù)擬合能熟練使用時该窗,并不需要再做這些圖例測試弟蚀,可以直接給擬合曲線圖例說明了。
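The notes go straight into the sub-steps, so for orientation here is the function they belong to, paraphrased from memory of URP 7.x Lighting.hlsl; treat it as a sketch and check the package source for the exact version:
half4 UniversalFragmentPBR(InputData inputData, half3 albedo, half metallic, half3 specular,
    half smoothness, half occlusion, half3 emission, half alpha)
{
    // 3.1 Pack the BRDF terms.
    BRDFData brdfData;
    InitializeBRDFData(albedo, metallic, specular, smoothness, alpha, brdfData);

    // 3.2 / 3.3 Main light, then mixing with baked GI.
    Light mainLight = GetMainLight(inputData.shadowCoord);
    MixRealtimeAndBakedGI(mainLight, inputData.normalWS, inputData.bakedGI, half4(0, 0, 0, 0));

    // 3.4 Indirect (GI) term, then 3.5 direct term for the main light.
    half3 color = GlobalIllumination(brdfData, inputData.bakedGI, occlusion, inputData.normalWS, inputData.viewDirectionWS);
    color += LightingPhysicallyBased(brdfData, mainLight, inputData.normalWS, inputData.viewDirectionWS);

#ifdef _ADDITIONAL_LIGHTS
    // 3.6 Per-pixel additional lights (skipped in these notes).
    uint pixelLightCount = GetAdditionalLightsCount();
    for (uint lightIndex = 0u; lightIndex < pixelLightCount; ++lightIndex)
    {
        Light light = GetAdditionalLight(lightIndex, inputData.positionWS);
        color += LightingPhysicallyBased(brdfData, light, inputData.normalWS, inputData.viewDirectionWS);
    }
#endif

#ifdef _ADDITIONAL_LIGHTS_VERTEX
    color += inputData.vertexLighting * brdfData.diffuse;
#endif

    color += emission;
    return half4(color, alpha);
}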
The BRDFData struct
struct BRDFData
{
    half3 diffuse;
    half3 specular;
    half perceptualRoughness;
    half roughness;
    half roughness2;
    half grazingTerm;
    // We save some light-invariant BRDF terms so we don't have to recompute
    // them in the light loop. See the DirectBRDF function for a detailed explanation.
    half normalizationTerm; // roughness * 4.0 + 2.0
    half roughness2MinusOne; // roughness^2 - 1.0
};
3.1规肴、InitializeBRDFData
inline void InitializeBRDFData(half3 albedo, half metallic, half3 specular, half smoothness, half alpha, out BRDFData outBRDFData)
{
#ifdef _SPECULAR_SETUP
    half reflectivity = ReflectivitySpecular(specular);
    half oneMinusReflectivity = 1.0 - reflectivity;
    outBRDFData.diffuse = albedo * (half3(1.0h, 1.0h, 1.0h) - specular);
    outBRDFData.specular = specular;
#else
    half oneMinusReflectivity = OneMinusReflectivityMetallic(metallic);
    half reflectivity = 1.0 - oneMinusReflectivity;
    outBRDFData.diffuse = albedo * oneMinusReflectivity;
    outBRDFData.specular = lerp(kDieletricSpec.rgb, albedo, metallic);
#endif
    outBRDFData.grazingTerm = saturate(smoothness + reflectivity);
    outBRDFData.perceptualRoughness = PerceptualSmoothnessToPerceptualRoughness(smoothness);
    outBRDFData.roughness = max(PerceptualRoughnessToRoughness(outBRDFData.perceptualRoughness), HALF_MIN);
    outBRDFData.roughness2 = outBRDFData.roughness * outBRDFData.roughness;
    outBRDFData.normalizationTerm = outBRDFData.roughness * 4.0h + 2.0h;
    outBRDFData.roughness2MinusOne = outBRDFData.roughness2 - 1.0h;
#ifdef _ALPHAPREMULTIPLY_ON
    outBRDFData.diffuse *= alpha;
    alpha = alpha * oneMinusReflectivity + reflectivity;
#endif
}
The Light struct
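The notes leave the struct itself out; from memory of URP 7.x Lighting.hlsl it is simply the following (verify against the package):
struct Light
{
    half3 direction;           // normalized direction towards the light
    half3 color;
    half  distanceAttenuation; // distance / probe-occlusion attenuation
    half  shadowAttenuation;   // realtime shadow attenuation
};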
3.2捶闸、GetMainLight
3.3、MixRealtimeAndBakedGI
MixRealtimeAndBakedGI(mainLight, inputData.normalWS, inputData.bakedGI, half4(0, 0, 0, 0));
void MixRealtimeAndBakedGI(inout Light light, half3 normalWS, inout half3 bakedGI, half4 shadowMask)
{
#if defined(_MIXED_LIGHTING_SUBTRACTIVE) && defined(LIGHTMAP_ON)
bakedGI = SubtractDirectMainLightFromLightmap(light, normalWS, bakedGI);
#endif
}
#if defined(_MIXED_LIGHTING_SUBTRACTIVE) && defined(LIGHTMAP_ON)
bakedGI = SubtractDirectMainLightFromLightmap(light, normalWS, bakedGI);
#endif
half3 SubtractDirectMainLightFromLightmap(Light mainLight, half3 normalWS, half3 bakedGI)
{
    // Let's try to make realtime shadows work on a surface that already contains
    // baked lighting and shadowing from the main sun light.
    // Summary:
    // 1) Calculate a plausible value inside the shadow by subtracting the estimated light contribution
    //    from the places occluded by the realtime shadow:
    //    a) preserves other baked lights and light bounces
    //    b) eliminates shadows on geometry facing away from the light
    // 2) Clamp against the user-defined ShadowColor.
    // 3) Pick the original lightmap value if it is the darkest one.

    // 1) Gives a good estimate of illumination as if the light had been shadowed during the bake.
    //    We only subtract the main directional light; this is accounted for in the contribution term below.
    half shadowStrength = GetMainLightShadowStrength();
    half contributionTerm = saturate(dot(mainLight.direction, normalWS));
    half3 lambert = mainLight.color * contributionTerm;
    half3 estimatedLightContributionMaskedByInverseOfShadow = lambert * (1.0 - mainLight.shadowAttenuation);
    half3 subtractedLightmap = bakedGI - estimatedLightContributionMaskedByInverseOfShadow;

    // 2) Lets the user define the overall ambient of the scene and control the case where the realtime shadow becomes too dark.
    half3 realtimeShadow = max(subtractedLightmap, _SubtractiveShadowColor.xyz);
    realtimeShadow = lerp(bakedGI, realtimeShadow, shadowStrength);

    // 3) Pick the darkest color.
    return min(bakedGI, realtimeShadow);
}
3.4垄琐、GlobalIllumination
half3 color = GlobalIllumination(brdfData, inputData.bakedGI, occlusion, inputData.normalWS, inputData.viewDirectionWS);
half3 GlobalIllumination(BRDFData brdfData, half3 bakedGI, half occlusion, half3 normalWS, half3 viewDirectionWS)
{
half3 reflectVector = reflect(-viewDirectionWS, normalWS);
half fresnelTerm = Pow4(1.0 - saturate(dot(normalWS, viewDirectionWS)));
half3 indirectDiffuse = bakedGI * occlusion;
half3 indirectSpecular = GlossyEnvironmentReflection(reflectVector, brdfData.perceptualRoughness, occlusion);
return EnvironmentBRDF(brdfData, indirectDiffuse, indirectSpecular, fresnelTerm);
}
GlossyEnvironmentReflection
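The function this heading refers to isn't actually quoted in the notes. Below is a simplified paraphrase of URP 7.x GlossyEnvironmentReflection (the real version also special-cases platforms with native HDR cubemaps, so check the package source): it samples the reflection probe at a mip chosen from the perceptual roughness, decodes the HDR data, and applies occlusion.
half3 GlossyEnvironmentReflection(half3 reflectVector, half perceptualRoughness, half occlusion)
{
#if !defined(_ENVIRONMENTREFLECTIONS_OFF)
    // Rougher surfaces read from blurrier mips of the reflection probe.
    half mip = PerceptualRoughnessToMipmapLevel(perceptualRoughness);
    half4 encodedIrradiance = SAMPLE_TEXTURECUBE_LOD(unity_SpecCube0, samplerunity_SpecCube0, reflectVector, mip);
    half3 irradiance = DecodeHDREnvironment(encodedIrradiance, unity_SpecCube0_HDR);
    return irradiance * occlusion;
#else
    // Reflections disabled: fall back to the flat ambient specular color.
    return _GlossyEnvironmentColor.rgb * occlusion;
#endif
}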
EnvironmentBRDF
return EnvironmentBRDF(brdfData, indirectDiffuse, indirectSpecular, fresnelTerm);
half3 EnvironmentBRDF(BRDFData brdfData, half3 indirectDiffuse, half3 indirectSpecular, half fresnelTerm)
{
half3 c = indirectDiffuse * brdfData.diffuse;
float surfaceReduction = 1.0 / (brdfData.roughness2 + 1.0);
c += surfaceReduction * indirectSpecular * lerp(brdfData.specular, brdfData.grazingTerm, fresnelTerm);
return c;
}
3.5边酒、LightingPhysicallyBased
half3 LightingPhysicallyBased(BRDFData brdfData, half3 lightColor, half3 lightDirectionWS, half lightAttenuation, half3 normalWS, half3 viewDirectionWS)
{
half NdotL = saturate(dot(normalWS, lightDirectionWS));
half3 radiance = lightColor * (lightAttenuation * NdotL);
return DirectBDRF(brdfData, normalWS, lightDirectionWS, viewDirectionWS) * radiance;
}
DirectBDRF (with notes on the formula fit)
// Based on a minimalist Cook-Torrance BRDF.
// The implementation differs slightly from the original derivation: http://www.thetenthplanet.de/archives/255
// * NDF: [modified] GGX
// * Visibility term: modified Kelemen and Szirmay-Kalos
// * Fresnel approximated with 1/LdotH
half3 DirectBDRF(BRDFData brdfData, half3 normalWS, half3 lightDirectionWS, half3 viewDirectionWS)
{
#ifndef _SPECULARHIGHLIGHTS_OFF
    float3 halfDir = SafeNormalize(float3(lightDirectionWS) + float3(viewDirectionWS));
    float NoH = saturate(dot(normalWS, halfDir));
    half LoH = saturate(dot(lightDirectionWS, halfDir));
    // GGX distribution multiplied by a combined approximation of the visibility and Fresnel terms
    // BRDFspec = (D * V * F) / 4.0
    // D = roughness^2 / ( NoH^2 * (roughness^2 - 1) + 1 )^2
    // V * F = 1.0 / ( LoH^2 * (roughness + 0.5) )
    // See "Optimizing PBR for Mobile" from the SIGGRAPH 2015 Moving Mobile Graphics course
    // https://community.arm.com/events/1155
    // Final BRDFspec = roughness^2 / ( ( NoH^2 * (roughness^2 - 1) + 1 )^2 * LoH^2 * (roughness + 0.5) * 4.0 )
    // We further pre-compute a few light-invariant terms:
    // brdfData.normalizationTerm = (roughness + 0.5) * 4.0, rewritten as roughness * 4.0 + 2.0 to fit a MAD.
    float d = NoH * NoH * brdfData.roughness2MinusOne + 1.00001f;
    half LoH2 = LoH * LoH;
    half specularTerm = brdfData.roughness2 / ((d * d) * max(0.1h, LoH2) * brdfData.normalizationTerm);
    // On platforms where half actually means something, the denominator risks overflow.
    // The clamp below was added specifically to "fix" that, but the dx compiler (we convert bytecode to metal/gles)
    // sees that specularTerm has only non-negative terms, so it skips the max(0, ..) in clamp (leaving only min(100, ...)).
#if defined (SHADER_API_MOBILE) || defined (SHADER_API_SWITCH)
    specularTerm = specularTerm - HALF_MIN;
    specularTerm = clamp(specularTerm, 0.0, 100.0); // Prevent FP16 overflow on mobiles
#endif
    half3 color = specularTerm * brdfData.specular + brdfData.diffuse;
    return color;
#else
    return brdfData.diffuse;
#endif
}
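Pulling the comments above together into a single formula (my own summary of the code, with r standing for brdfData.roughness):

$$\text{specularTerm} = \frac{r^{2}}{\left(NoH^{2}\,(r^{2}-1)+1\right)^{2}\cdot \max\!\left(0.1,\,LoH^{2}\right)\cdot (4r+2)}$$

This is exactly the (D * V * F) / 4 product from the comments: the GGX-style D = r^2 / (NoH^2 (r^2 - 1) + 1)^2, the combined visibility/Fresnel approximation V * F = 1 / (LoH^2 (r + 0.5)), and the division by 4 folded into normalizationTerm = 4(r + 0.5) = 4r + 2. The max(0.1, LoH^2) only keeps the denominator away from zero.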
3.6、GetAdditionalLightsCount (additional lights, skipped in these notes)
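For completeness, the count helper is tiny. Again from memory of URP 7.x, so verify against the package:
int GetAdditionalLightsCount()
{
    // Clamp the global additional-light count to the number of lights affecting this object.
    return min(_AdditionalLightsCount.x, unity_LightData.y);
}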
4陋气、MixFog
real ComputeFogIntensity(real fogFactor)
{
    real fogIntensity = 0.0h;
#if defined(FOG_LINEAR) || defined(FOG_EXP) || defined(FOG_EXP2)
    #if defined(FOG_EXP)
        // factor = exp(-density*z)
        // fogFactor = density*z compute at vertex
        fogIntensity = saturate(exp2(-fogFactor));
    #elif defined(FOG_EXP2)
        // factor = exp(-(density*z)^2)
        // fogFactor = density*z compute at vertex
        fogIntensity = saturate(exp2(-fogFactor * fogFactor));
    #elif defined(FOG_LINEAR)
        fogIntensity = fogFactor;
    #endif
#endif
    return fogIntensity;
}
half3 MixFogColor(real3 fragColor, real3 fogColor, real fogFactor)
{
#if defined(FOG_LINEAR) || defined(FOG_EXP) || defined(FOG_EXP2)
    real fogIntensity = ComputeFogIntensity(fogFactor);
    fragColor = lerp(fogColor, fragColor, fogIntensity);
#endif
    return fragColor;
}
half3 MixFog(real3 fragColor, real fogFactor)
{
    return MixFogColor(fragColor, unity_FogColor.rgb, fogFactor);
}
Closing thoughts: these notes ran long, so they are split into three parts. Part one, "Disney BRDF analysis", records my study of the Disney equations; part two, "Unity Built-in BRDF analysis", works through the built-in pipeline; and part three, this one, "Unity URP BRDF analysis", covers PBR under the URP pipeline. If rendering were building a house, a workable standard PBR material would be the foundation, lighting the frame, post-processing the finishing touch, and everything else (atmospheric fog, water, VFX) the bricks and mortar.
Unity ships a good PBR fit of its own that balances quality and performance, so don't agonize over the fact that it didn't port the Disney BRDF wholesale; the fit is extremely friendly to mobile. (If your curve-fitting skills are only halfway there, don't try to beat Unity's Aras at balancing quality against performance. If you want to deeply optimize the PBR look, master fitting first. Aras Pranckevičius does his fits in Excel :P). Unity's shader source is worth studying in depth; it is a huge treasure trove and, so far, my best teacher by example. Of course the fundamentals and algorithms of computer graphics are just as good a teacher, and books will teach you even more. I often compare shader effect work to martial arts training: being able to write shaders is the external technique (much of the time you are lifting someone else's effect and adding a small twist of your own, which is perfectly workable), while writing shaders well and creating your own style and artistic look is the internal technique (fluent use of the math of graphics, no longer copying). Train the external while keeping the internal solid, and the results will show over time.
In the great majority of Unity projects, the reason the art quality won't come up is that the Unity toolchain isn't being used correctly end to end; or most of it is, but a few spots are not, and those mistakes are often fatal.
These three articles still leave plenty uncovered: shadows aren't treated in depth, neither is the lightmap path, nor function fitting, and so on.
A simplified URP PBR shader I use for my own study and testing: the heavy keyword logic is stripped out, so it is easy to maintain. It is for experiments only; I don't recommend dropping it straight into a mobile project, since most of Unity's mobile optimizations have been removed (:P).
Repository:
https://github.com/MasterWangdaoyong/Shader-Graph/tree/main/Algorithm/Unity%20URP%20BRDF
Essential references:
Substance PBR guide: Substance Academy (academy.substance3d.com)
Marmoset PBR guide:
https://marmoset.co/posts/basic-theory-of-physically-based-rendering/
Original SIGGRAPH 2012 paper: Physically Based Shading at Disney
My annotated Chinese version:
MasterWangdaoyong/Shader-Graph (github.com)
Original 2017 article: Reflectance Models (BRDF)
https://cgg.mff.cuni.cz/~pepca/lectures/pdf/pg2-05-brdf.en.pdf
Lingqi Yan:
GAMES: Graphics And Mixed Environment Seminar (games-cn.org)
If you are also working through GAMES 101/102/201/202, I'd be glad to discuss and compare notes :)
My annotated study notes:
https://github.com/MasterWangdaoyong/Shader-Graph/tree/main/Unity_SourceCode
Mao Xingyun: "PBR White Paper, Part 3: the Disney-principled BRDF and BSDF" (zhuanlan.zhihu.com)
Similar material from Yuxuan: "Unity PBR Standard Shader explained, Part 4: the BRDF computation" (zhuanlan.zhihu.com)
Xiong Xinke: Unity source-code analysis, chapters 10 and 11
Feng Lele: Unity Shader Essentials (入門精要), chapter 18