Occlusion Probe Shader Part
OcclusionProbe is a probe used to compute ambient (sky) occlusion. The basic idea is to generate a voxelized volume, fill each voxel with the local shadow/occlusion value, and then read that voxel data back in the shader.
A few setup steps are required (a small editor-script sketch of these settings follows the list):
1. Use Unity's built-in Progressive lightmapper and bake the lighting manually.
2. Make sure every object that should cast occlusion has Lightmap Static enabled.
3. Make sure Baked Global Illumination is enabled.
4. After baking, modify the shaders to read the voxel data.
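A minimal editor-script sketch of steps 1-3, assuming Unity 2018-era editor APIs (LightmapEditorSettings, StaticEditorFlags.LightmapStatic; these names changed in later versions), and simply flagging every MeshRenderer as an occluder for illustration:
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine;
public static class OcclusionBakeSetup
{
    [MenuItem("Tools/Prepare Occlusion Probe Bake")]
    static void Prepare()
    {
        // Manual (on-demand) baking with the Progressive lightmapper and Baked GI enabled.
        Lightmapping.giWorkflowMode = Lightmapping.GIWorkflowMode.OnDemand;
        LightmapEditorSettings.lightmapper = LightmapEditorSettings.Lightmapper.ProgressiveCPU;
        Lightmapping.bakedGI = true;
        // Occluders must contribute to the bake: mark them Lightmap Static.
        foreach (var renderer in Object.FindObjectsOfType<MeshRenderer>())
        {
            var go = renderer.gameObject;
            var flags = GameObjectUtility.GetStaticEditorFlags(go);
            GameObjectUtility.SetStaticEditorFlags(go, flags | StaticEditorFlags.LightmapStatic);
        }
    }
}
#endif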
Differences in SurfaceData in the shader
In the BofD (Book of the Dead) branch of HDRP, SurfaceData also carries occlusion information. The main difference from the stock struct is three occlusion values:
- skyOcclusion
- treeOcclusion
- grassOcclusion
struct SurfaceData
{
uint materialFeatures;
float3 baseColor;
float specularOcclusion;
float3 normalWS;
float perceptualSmoothness;
float ambientOcclusion;
float metallic;
float coatMask;
float3 specularColor;
uint diffusionProfile;
float subsurfaceMask;
float thickness;
float3 tangentWS;
float anisotropy;
float iridescenceThickness;
float iridescenceMask;
float ior;
float3 transmittanceColor;
float atDistance;
float transmittanceMask;
// BofD additions
float skyOcclusion; // occlusion sampled from the Occlusion Probe volumes
float treeOcclusion; // occlusion computed from the tree occlusion parameters
// grassOcclusion also exists, but only as a temporary value; it is not stored in the struct
};
For comparison, the stock HDRP SurfaceData (the struct above is the BofD version with the occlusion fields):
struct SurfaceData
{
uint materialFeatures;
float3 baseColor;
float specularOcclusion;
float3 normalWS;
float perceptualSmoothness;
float ambientOcclusion;
float metallic;
float coatMask;
float3 specularColor;
uint diffusionProfile;
float subsurfaceMask;
float thickness;
float3 tangentWS;
float anisotropy;
float iridescenceThickness;
float iridescenceMask;
float3 geomNormalWS;
float ior;
float3 transmittanceColor;
float atDistance;
float transmittanceMask;
};
Reading the occlusion information
SurfaceData is mainly filled in LitDataIndividualLayer.hlsl (the BofD version):
...
//forest-begin: sky occlusion
float grassOcclusion;
surfaceData.skyOcclusion = SampleSkyOcclusion(input.positionRWS, grassOcclusion);
//forest-end
//forest-begin: Tree Occlusion
float4 treeOcclusionInput = float4(input.texCoord2.xy, input.texCoord3.xy);
surfaceData.treeOcclusion = GetTreeOcclusion(input.positionRWS, treeOcclusionInput);
//forest-end:
#else // #if !defined(LAYERED_LIT_SHADER)
...
The specific functions involved above:
float3 _AtmosphericScatteringSunVector;
// Computed from the tree occlusion parameters.
float GetTreeOcclusion(float3 positionRWS, float4 treeOcclusionInput) {
#if defined(_ANIM_SINGLE_PIVOT_COLOR) || defined(_ANIM_HIERARCHY_PIVOT)
if(_UseTreeOcclusion) {
float3 positionWS = GetAbsolutePositionWS(positionRWS);
float treeWidth = _Tree12Width == 0 ? 1.f : saturate((positionWS.y - UNITY_MATRIX_M._m13) / _Tree12Width);
float treeDO = lerp(_TreeDO, _TreeDO2, treeWidth);
float treeAO = lerp(_TreeAO, _TreeAO2, treeWidth);
float4 lightDir = float4(-_AtmosphericScatteringSunVector * treeDO, treeAO);
float treeDOBias = lerp(_TreeDOBias, _TreeDOBias2, treeWidth);
float treeAOBias = lerp(_TreeAOBias, _TreeAOBias2, treeWidth);
return saturate(dot(saturate(treeOcclusionInput + float4(treeDOBias.rrr, treeAOBias)), lightDir));
}
else
#endif
{
return 1.f;
}
}
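_AtmosphericScatteringSunVector is set by BofD's atmospheric scattering system. If this shader is lifted out of BofD, something has to feed that global; a hedged sketch, assuming the vector is simply the main directional light's forward direction (verify against the original atmospheric scattering script):
using UnityEngine;
[ExecuteInEditMode]
public class SunVectorBinder : MonoBehaviour
{
    public Light sun; // the main directional light
    static readonly int SunVectorId = Shader.PropertyToID("_AtmosphericScatteringSunVector");
    void Update()
    {
        if (sun != null)
            // Assumption: the shader expects the direction the sun shines along (light forward).
            Shader.SetGlobalVector(SunVectorId, sun.transform.forward);
    }
}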
float SampleOcclusionProbes(float3 positionWS)
{
float occlusionProbes = 1;
// Transform the world position into the probe volume's [0,1] local space
float3 pos = mul(_OcclusionProbesWorldToLocalDetail, float4(positionWS, 1)).xyz;
// There are two 3D textures: a detail volume and a global one
UNITY_BRANCH
if(all(pos > 0) && all(pos < 1))
{
// Sample the detail 3D texture
occlusionProbes = tex3D(_OcclusionProbesDetail, pos).a;
}
else
{
pos = mul(_OcclusionProbesWorldToLocal, float4(positionWS, 1)).xyz;
occlusionProbes = tex3D(_OcclusionProbes, pos).a;
}
return occlusionProbes;
}
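The branch all(pos > 0) && all(pos < 1) works because the world-to-local matrices map the probe volumes into the unit cube, so pos can be used directly as a 3D texture coordinate. A small C# sketch of that mapping, mirroring how Started() builds the matrix later (the volume transform is the OcclusionProbes object's transform):
using UnityEngine;
public static class OcclusionProbeSpace
{
    // Maps world space into the volume's [0,1]^3 texture space.
    public static Matrix4x4 WorldToLocal(Transform volume)
    {
        Vector3 size = volume.localScale;
        // For an unrotated volume, local (0,0,0) ends up at position - size/2
        // and local (1,1,1) at position + size/2, i.e. a box centered on the transform.
        Matrix4x4 localToWorld = Matrix4x4.TRS(volume.position - size * 0.5f, volume.rotation, size);
        return localToWorld.inverse;
    }
}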
// Grass ambient occlusion, stored in a 2D texture.
float SampleGrassOcclusion(float2 terrainUV)
{
return lerp(1.0, tex2D(_GrassOcclusion, terrainUV).a, _GrassOcclusionAmountTerrain);
}
float SampleGrassOcclusion(float3 positionWS)
{
float3 pos = mul(_GrassOcclusionWorldToLocal, float4(positionWS, 1)).xyz;
float terrainHeight = tex2D(_GrassOcclusionHeightmap, pos.xz).a;
float height = pos.y - terrainHeight * _GrassOcclusionHeightRange;
UNITY_BRANCH
if(height < _GrassOcclusionCullHeight)
{
float xz = lerp(1.0, tex2D(_GrassOcclusion, pos.xz).a, _GrassOcclusionAmountGrass);
return saturate(xz + smoothstep(_GrassOcclusionHeightFadeBottom, _GrassOcclusionHeightFadeTop, height));
}
else
return 1;
}
// Returns the sky occlusion and outputs grassOcclusion as well
float SampleSkyOcclusion(float3 positionRWS, out float grassOcclusion)
{
// Get the absolute world position
float3 positionWS = GetAbsolutePositionWS(positionRWS);
// Sample the occlusion volumes at that world position
grassOcclusion = SampleGrassOcclusion(positionWS);
return grassOcclusion * SampleOcclusionProbes(positionWS);
}
float SampleSkyOcclusion(float3 positionRWS, float2 terrainUV, out float grassOcclusion)
{
float3 positionWS = GetAbsolutePositionWS(positionRWS);
grassOcclusion = SampleGrassOcclusion(terrainUV);
return grassOcclusion * SampleOcclusionProbes(positionWS);
}
Now we know where the three values come from:
- skyOcclusion comes from the 3D textures
- treeOcclusion is computed from the tree parameters
- grassOcclusion comes from a 2D texture
Using the occlusion information
The baked GI written into BuiltinData uses the sampling results above, and the SampleBakedGI function differs from stock HDRP.
// All of the baked lighting sources:
// LightMap: fully baked lightmap (static lightmap) + enlighten realtime lightmap (dynamic lightmap); these can be combined
// for each case we can have directional lightmap or not.
// lightprobe for dynamic/moving entity:
// Probe types: Either SH9 per object lightprobe or SH4 per pixel per object volume probe
float3 SampleBakedGI(float3 positionRWS, float3 normalWS, float2 uvStaticLightmap, float2 uvDynamicLightmap)
{
// If there is no lightmap, it assume lightprobe
#if !defined(LIGHTMAP_ON) && !defined(DYNAMICLIGHTMAP_ON)
if (unity_ProbeVolumeParams.x == 0.0)
{
// TODO: pass a tab of coefficient instead!
real4 SHCoefficients[7];
SHCoefficients[0] = unity_SHAr;
SHCoefficients[1] = unity_SHAg;
SHCoefficients[2] = unity_SHAb;
SHCoefficients[3] = unity_SHBr;
SHCoefficients[4] = unity_SHBg;
SHCoefficients[5] = unity_SHBb;
SHCoefficients[6] = unity_SHC;
return SampleSH9(SHCoefficients, normalWS);
}
else
{
return SampleProbeVolumeSH4(TEXTURE3D_PARAM(unity_ProbeVolumeSH, samplerunity_ProbeVolumeSH), positionRWS, normalWS, GetProbeVolumeWorldToObject(),
unity_ProbeVolumeParams.y, unity_ProbeVolumeParams.z, unity_ProbeVolumeMin.xyz, unity_ProbeVolumeSizeInv.xyz);
}
#else
// A lightmap is defined, so read the lightmap contents
float3 bakeDiffuseLighting = float3(0.0, 0.0, 0.0);
#ifdef UNITY_LIGHTMAP_FULL_HDR
bool useRGBMLightmap = false;
float4 decodeInstructions = float4(0.0, 0.0, 0.0, 0.0); // Never used but needed for the interface since it supports gamma lightmaps
#else
bool useRGBMLightmap = true;
#if defined(UNITY_LIGHTMAP_RGBM_ENCODING)
float4 decodeInstructions = float4(34.493242, 2.2, 0.0, 0.0); // range^2.2 = 5^2.2, gamma = 2.2
#else
float4 decodeInstructions = float4(2.0, 2.2, 0.0, 0.0); // range = 2.0^2.2 = 4.59
#endif
#endif
// Each lightmap can be either directional (combined) or non-directional
#ifdef LIGHTMAP_ON
#ifdef DIRLIGHTMAP_COMBINED
bakeDiffuseLighting += SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap),
TEXTURE2D_PARAM(unity_LightmapInd, samplerunity_Lightmap),
uvStaticLightmap, unity_LightmapST, normalWS, useRGBMLightmap, decodeInstructions);
#else
bakeDiffuseLighting += SampleSingleLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap), uvStaticLightmap, unity_LightmapST, useRGBMLightmap, decodeInstructions);
#endif
#endif
#ifdef DYNAMICLIGHTMAP_ON
#ifdef DIRLIGHTMAP_COMBINED
bakeDiffuseLighting += SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_DynamicLightmap, samplerunity_DynamicLightmap),
TEXTURE2D_PARAM(unity_DynamicDirectionality, samplerunity_DynamicLightmap),
uvDynamicLightmap, unity_DynamicLightmapST, normalWS, false, decodeInstructions);
#else
bakeDiffuseLighting += SampleSingleLightmap(TEXTURE2D_PARAM(unity_DynamicLightmap, samplerunity_DynamicLightmap), uvDynamicLightmap, unity_DynamicLightmapST, false, decodeInstructions);
#endif
#endif
// The static and dynamic lightmaps are simply added together
return bakeDiffuseLighting;
#endif
}
// The modified baked-GI sampling.
float3 SampleBakedGI(float3 positionRWS, float3 normalWS, float2 uvStaticLightmap, float2 uvDynamicLightmap, float skyOcclusion, float grassOcclusion, float treeOcclusion)
//forest-end
{
// If there is no lightmap, it assume lightprobe
#if !defined(LIGHTMAP_ON) && !defined(DYNAMICLIGHTMAP_ON)
if (unity_ProbeVolumeParams.x == 0.0)
{
// TODO: pass a tab of coefficient instead!
real4 SHCoefficients[7];
SHCoefficients[0] = unity_SHAr;
SHCoefficients[1] = unity_SHAg;
SHCoefficients[2] = unity_SHAb;
SHCoefficients[3] = unity_SHBr;
SHCoefficients[4] = unity_SHBg;
SHCoefficients[5] = unity_SHBb;
SHCoefficients[6] = unity_SHC;
// Add the ambient probe, scaled by the sky occlusion, into the SH coefficients.
//forest-begin: sky occlusion
#if SKY_OCCLUSION
SHCoefficients[0] += _AmbientProbeSH[0] * skyOcclusion;
SHCoefficients[1] += _AmbientProbeSH[1] * skyOcclusion;
SHCoefficients[2] += _AmbientProbeSH[2] * skyOcclusion;
SHCoefficients[3] += _AmbientProbeSH[3] * skyOcclusion;
SHCoefficients[4] += _AmbientProbeSH[4] * skyOcclusion;
SHCoefficients[5] += _AmbientProbeSH[5] * skyOcclusion;
SHCoefficients[6] += _AmbientProbeSH[6] * skyOcclusion;
#endif
//forest-end
// The tree occlusion is then applied on top of the SH result
//forest-begin: Tree occlusion
return SampleSH9(SHCoefficients, normalWS) * treeOcclusion;
//forest-end
}
else
{
// Here only the tree occlusion is applied additionally
return SampleProbeVolumeSH4(TEXTURE3D_PARAM(unity_ProbeVolumeSH, samplerunity_ProbeVolumeSH), positionRWS, normalWS, GetProbeVolumeWorldToObject(),
//forest-begin: Tree occlusion
unity_ProbeVolumeParams.y, unity_ProbeVolumeParams.z, unity_ProbeVolumeMin, unity_ProbeVolumeSizeInv) * treeOcclusion;
//forest-end
}
#else
float3 bakeDiffuseLighting = float3(0.0, 0.0, 0.0);
#ifdef UNITY_LIGHTMAP_FULL_HDR
bool useRGBMLightmap = false;
float4 decodeInstructions = float4(0.0, 0.0, 0.0, 0.0); // Never used but needed for the interface since it supports gamma lightmaps
#else
bool useRGBMLightmap = true;
#if defined(UNITY_LIGHTMAP_RGBM_ENCODING)
float4 decodeInstructions = float4(34.493242, 2.2, 0.0, 0.0); // range^2.2 = 5^2.2, gamma = 2.2
#else
float4 decodeInstructions = float4(2.0, 2.2, 0.0, 0.0); // range = 2.0^2.2 = 4.59
#endif
#endif
#ifdef LIGHTMAP_ON
#ifdef DIRLIGHTMAP_COMBINED
bakeDiffuseLighting += SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap),
TEXTURE2D_PARAM(unity_LightmapInd, samplerunity_Lightmap),
uvStaticLightmap, unity_LightmapST, normalWS, useRGBMLightmap, decodeInstructions);
#else
bakeDiffuseLighting += SampleSingleLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap), uvStaticLightmap, unity_LightmapST, useRGBMLightmap, decodeInstructions);
#endif
#endif
#ifdef DYNAMICLIGHTMAP_ON
#ifdef DIRLIGHTMAP_COMBINED
bakeDiffuseLighting += SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_DynamicLightmap, samplerunity_DynamicLightmap),
TEXTURE2D_PARAM(unity_DynamicDirectionality, samplerunity_DynamicLightmap),
uvDynamicLightmap, unity_DynamicLightmapST, normalWS, false, decodeInstructions);
#else
bakeDiffuseLighting += SampleSingleLightmap(TEXTURE2D_PARAM(unity_DynamicLightmap, samplerunity_DynamicLightmap), uvDynamicLightmap, unity_DynamicLightmapST, false, decodeInstructions);
#endif
#endif
// Here the lightmapped result is only multiplied by the grass occlusion; the light-probe path above is the one dynamic objects take
//forest-begin: sky occlusion
return bakeDiffuseLighting * grassOcclusion;
//forest-end
#endif
}
From the code above, skyOcclusion only takes effect in the light-probe path, i.e. it is used for dynamic (non-lightmapped) objects.
Now the role of each occlusion value is clear:
- grassOcclusion is multiplied onto the lightmapped (baked) result
- treeOcclusion is multiplied onto every light-probe result
- skyOcclusion scales the ambient probe SH that gets added to the per-object SH9 (light-probe path only)
Shader part summary
The above is only how Book of the Dead uses these values; we can change where they are applied and how they are computed.
Next, let's look at how these occlusion parameters are passed into the shaders.
Occlusion Probe Script Part
The previous part covered how the probe 3D textures are used in the shaders; now let's see how the scripts generate them.
In BofD the main script is OcclusionProbes, together with its corresponding Editor code.
The most important piece is the set of parameters passed to Unity (the shader uniforms):
static class Uniforms
{
internal static readonly int _AmbientProbeSH = Shader.PropertyToID("_AmbientProbeSH");
internal static readonly int _OcclusionProbes = Shader.PropertyToID("_OcclusionProbes");
internal static readonly int _OcclusionProbesWorldToLocal = Shader.PropertyToID("_OcclusionProbesWorldToLocal");
internal static readonly int _OcclusionProbesDetail = Shader.PropertyToID("_OcclusionProbesDetail");
internal static readonly int _OcclusionProbesWorldToLocalDetail = Shader.PropertyToID("_OcclusionProbesWorldToLocalDetail");
internal static readonly int _OcclusionProbesReflectionOcclusionAmount = Shader.PropertyToID("_OcclusionProbesReflectionOcclusionAmount");
}
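These IDs are global shader parameters, so the script only has to push the baked assets once. A minimal sketch of the binding, using placeholder fields for the baked textures and ambient SH (the real OcclusionProbes component stores them in its own asset and uses the cached IDs from Uniforms):
using UnityEngine;
[ExecuteInEditMode]
public class OcclusionProbesBinder : MonoBehaviour
{
    // Placeholder fields: assign the assets produced by the bake.
    public Texture3D occlusionTexture;        // bound to _OcclusionProbes
    public Texture3D occlusionTextureDetail;  // bound to _OcclusionProbesDetail
    public Vector4[] ambientProbeSH = new Vector4[7]; // _AmbientProbeSH, unity_SHAr..unity_SHC layout
    [Range(0f, 1f)] public float reflectionOcclusionAmount = 1f;
    void OnEnable()
    {
        Shader.SetGlobalTexture("_OcclusionProbes", occlusionTexture);
        Shader.SetGlobalTexture("_OcclusionProbesDetail", occlusionTextureDetail);
        Shader.SetGlobalVectorArray("_AmbientProbeSH", ambientProbeSH);
        Shader.SetGlobalFloat("_OcclusionProbesReflectionOcclusionAmount", reflectionOcclusionAmount);
        // The world-to-local matrix is rebuilt from this object's transform, as in Started().
        Vector3 size = transform.localScale;
        Matrix4x4 localToWorld = Matrix4x4.TRS(transform.position - size * 0.5f, transform.rotation, size);
        Shader.SetGlobalMatrix("_OcclusionProbesWorldToLocal", localToWorld.inverse);
        // _OcclusionProbesWorldToLocalDetail is set the same way from the detail volume's transform.
    }
}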
This data is baked by the Progressive lightmapping backend, i.e. it relies on an engine feature.
Using that feature requires writing some callbacks yourself; the BofD code lives in OcclusionProbes.Editor.cs.
OcclusionProbes bake setup
There are two main hooks:
Lightmapping.started += Started;
Lightmapping.completed += Completed;
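In the editor script these subscriptions are usually balanced with unsubscription so the callbacks do not pile up across enables and domain reloads; a minimal sketch inside the same editor class:
void OnEnable()
{
    Lightmapping.started += Started;
    Lightmapping.completed += Completed;
}
void OnDisable()
{
    Lightmapping.started -= Started;
    Lightmapping.completed -= Completed;
}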
Started
Called before lightmap baking starts.
void Started()
{
// Compute the probe count: a voxel grid is used here, but we could define our own layout.
int probeCount = m_XCount * m_YCount * m_ZCount;
if (probeCount == 0)
return;
// Count the detail probe sets
int detailSetCount = 0;
foreach(OcclusionProbesDetail detail in m_OcclusionProbesDetail)
{
if (detail == null || !detail.gameObject.activeSelf)
continue;
probeCount += detail.m_XCount * detail.m_YCount * detail.m_ZCount;
detailSetCount++;
}
// Allocate the probe position array; set up the main volume's transform, etc.
Vector4[] positions = new Vector4[probeCount];
// Main
Vector3 size = transform.localScale;
Matrix4x4 localToWorld = Matrix4x4.TRS(transform.position - size * 0.5f, transform.rotation, size);
m_WorldToLocal = localToWorld.inverse;
m_CountBaked = new Vector3i(m_XCount, m_YCount, m_ZCount);
int indexOffset = 0;
GenerateProbePositions(ref positions, ref indexOffset, m_CountBaked, localToWorld, CalculateRayOffset(m_RayOffset, size, m_CountBaked));
// Detail
m_CountBakedDetail = new Vector3i[detailSetCount];
m_WorldToLocalDetail = new Matrix4x4[detailSetCount];
// Set up the detail probes' positions, transforms and sizes
int i = 0;
foreach(OcclusionProbesDetail detail in m_OcclusionProbesDetail)
{
if (detail == null || !detail.gameObject.activeSelf)
continue;
Transform t = detail.transform;
Vector3 sizeDetail = t.localScale;
Matrix4x4 localToWorldDetail = Matrix4x4.TRS(t.position - sizeDetail * 0.5f, t.rotation, sizeDetail);
m_WorldToLocalDetail[i] = localToWorldDetail.inverse;
m_CountBakedDetail[i] = new Vector3i(detail.m_XCount, detail.m_YCount, detail.m_ZCount);
GenerateProbePositions(ref positions, ref indexOffset, m_CountBakedDetail[i], localToWorldDetail, CalculateRayOffset(detail.m_RayOffset, sizeDetail, m_CountBakedDetail[i]));
i++;
}
int sampleCount = 1024;
// These two calls are the core: hand the probe positions and sample count to the baking backend; everything above is just bookkeeping.
UnityEditor.Experimental.Lightmapping.SetCustomBakeInputs(positions, sampleCount);
m_SampleCountBaked = sampleCount;
UnityEditor.Experimental.Lightmapping.probesIgnoreDirectEnvironment = true;
}
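GenerateProbePositions is not shown above. A plausible sketch of what it needs to do (hypothetical, not the actual BofD implementation): place one sample at the center of each voxel of the grid, transform it into world space, and append it to the shared positions array. The loop order must match how Completed() later packs the results into the 3D texture.
static void GenerateProbePositions(ref Vector4[] positions, ref int indexOffset,
                                   Vector3i count, Matrix4x4 localToWorld, Vector3 rayOffset)
{
    for (int z = 0; z < count.z; z++)
        for (int y = 0; y < count.y; y++)
            for (int x = 0; x < count.x; x++)
            {
                // Voxel centers in the volume's normalized [0,1] space.
                Vector3 local = new Vector3((x + 0.5f) / count.x,
                                            (y + 0.5f) / count.y,
                                            (z + 0.5f) / count.z);
                Vector3 world = localToWorld.MultiplyPoint3x4(local);
                // rayOffset nudges the sample to reduce self-occlusion from geometry lying exactly on voxel boundaries.
                positions[indexOffset++] = world + rayOffset;
            }
}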
Completed
Called after the lightmap bake finishes; this is where the data gets saved.
void Completed()
{
// Pull all the probe results out of the bake pipeline and build the 3D textures.
// Other structures (plain lists, etc.) could be generated just as well
// ...
// Ambient probe
// This is just an SH9 that records the global sky/ambient lighting
BakeAmbientProbe();
// Save all generated assets
AssetDatabase.SaveAssets();
m_CountBaked = new Vector3i(0, 0, 0);
}
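The elided part of Completed() has to pull the per-probe results back from the baking backend and pack them into the Texture3D that the shader samples. A hedged sketch of one way to do this, assuming GetCustomBakeResults (the counterpart of SetCustomBakeInputs) fills a caller-provided Vector4 array and that its x component holds the baked sky visibility; both assumptions should be checked against the real BofD code:
// Hypothetical helper living next to Completed(); handles only the main volume
// (with detail volumes, the results array must cover every position passed to SetCustomBakeInputs).
static Texture3D BuildOcclusionTexture(Vector3i count)
{
    var results = new Vector4[count.x * count.y * count.z];
    if (!UnityEditor.Experimental.Lightmapping.GetCustomBakeResults(results))
        return null;
    var tex = new Texture3D(count.x, count.y, count.z, TextureFormat.Alpha8, false)
    {
        wrapMode = TextureWrapMode.Clamp,
        filterMode = FilterMode.Bilinear
    };
    var colors = new Color[results.Length];
    for (int i = 0; i < results.Length; i++)
        colors[i] = new Color(0f, 0f, 0f, results[i].x); // assumption: x = baked sky visibility
    tex.SetPixels(colors);
    tex.Apply(false);
    UnityEditor.AssetDatabase.CreateAsset(tex, "Assets/OcclusionProbes.asset"); // placeholder path
    return tex;
}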
After these two steps everything is saved in the same folder as the lightmaps; you then have to bind it to the shaders yourself from code.
The remaining work is simply passing this data to the shaders; the main code is in OcclusionProbes.
Note: in BofD, OcclusionProbes is mainly used to attenuate the baked ambient probe (BakeAmbientProbe), and the attenuated value is then added on top of the original light probes.
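The shader-side _AmbientProbeSH[0..6] uses the same packing as unity_SHAr..unity_SHC. A sketch of one common way to produce that layout from a SphericalHarmonicsL2 (for example RenderSettings.ambientProbe); BakeAmbientProbe presumably does something equivalent, but this is an assumption rather than the BofD code:
using UnityEngine;
using UnityEngine.Rendering;
public static class AmbientProbePacking
{
    // Packs a SphericalHarmonicsL2 into the 7-float4 unity_SHAr..unity_SHC layout
    // expected by the shader's _AmbientProbeSH array.
    public static Vector4[] Pack(SphericalHarmonicsL2 sh)
    {
        var c = new Vector4[7];
        for (int ch = 0; ch < 3; ch++) // r, g, b channels
        {
            // Linear (L1) terms plus the DC term, with the usual L2 zz correction folded in.
            c[ch] = new Vector4(sh[ch, 3], sh[ch, 1], sh[ch, 2], sh[ch, 0] - sh[ch, 6]);
            // Quadratic (L2) terms.
            c[ch + 3] = new Vector4(sh[ch, 4], sh[ch, 5], sh[ch, 6] * 3f, sh[ch, 7]);
        }
        c[6] = new Vector4(sh[0, 8], sh[1, 8], sh[2, 8], 1f);
        return c;
    }
}
The result can then be bound with Shader.SetGlobalVectorArray("_AmbientProbeSH", AmbientProbePacking.Pack(RenderSettings.ambientProbe)).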
How to use it
1. Create an Occlusion Probes object in the scene and set its size, the voxel texture resolution, and the number of detail volumes and their resolutions.
2. Set lightmap baking to manual and the lightmapper to Progressive.
3. Click Bake; the assets will be generated.
4. Use the Occlusion Probes script to set the global shader parameters.
5. Any shader can then read the 3D textures and, using the world-to-local matrices, extract how much ambient light reaches each point.