草莓♭布丁

导航

Unity水面渲染研究(CubeMap,反射探针,平面反射)

  最近正在学习利用Unity制作动画,遇到了较多水面问题,正好来研究一下水面是如何渲染的。我们首先来看一下经典教程《Shader入门精要》中的水面渲染。

一、CubeMap,《Shader入门精要》中的水面渲染

  使用立方体纹理(Cubemap)模拟反射,GrabPass模拟折射。使用菲涅尔系数混合折射和反射效果。

    fresnel = pow(1-saturate(dot(v,n)),4);

  v和n分别是视角方向和法线方向,两者夹角越小,fresnel越小,反射越弱。

// Water surface shader ("Unity Shaders Book", Chapter 15):
// cubemap reflection + GrabPass refraction, blended by a Fresnel term.
Shader "Unity Shaders Book/Chapter15/WaterWave"
{
    Properties
    {
        _Color ("Main Color",Color) = (0,0.15,0.115,1)          // water surface tint
        _MainTex ("Base", 2D) = "white" {}              // water surface wave texture
        _WaveMap ("Wave Map", 2D) = "bump" {}                   // normal map generated from noise
        _CubeMap ("Environment Cube Map", Cube) = "_Skybox" {}
        _WaveXSpeed ("Wave Horizontal Speed", Range(-0.1,0.1)) = 0.01
        _WaveYSpeed ("Wave Vertical Speed", Range(-0.1,0.1)) = 0.01
        _Distortion ("Distortion", Range(0,100)) = 10           // strength of the refraction distortion
    }
    SubShader
    {
        // "Queue"="Transparent" makes sure opaque objects are drawn first so the
        // GrabPass captures them; "RenderType"="Opaque" follows the book's
        // convention so shader replacement still treats this as opaque.
        Tags { "Queue"="Transparent" "RenderType"="Opaque" }
        // Grab the current screen contents into _RefractionTex.
        GrabPass
        {
            "_RefractionTex"
        }

        Pass
        {
            CGPROGRAM
            #include "UnityCG.cginc"
            #pragma vertex vert
            #pragma fragment frag

            fixed4 _Color;
            sampler2D _MainTex;
            float4 _MainTex_ST;
            sampler2D _WaveMap;
            float4 _WaveMap_ST;
            samplerCUBE _CubeMap;
            fixed _WaveXSpeed;
            fixed _WaveYSpeed;
            float _Distortion;
            sampler2D _RefractionTex;
            float4 _RefractionTex_TexelSize;    // xy = 1/width, 1/height of the grabbed texture
            
            struct a2v
            {
                float4 vertex : POSITION;
                float3 normal : NORMAL;
                float4 tangent : TANGENT;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float4 pos : SV_POSITION;
                float4 scrPos : TEXCOORD0;      // screen coords for sampling the grab texture
                float4 uv : TEXCOORD1;          // xy = _MainTex UV, zw = _WaveMap UV
                // Rows of the tangent-to-world matrix; w components pack the world position.
                float4 TtoW0 : TEXCOORD2;
                float4 TtoW1 : TEXCOORD3;
                float4 TtoW2 : TEXCOORD4;
            };

            v2f vert (a2v v)
            {
                v2f o;
                o.pos = UnityObjectToClipPos(v.vertex);
                o.scrPos = ComputeGrabScreenPos(o.pos);     // sampling coordinates for the grabbed screen texture

                o.uv.xy = TRANSFORM_TEX(v.uv, _MainTex);
                o.uv.zw = TRANSFORM_TEX(v.uv, _WaveMap);

                float3 worldPos = mul(unity_ObjectToWorld, v.vertex).xyz;
                float3 worldNormal = UnityObjectToWorldNormal(v.normal);
                float3 worldTangent = UnityObjectToWorldDir(v.tangent.xyz);
                float3 worldBinormal = cross(worldNormal,worldTangent) * v.tangent.w;
                // Tangent-to-world matrix: the tangent-space axes are written as
                // columns (transpose of an orthonormal basis equals its inverse).
                o.TtoW0 = float4(worldTangent.x, worldBinormal.x, worldNormal.x, worldPos.x);
                o.TtoW1 = float4(worldTangent.y, worldBinormal.y, worldNormal.y, worldPos.y);
                o.TtoW2 = float4(worldTangent.z, worldBinormal.z, worldNormal.z, worldPos.z);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                float3 worldPos = float3(i.TtoW0.w, i.TtoW1.w, i.TtoW2.w);
                fixed3 viewDir = normalize(UnityWorldSpaceViewDir(worldPos));
                float2 speed = _Time.y * float2(_WaveXSpeed, _WaveYSpeed);

                // Sample the normal map twice with opposite scrolling offsets...
                fixed3 bump1 = UnpackNormal(tex2D(_WaveMap, i.uv.zw + speed)).rgb;
                fixed3 bump2 = UnpackNormal(tex2D(_WaveMap, i.uv.zw - speed)).rgb;
                fixed3 bump = normalize(bump1+bump2);           // ...to fake two crossing wave layers

                // Offset the grab-pass UV by the tangent-space normal to fake refraction.
                float2 offset = bump.xy * _Distortion * _RefractionTex_TexelSize.xy;
                i.scrPos.xy = offset * i.scrPos.z + i.scrPos.xy;      // scale offset by depth: deeper water refracts more
                fixed3 refrCol = tex2D(_RefractionTex , i.scrPos.xy/i.scrPos.w).rgb;        // refraction color

                // Transform the normal into world space, then reflect the view
                // direction for the cubemap lookup.
                bump = normalize(half3(dot(i.TtoW0.xyz,bump),dot(i.TtoW1.xyz,bump),dot(i.TtoW2.xyz,bump)));
                fixed4 texColor = tex2D(_MainTex, i.uv.xy + speed);
                fixed3 reflDir = reflect(-viewDir ,bump);
                fixed3 reflCol = texCUBE(_CubeMap,reflDir).rgb * texColor.rgb * _Color.rgb;     // reflection color

                // Approximate Fresnel: grazing angles (small view·normal) reflect more.
                fixed fresnel = pow(1-saturate(dot(viewDir,bump)),4);
                fixed3 finalColor = reflCol * fresnel + refrCol * (1 - fresnel);
                //fixed3 finalColor = lerp(refrCol,reflCol,fresnel);
                return fixed4(finalColor,1);
            }
            ENDCG
        }
    }
}

  片元着色器需要把法线从切线空间变换到世界空间,转换矩阵是按列写入的切线空间坐标轴,矩阵按行存入TtoW0、TtoW1、TtoW2。(坐标轴本身是世界空间转换切线空间的,这里转置了一下。而标准正交基转置就是逆矩阵,所以按列写入就得到了切线空间转世界空间的矩阵。这块看不懂的话可以自行去看书上的基础章节)

  噪声纹理用“从灰度创建”的法线贴图,法线有两个方向偏移采样,这是模拟两层交叉的波动效果。使用法线和_Distortion对GrabPass的采样坐标进行偏移,模拟折射效果。具体效果如下

  反射效果是在世界空间中计算出反射方向,然后对CubeMap采样。我们可以利用书上提供的Editor脚本,快速创建一个CubeMap。

using System.Collections;
using System.Collections.Generic;
using UnityEditor;
using UnityEngine;

/// <summary>
/// Editor wizard that bakes the scene into a Cubemap asset as seen from a
/// chosen world position, using a throw-away camera.
/// </summary>
public class CubeMapRender : ScriptableWizard
{
    public Transform renderFromPosition;
    public Cubemap cubemap;

    /// <summary>
    /// Keeps the wizard buttons disabled until both fields are assigned.
    /// </summary>
    void OnWizardUpdate()
    {
        helpString = "Select transform to render from and cubemap to render into";
        isValid = renderFromPosition != null && cubemap != null;
    }

    /// <summary>
    /// Invoked by the secondary ("Apply") button; the window stays open.
    /// </summary>
    void OnWizardOtherButton()
    {
        Debug.Log("Apply");
        ApplySet();
    }

    /// <summary>
    /// Invoked by the confirm ("Render!") button; the window closes afterwards.
    /// </summary>
    void OnWizardCreate()
    {
        Debug.Log("Create");
        ApplySet();
    }

    public void ApplySet()
    {
        // Spawn a disposable camera at the requested position, bake the six
        // cubemap faces, then clean the camera up again.
        var rigObject = new GameObject("CubemapCamera");
        var bakeCamera = rigObject.AddComponent<Camera>();
        rigObject.transform.position = renderFromPosition.position;

        bakeCamera.RenderToCubemap(cubemap);

        DestroyImmediate(rigObject);
    }

    [MenuItem("GameObject/Render into Cubemap")]
    static void RenderCubemap()
    {
        ScriptableWizard.DisplayWizard<CubeMapRender>("Render cubemap", "Render!", "Apply");
    }
}

 

二、反射探针(进阶版的CubeMap)

  CubeMap在模拟反射时有很多问题,比如说每次更改物体位置都要手动渲染一下,而且也不便于实时的更新。比如说这里要渲染一个小水盆,要经常更改位置,使用CubeMap就不太方便。

  反射探针就可以很好的解决这个问题,只需要把原先对CubeMap采样的地方换成反射探针即可

                bump = normalize(half3(dot(i.TtoW0.xyz,bump),dot(i.TtoW1.xyz,bump),dot(i.TtoW2.xyz,bump)));
                fixed4 texColor = tex2D(_MainTex, i.uv.xy + speed);
                fixed3 reflDir = reflect(-viewDir ,bump);
                //fixed3 reflCol = texCUBE(_CubeMap,reflDir).rgb * texColor.rgb * _Color.rgb;     // old cubemap-based reflection color
                
                // Sample the active reflection probe instead of the hand-baked
                // cubemap, then decode the HDR-encoded (RGBM) result.
                half4 rgbm = UNITY_SAMPLE_TEXCUBE(unity_SpecCube0, reflDir);
                half3 color = DecodeHDR(rgbm, unity_SpecCube0_HDR);
                fixed3 reflCol = color.rgb * _Color.rgb;     // reflection color

  其实到这里为止,渲染静态的小型水面已经没有问题了。但是如果是大型水面,且有其他物体在水面附近,反射探针(或者CubeMap)就没有办法模拟反射效果。

  这里我们先暂时屏蔽水面波纹和菲涅尔效果(直接把fresnel设为1),只保留反射颜色,来观察一下。

   找来一只坤哥,发现反射出了巨大的鸡,而且位置也不对。这主要是因为反射探针只能代表一个点,是局部的反射,无法代表整个平面的反射。

 

三、平面反射(Planar Reflection)

参考文章:https://blog.csdn.net/puppet_master/article/details/80808486

 

3.1,基本思路

  由于水面是一个平面,可以模拟一个摄像机从另一边进行渲染,然后把结果丢给一个RenderTexture,再把这个贴图丢给平面。

  如图,A相机要渲染水面,假设对面有个B相机,将B相机的渲染结果给到水面,那么交叉点就显示出了正确的反射后的物体。代码如下:

using UnityEngine;

[ExecuteInEditMode]
public class PlanarReflection : MonoBehaviour
{
    public Camera MainCamera = null;
    public Camera reflectionCamera = null;          // mirrored camera (auto-created if unassigned)
    public RenderTexture reflectionRT = null;       // texture the mirrored camera renders into
    private bool isReflectionCameraRendering = false;   // re-entrancy guard for OnWillRenderObject
    private Material reflectionMaterial = null;

    /// <summary>
    /// Renders the scene from a camera mirrored about this object's plane
    /// (normal = transform.up through transform.position) and feeds the result
    /// into the material's _ReflectionTex, producing a planar reflection.
    /// </summary>
    private void OnWillRenderObject()
    {
        // Rendering the reflection camera re-enters OnWillRenderObject;
        // the flag prevents infinite recursion.
        if (isReflectionCameraRendering)
            return;

        isReflectionCameraRendering = true;

        if (reflectionCamera == null)
        {
            var go = new GameObject("Reflection Camera");
            reflectionCamera = go.AddComponent<Camera>();
            reflectionCamera.CopyFrom(MainCamera);
        }
        if (reflectionRT == null)
        {
            // NOTE(review): this temporary RT is never returned via
            // RenderTexture.ReleaseTemporary — fine for a demo, but confirm
            // the intended lifetime before reusing this in production.
            reflectionRT = RenderTexture.GetTemporary(1024, 1024, 24);
        }
        // Re-sync camera parameters every frame; e.g. scrolling in the editor
        // changes the editor camera's near/far clip planes.
        UpdateCameraParams(MainCamera, reflectionCamera);
        reflectionCamera.targetTexture = reflectionRT;
        reflectionCamera.enabled = false;   // rendered manually below

        // View matrix = main camera's view * reflection matrix: world positions
        // are mirrored about the plane before the usual world-to-camera transform.
        var reflectM = CalculateReflectionMatrix();
        reflectionCamera.worldToCameraMatrix = MainCamera.worldToCameraMatrix * reflectM;

        // Plane as (n, d) with n·p + d = 0 for points p on the water surface.
        var normal = transform.up;
        var d = -Vector3.Dot(normal, transform.position);
        var plane = new Vector4(normal.x, normal.y, normal.z, d);
        // Planes transform by the inverse-transpose of the point transform;
        // this maps the world-space plane into the reflection camera's view space.
        var viewSpacePlane = reflectionCamera.worldToCameraMatrix.inverse.transpose * plane;
        // Oblique clipping: replace the near clip plane with the water plane so
        // geometry below the surface never shows up in the reflection.
        var clipMatrix = reflectionCamera.CalculateObliqueMatrix(viewSpacePlane);
        reflectionCamera.projectionMatrix = clipMatrix;

        // Mirroring flips triangle winding, so invert back-face culling while
        // the reflection camera renders.
        GL.invertCulling = true;
        reflectionCamera.Render();
        GL.invertCulling = false;

        if (reflectionMaterial == null)
        {
            var renderer = GetComponent<Renderer>();
            reflectionMaterial = renderer.sharedMaterial;
        }
        reflectionMaterial.SetTexture("_ReflectionTex", reflectionRT);

        isReflectionCameraRendering = false;
    }

    /// <summary>
    /// Builds the Householder-style reflection matrix about the plane
    /// n·p + d = 0, where n = transform.up and the plane passes through
    /// transform.position: Q = P - 2(n·P + d)n for any world-space point P.
    /// </summary>
    Matrix4x4 CalculateReflectionMatrix()
    {
        var normal = transform.up;
        var d = -Vector3.Dot(normal, transform.position);
        var reflectM = new Matrix4x4();
        reflectM.m00 = 1 - 2 * normal.x * normal.x;
        reflectM.m01 = -2 * normal.x * normal.y;
        reflectM.m02 = -2 * normal.x * normal.z;
        reflectM.m03 = -2 * d * normal.x;

        reflectM.m10 = -2 * normal.x * normal.y;
        reflectM.m11 = 1 - 2 * normal.y * normal.y;
        reflectM.m12 = -2 * normal.y * normal.z;
        reflectM.m13 = -2 * d * normal.y;

        reflectM.m20 = -2 * normal.x * normal.z;
        reflectM.m21 = -2 * normal.y * normal.z;
        reflectM.m22 = 1 - 2 * normal.z * normal.z;
        reflectM.m23 = -2 * d * normal.z;

        reflectM.m30 = 0;
        reflectM.m31 = 0;
        reflectM.m32 = 0;
        reflectM.m33 = 1;
        return reflectM;
    }

    /// <summary>
    /// Copies the rendering parameters of <paramref name="srcCamera"/> onto
    /// <paramref name="destCamera"/>. No-op if either camera is null.
    /// </summary>
    private void UpdateCameraParams(Camera srcCamera, Camera destCamera)
    {
        if (destCamera == null || srcCamera == null)
            return;

        destCamera.clearFlags = srcCamera.clearFlags;
        destCamera.backgroundColor = srcCamera.backgroundColor;
        destCamera.farClipPlane = srcCamera.farClipPlane;
        destCamera.nearClipPlane = srcCamera.nearClipPlane;
        destCamera.orthographic = srcCamera.orthographic;
        destCamera.fieldOfView = srcCamera.fieldOfView;
        destCamera.aspect = srcCamera.aspect;
        destCamera.orthographicSize = srcCamera.orthographicSize;
    }
}
// Minimal planar-reflection shader: samples _ReflectionTex (rendered by the
// PlanarReflection script's mirrored camera) using screen-space coordinates.
Shader "Reflection/PlanarReflection"
{    
    SubShader
    {
        Tags { "RenderType"="Opaque" }
 
        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            
            #include "UnityCG.cginc"
 
            struct appdata
            {
                float4 vertex : POSITION;
            };
 
            struct v2f
            {
                float4 screenPos : TEXCOORD0;   // homogeneous screen coords for sampling
                float4 vertex : SV_POSITION;
            };
 
            // Set each frame by PlanarReflection.SetTexture("_ReflectionTex", ...).
            sampler2D _ReflectionTex;
            
            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.screenPos = ComputeScreenPos(o.vertex);
                return o;
            }
            
            fixed4 frag (v2f i) : SV_Target
            {
                // Perspective divide done manually in the fragment stage.
                fixed4 col = tex2D(_ReflectionTex, i.screenPos.xy / i.screenPos.w);
                // or equivalently:
                //fixed4 col = tex2Dproj(_ReflectionTex, i.screenPos);
                return col;
            }
            ENDCG
        }
    }
}

  原文是自动生成的反射相机和RenderTexture,为了方便管理,我改成了手动创建并挂上去,脚本挂在水面物体上。(反射相机位置随意,反正MVP矩阵都是代码改的)

  效果还是相当不错的,下面我们来分析一下代码。

3.2,反射矩阵

   这段代码其实并没有真正把摄像机挪过去,相机还在原位,它是把反射点P挪到了对面Q。

  N为法向量,O为点P在平面上的垂足(投影点),则 Q = P - 2|OP|·N。

  已知平面上任意一点和法向量垂直,(Xn,Yn,Zn)·(X-X0,Y-Y0,Z-Z0)=0,用水面的transform.up作为法向量(Xn,Yn,Zn),水面位置P0作为(X0,Y0,Z0)

  可得平面方程XnX + YnY + ZnZ + d = 0,其中 d = -XnX0-YnY0-ZnZ0

  对于任意一点P,OP的距离就是向量P0P在N方向的投影。|OP|  = (Xn,Yn,Zn)·(Xp-X0,Yp-Y0,Zp-Z0) = N·P + d

  Q (Xq,Yq,Zq)= (Xp,Yp,Zp) - 2(XnXp + YnYp + ZnZp + d)(Xn,Yn,Zn)

这里以X为例,Xq = Xp - 2XnXnXp- 2XnYnYp - 2XnZnZp - 2dXn,这里就对应第一行的四个参数。将位置P(Xp,Yp,Zp,1)经过矩阵变换后,就得到了位置Q

可以看一下参考文章中的矩阵

  物体从模型空间转换到裁剪空间,需要经过MVP矩阵。其中M矩阵用于将模型转换到世界空间,V矩阵就是worldToCameraMatrix,用于从世界空间转换到视角空间。右乘反射矩阵代表在V矩阵前,先进行世界坐标变换,把P点的世界坐标改到Q点去。(Unity的矩阵是从右向左乘)

 3.3,斜裁剪平面

  如果不进行裁剪,按照原有相机的裁剪平面,把鸡挪到水面以下,鸡脚就会露出来。所以说需要把反射相机的近裁剪平面改成水面。

 

 

  使用worldToCameraMatrix的逆转置矩阵,将水平面转换到反射相机的视角空间。使用逆转置矩阵的原因,和法线在切线空间的转换类似,这里再来推导一下。

  把平面F写成向量的形式:F =(Xn,Yn,Zn,d),有对平面上任意一点P(Xp,Yp,Zp,1),有F·P = 0

  经过任意变换之后,新的平面和点仍满足 F′·P′ = 0。设该变换作用于点和平面的矩阵分别是Mp和Mf,则 (MfF)·(MpP) = 0

  把点积写成矩阵乘法(对左侧向量取转置),即 (MfF)^T (MpP) = 0

  展开即 F^T · Mf^T · Mp · P = 0

  要对所有满足 F·P = 0 的点恒成立,中间的 Mf^T·Mp 必须是单位矩阵,即 Mf^T = Mp^(-1),也就是 Mf = (Mp^(-1))^T ——逆转置矩阵。

  使用内置的CalculateObliqueMatrix函数获取更改了近裁剪平面之后的投影矩阵,并给P矩阵projectionMatrix赋值

3.4,GL.invertCulling

  这个是用于区分模型正反的,正常渲染的时候不会渲染反面,Unity认为顺时针方向的顶点就是正面,逆时针的是反面。由于这个相机是倒着渲染的,每次渲染这个相机之前,需要改变一下正反。

 

四、完整的水面渲染

 

把之前的折射相关代码打开,最好另外用一个变量screenPos来模拟反射的波纹,不要用grabScreenPos,据说在不同平台下,这两个坐标有时候会上下翻转。

(如果想看单纯的波纹效果,可以设置fresnel = 1,然后reflCol不要乘其他东西,就可以看到反射波纹了)

 

 

// Complete water shader: GrabPass refraction + planar reflection (from the
// PlanarReflection script's _ReflectionTex), blended by a Fresnel term.
Shader "Unity Shaders Book/Chapter15/WaterWavePlaneReflection"
{
    Properties
    {
        _Color ("Main Color",Color) = (0,0.15,0.115,1)          // water surface tint
        _MainTex ("Base", 2D) = "white" {}              // water surface wave texture
        _WaveMap ("Wave Map", 2D) = "bump" {}                   // normal map generated from noise
        _WaveXSpeed ("Wave Horizontal Speed", Range(-0.1,0.1)) = 0.01
        _WaveYSpeed ("Wave Vertical Speed", Range(-0.1,0.1)) = 0.01
        _Distortion ("Distortion", Range(0,100)) = 10           // strength of the refraction distortion
    }
    SubShader
    {
        Tags { "Queue"="Transparent" "RenderType"="Opaque" }
        // Grab the current screen contents into _RefractionTex.
        GrabPass
        {
            "_RefractionTex"
        }

        Pass
        {
            CGPROGRAM
            #include "UnityCG.cginc"
            #pragma vertex vert
            #pragma fragment frag

            fixed4 _Color;
            sampler2D _MainTex;
            float4 _MainTex_ST;
            sampler2D _WaveMap;
            float4 _WaveMap_ST;
            fixed _WaveXSpeed;
            fixed _WaveYSpeed;
            float _Distortion;
            sampler2D _RefractionTex;
            float4 _RefractionTex_TexelSize;    // xy = 1/width, 1/height of the grabbed texture

            // Planar-reflection texture, set each frame by PlanarReflection.cs.
            sampler2D _ReflectionTex;
            
            struct a2v
            {
                float4 vertex : POSITION;
                float3 normal : NORMAL;
                float4 tangent : TANGENT;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float4 pos : SV_POSITION;
                // Separate coords for grab vs. reflection sampling: on some
                // platforms the grab texture is vertically flipped relative to
                // a normal screen-space sample.
                float4 grabScreenPos : TEXCOORD0;
                float4 uv : TEXCOORD1;          // xy = _MainTex UV, zw = _WaveMap UV
                // Rows of the tangent-to-world matrix; w components pack the world position.
                float4 TtoW0 : TEXCOORD2;
                float4 TtoW1 : TEXCOORD3;
                float4 TtoW2 : TEXCOORD4;
                float4 screenPos : TEXCOORD5;
            };

            v2f vert (a2v v)
            {
                v2f o;
                o.pos = UnityObjectToClipPos(v.vertex);
                o.grabScreenPos = ComputeGrabScreenPos(o.pos);     // sampling coordinates for the grabbed screen texture
                o.screenPos = ComputeScreenPos(o.pos);

                o.uv.xy = TRANSFORM_TEX(v.uv, _MainTex);
                o.uv.zw = TRANSFORM_TEX(v.uv, _WaveMap);

                float3 worldPos = mul(unity_ObjectToWorld, v.vertex).xyz;
                float3 worldNormal = UnityObjectToWorldNormal(v.normal);
                float3 worldTangent = UnityObjectToWorldDir(v.tangent.xyz);
                float3 worldBinormal = cross(worldNormal,worldTangent) * v.tangent.w;
                // Tangent-to-world matrix: the tangent-space axes are written as
                // columns (transpose of an orthonormal basis equals its inverse).
                o.TtoW0 = float4(worldTangent.x, worldBinormal.x, worldNormal.x, worldPos.x);
                o.TtoW1 = float4(worldTangent.y, worldBinormal.y, worldNormal.y, worldPos.y);
                o.TtoW2 = float4(worldTangent.z, worldBinormal.z, worldNormal.z, worldPos.z);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                float3 worldPos = float3(i.TtoW0.w, i.TtoW1.w, i.TtoW2.w);
                fixed3 viewDir = normalize(UnityWorldSpaceViewDir(worldPos));
                float2 speed = _Time.y * float2(_WaveXSpeed, _WaveYSpeed);

                // Two opposite-scrolling normal samples fake crossing wave layers.
                fixed3 bump1 = UnpackNormal(tex2D(_WaveMap, i.uv.zw + speed)).rgb;
                fixed3 bump2 = UnpackNormal(tex2D(_WaveMap, i.uv.zw - speed)).rgb;
                fixed3 bump = normalize(bump1+bump2);

                // NOTE(review): the reflection offset reuses the refraction
                // texture's texel size; the two RTs may differ in resolution — confirm.
                float2 offset = bump.xy * _Distortion * _RefractionTex_TexelSize.xy;
                i.grabScreenPos.xy = offset * i.grabScreenPos.z + i.grabScreenPos.xy;      // scale offset by depth: deeper water refracts more
                i.screenPos.xy = offset * i.screenPos.z + i.screenPos.xy;
                fixed3 refrCol = tex2D(_RefractionTex , i.grabScreenPos.xy/i.grabScreenPos.w).rgb;        // refraction color

                // Transform the normal into world space for lighting-style dot products.
                bump = normalize(half3(dot(i.TtoW0.xyz,bump),dot(i.TtoW1.xyz,bump),dot(i.TtoW2.xyz,bump)));
                fixed4 texColor = tex2D(_MainTex, i.uv.xy + speed);
                // .rgb added: avoids the implicit float4->float3 truncation warning
                // and matches the refraction sample above.
                fixed3 reflCol = tex2D(_ReflectionTex, i.screenPos.xy / i.screenPos.w).rgb;     // reflection color
                reflCol = reflCol * texColor.rgb * _Color.rgb;

                // Approximate Fresnel: grazing angles reflect more.
                fixed fresnel = pow(1-saturate(dot(viewDir,bump)),4);
                //fresnel = 1;
                fixed3 finalColor = reflCol * fresnel + refrCol * (1 - fresnel);
                //fixed3 finalColor = lerp(refrCol,reflCol,fresnel);
                return fixed4(finalColor,1);
            }
            ENDCG
        }
    }
}

 

posted on 2023-03-08 16:56  草莓♭布丁  阅读(942)  评论(0编辑  收藏  举报

Live2D