前言

笔者曾使用UE实现过一个较为完整的写实水材质系统,但是因为UE的管线是定死的,导致高光无法实现,且后来做笔试题发现关于水的交互还未实现,因此本文将实现一个完整的风格化水

管线设置

  • 水是一种半透明材质,定义无需多说

  • 实现

    "RenderType"="Transparent" 
    "Queue" = "Transparent"
    "IgnoreProjector" = "True"
    

深度

  • 我看网上大多数文章的水深都基于相机的depth difference,这样实现出来的体验感并不好,因为depth会随相机视角变化而变化,所以这里笔者选择基于世界空间y轴的difference

  • 实现

    // Reconstructs the world-space position of the opaque geometry behind the
    // water pixel from the scene depth texture (transparents are excluded,
    // like "scene depth" in other engines).
    // positionSS : screen position (w = view-space depth of the water pixel)
    // viewDirWS  : world-space vector from the pixel toward the camera
    // uv         : screen uv used to sample scene depth
    float3 GetScenePos(float4 positionSS, float3 viewDirWS, float2 uv)
    {
      float3 result = 0.f;
    
      half depth = SampleSceneDepth(uv);
      // raw depth -> linear eye-space depth
      half sceneDepth = LinearEyeDepth(depth, _ZBufferParams);
      // Scale the view ray to the opaque surface, then add the camera position.
      // NOTE(review): assumes viewDirWS / positionSS.w yields a unit-eye-depth
      // ray — confirm against the vertex shader that fills these values.
      result = -viewDirWS / positionSS.w * sceneDepth;
      result += GetCameraPositionWS();
    
      return result;
    }
    
    // Water-depth term: the world-space Y difference between the water pixel
    // and the reconstructed opaque scene point, remapped to [0,1] by
    // _DepthFade. View-independent, unlike a camera depth difference.
    half GetDepthFade(float3 positionWS, float4 positionSS, float3 viewDirWS, float2 refractUV)
    {
      float3 scenePosWS = GetScenePos(positionSS, viewDirWS, refractUV);
      half heightDiff = positionWS.y - scenePosWS.y;
    
      return saturate(heightDiff / _DepthFade);
    }
    
  • 效果

颜色

有了深度就可以用深度来实现水的颜色了,这里实现了浅水、深水、海岸线的颜色

  • 思路:根据深度对浅水颜色和深水颜色进行lerp,再对海岸线进行fresnel lerp

  • 实现

    // Water surface albedo: depth-driven HSV lerp between shallow and deep
    // water colors, then a fresnel-driven HSV lerp toward the horizon color.
    half4 GetSurfaceAlbedo(PSInput psInput, half depthFade)
    {
      float rimFresnel = Fresnel(psInput.normalWS, SafeNormalize(psInput.viewDirWS), _HorizonFresnelExp);
    
      half4 waterColor = HSVLerp(_ShallowColor, _DeepColor, depthFade);
      waterColor = HSVLerp(waterColor, _HorizonColor, rimFresnel);
    
      return saturate(waterColor);
    }
    

    在RGB空间直接lerp容易产生灰暗的过渡色,若想要更好看的颜色,可以尝试转换至HSV空间进行lerp

    // RGB -> HSV conversion (branchless, same construction as the Unity
    // Shader Graph node). Output: x = hue in [0,1), y = saturation, z = value.
    half3 RGBToHSV(half3 In)
    {
      half4 k = half4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
      // Order the channels with lerp/step instead of branches.
      half4 p = lerp(half4(In.bg, k.wz), half4(In.gb, k.xy), step(In.b, In.g));
      half4 q = lerp(half4(p.xyw, In.r), half4(In.r, p.yzx), step(p.x, In.r));
      half chroma = q.x - min(q.w, q.y);
      half eps = 1e-10;  // guards the divisions below against zero
      half hue = abs(q.z + (q.w - q.y) / (6.0 * chroma + eps));
      half sat = chroma / (q.x + eps);
      return half3(hue, sat, q.x);
    }
    
    // HSV -> RGB conversion (inverse of RGBToHSV, Shader Graph construction).
    half3 HSVToRGB(half3 In)
    {
      half4 k = half4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
      // Per-channel triangle waves over hue, offset by a third of a period.
      half3 ramp = abs(frac(In.xxx + k.xyz) * 6.0 - k.www);
      half3 rgb = saturate(ramp - k.xxx);
      // Blend toward white by (1 - saturation), then scale by value.
      return In.z * lerp(k.xxx, rgb, In.y);
    }
    
    // Lerps two RGBA colors in HSV space, taking the shortest path around the
    // hue wheel so the blend never sweeps through unrelated hues.
    // A, B : RGBA colors (converted to HSV in place); T : blend factor [0,1].
    half4 HSVLerp(half4 A, half4 B, half T)
    {
      A.xyz = RGBToHSV(A.xyz);
      B.xyz = RGBToHSV(B.xyz);
    
      half t = T; // used to lerp alpha, needs to remain unchanged
    
      half hue;
      half d = B.x - A.x; // hue difference
    
      // Order the hues so A.x <= B.x, mirroring T so the blend is unchanged.
      if(A.x > B.x)
      {
          half temp = B.x;
          B.x = A.x;
          A.x = temp;
    
          d = -d;
          T = 1-T;
      }
    
      // Hues more than half the wheel apart: wrap around 1 instead of
      // crossing the middle (shortest-path hue interpolation).
      if(d > 0.5)
      {
          A.x = A.x + 1;
          hue = (A.x + T * (B.x - A.x)) % 1;
      }
    
      // Otherwise the plain linear blend is already the shortest path.
      if(d <= 0.5) hue = A.x + T * d;
    
      half sat = A.y + T * (B.y - A.y);
      half val = A.z + T * (B.z - A.z);
      half alpha = A.w + t * (B.w - A.w); // alpha uses the un-mirrored factor
    
      half3 rgb = HSVToRGB(half3(hue,sat,val));
    
      return half4(rgb, alpha);
    }
    
  • 效果

表面涟漪

  • 思路:使用normal map来模拟凹凸感,为了防止涟漪看着太假,可以使用两张normal map并blend

  • 实现

    // Scrolls a uv set linearly over time; speed sets the per-axis rate.
    float2 Panner(float2 uv, float time, half2 speed = half2(1.h, 1.h))
    {
      float2 offset = speed * time;
      return uv + offset;
    }
    
    // "Whiteout" blend of two tangent-space normals: add the XY slopes,
    // multiply the Z components, then renormalize.
    half3 WhiteOutBlendNormal(half3 normal1, half3 normal2)
    {
      half3 blended = half3(normal1.xy + normal2.xy, normal1.z * normal2.z);
      return SafeNormalize(blended);
    }
    
    // Sample the same normal map twice with different world-XZ tilings and
    // scroll speeds, then whiteout-blend them so the ripples don't look tiled.
    half3 normalTS1 = UnpackNormalScale(SAMPLE_TEXTURE2D(_NormalTex, sampler_NormalTex, Panner(positionWS.xz / _Normal1Scale, _Time.y / 100, _Normal1Speed)), _NormalIntensity);
    half3 normalTS2 = UnpackNormalScale(SAMPLE_TEXTURE2D(_NormalTex, sampler_NormalTex, Panner(positionWS.xz / _Normal2Scale, _Time.y / 100, _Normal2Speed)), _NormalIntensity);
    surfaceData.normalTS = WhiteOutBlendNormal(normalTS1, normalTS2);
    
  • 效果

折射

  • 思路:采样Gradient Noise确定扰动的UV,随后使用这个uv对Scene Color采样

  • 实现

    因为这种方法和基于normal采样Scene Color的不同,它会出现错误的情况(不在水中的部分也会进行扭曲),所以需要矫正,具体思路就是判断y difference是否大于等于0

    // Pseudo-random unit gradient for a lattice point, via a permutation-
    // polynomial hash mod 289 (same construction as Shader Graph's
    // Gradient Noise node).
    float2 GradientNoiseDir(float2 uv)
    {
      uv = uv % 289;
      float x = (34 * uv.x + 1) * uv.x % 289 + uv.y;
      x = (34 * x + 1) * x % 289;
      x = frac(x / 41) * 2 - 1;
      return normalize(float2(x - floor(x + 0.5), abs(x) - 0.5));
    }
    // 2D Perlin-style gradient noise (roughly [-0.5, 0.5]): dot each corner
    // gradient with the offset to that corner, then bilinearly blend with a
    // quintic fade curve for smooth derivatives.
    float GradientNoise(float2 uv)
    {
      float2 iuv = floor(uv);  // lattice cell origin
      float2 fuv = frac(uv);   // position within the cell
      float d00 = dot(GradientNoiseDir(iuv), fuv);
      float d01 = dot(GradientNoiseDir(iuv + float2(0, 1)), fuv - float2(0, 1));
      float d10 = dot(GradientNoiseDir(iuv + float2(1, 0)), fuv - float2(1, 0));
      float d11 = dot(GradientNoiseDir(iuv + float2(1, 1)), fuv - float2(1, 1));
      fuv = fuv * fuv * fuv * (fuv * (fuv * 6 - 15) + 10);  // quintic fade 6t^5-15t^4+10t^3
      return lerp(lerp(d00, d01, fuv.y), lerp(d10, d11, fuv.y), fuv.x);
    }
    // Gradient noise shifted from roughly [-0.5, 0.5] into [0, 1].
    float FinalGradientNoise(float2 uv, float scale = 1.f)
    {
      float raw = GradientNoise(scale * uv);
      return raw + 0.5f;
    }
    
    // Computes the distorted screen uv used to sample the scene color for
    // refraction, and falls back to the undistorted uv when the distorted
    // sample would land on geometry ABOVE the water surface — that would
    // otherwise smear objects that are not underwater.
    float2 GetRefractUV(float3 positionWS, float4 positionSS, float2 positionSSNor, float3 viewDirWS, float2 uv)
    {
      float2 result = float2(0.f, 0.f);
      // NOTE: writing to uniforms only changes this invocation's local copy.
      _RefractIntensity = max(0.f, _RefractIntensity) / 100.h;
      _RefractTiling = max(0.f, _RefractTiling) / 100.h;
    
      result += Panner(uv * rcpFastNR1(_RefractTiling), _Time.y, float2(_RefractSpeed, _RefractSpeed));
      // scalar noise splatted to both channels, then remapped [0,1] -> [-1,1]
      result = FinalGradientNoise(result);
      result = RemapFloat2(result, float2(0, 1), float2(-1, 1));
      result *= _RefractIntensity;
      result += positionSSNor;
    
      // Reject incorrect refraction: keep the distorted uv only if the point
      // it samples lies below the water pixel (y difference >= 0).
      float3 scenePos = GetScenePos(positionSS, viewDirWS, result);
      result = (positionWS - scenePos).y >= 0.f ? result : positionSSNor;
    
      return result;
    }
    
    // Fetches the scene color at the (already distorted) refraction uv.
    half3 GetRefract(float2 refractUV)
    {
      half3 refracted = SampleSceneColor(refractUV);
      return refracted;
    }
    

    这里不能将折射图像和之前求得图像简单的合并,笔者采用的是进行lerp

    // GetRefract takes only the refraction uv (see its definition above);
    // the original two-argument call would not compile.
    half3 refractColor = GetRefract(refractUV);
    // Blend 50/50 with the water albedo rather than adding/replacing, so the
    // depth-based water color is preserved.
    surfaceData.albedo = lerp(albedo.rgb, refractColor, 0.5);
    
  • 效果

反射

实现反射

  • 因为平面反射使用一个摄像机从镜像角度渲染场景到RenderTexture上,再将结果合并至反射平面,开销不小,适用于PC端,这种方式实现的反射效果十分准确,但缺点是需要重新渲染一次场景,想想如果有多个镜面,那开销着实受不了,所以笔者选择使用对移动端友好的屏幕空间平面反射(SSPR, Screen Space Planar Reflection),原理是在屏幕空间中计算镜面反射,开销很低,但效果不是很准确,只能反射屏幕中的像素,容易露馅。对于水面来说,SSPR足以

  • 实现

    1. 因为ssrp基于屏幕空间,可以将其看作一种后处理技术,Volume代码如下
      using System;
      using UnityEngine;
      using UnityEngine.Rendering;
      
      namespace UnityEditor.Rendering.Universal
      {
       // Volume settings for screen-space planar reflection (SSPR).
       // NOTE: [SerializeField] is only valid on fields; a VolumeComponent
       // class must be marked [Serializable] to be saved in a Volume profile.
       [Serializable]
       public class SSPRVolume : VolumeComponent
       {
           [Tooltip("是否启用SSPR")]
           public BoolParameter m_Enable = new BoolParameter(false);
      
           [Tooltip("是否启用HDR")] 
           public BoolParameter m_EnableHDR = new BoolParameter(false);
      
           [Tooltip("反射平面分辨率")]
           public ClampedIntParameter m_RTSize = new ClampedIntParameter(512, 128, 1024, false);
      
           [Tooltip("反射平面深度")]
           public FloatParameter m_ReflectPlaneHeight = new FloatParameter(0.5f, false);
      
           [Tooltip("根据距离对屏幕边缘进行平滑")]
           public ClampedFloatParameter m_FadeOutEdge = new ClampedFloatParameter(0.5f, 0f, 1f, false);
      
           [Tooltip("控制分辨率")] 
           public ClampedIntParameter m_Downsample = new ClampedIntParameter(1, 1, 5);
      
           [Tooltip("模糊算法循环次数,越高效果越好")]
           public ClampedIntParameter m_PassLoop = new ClampedIntParameter(1, 1, 5);
      
           [Tooltip("模糊强度")] 
           public ClampedFloatParameter m_BlurIntensity = new ClampedFloatParameter(1, 0, 10);
      
           // Standard VolumeComponent activity hooks used by the render pass.
           public bool IsActive() => m_Enable.value;
           public bool IsTileCompatible() => false;
       }
      }
      
    2. compute shader

    • 参数

      #define THREADCOUNTX 8
      #define THREADCOUNTY 8
      #define THREADCOUNTZ 1
      
      RWTexture2D<half4> _ResultRT;  // output: reflected color (alpha = edge-fade mask)
      RWTexture2D<half> _ResultDepthRT;  // output: depth of the reflected pixels
      Texture2D<float4> _ScreenColorTex;    // scene color
      Texture2D<float4> _ScreenDepthTex;   // scene depth
      
      float2 _RTSize; // resolution of the reflection RT
      float _ReflectPlaneHeight;  //  reflection plane height (compared against reconstructed world-space Y)
      float _FadeOutEdge; // fade amount near the screen edges
      float _BlurIntensity;   // DualBlur strength
      
      SamplerState PointClampSampler;    // point sampling
      SamplerState LinearClampSampler;   // linear sampling
      
    • 初始化

      将RT和Depth清0

      // Clears the reflection color and depth RTs before the SSPR pass.
      [numthreads(THREADCOUNTX, THREADCOUNTY, THREADCOUNTZ)]
      void ClearRT(uint3 id : SV_DispatchThreadID)
      {
         _ResultRT[id.xy] = half4(0.f, 0.f, 0.f, 0.f);
         // _ResultDepthRT is RWTexture2D<half>: store a scalar. The original
         // stored a half4 and relied on implicit truncation (warning X3206).
         _ResultDepthRT[id.xy] = 0.h;
      }
      
    • SSRP计算

      • 重建世界坐标
        // Stage 1 of the SSPR kernel: reconstruct the world-space position of
        // each screen pixel from the raw depth buffer.
        [numthreads(THREADCOUNTX, THREADCOUNTY, THREADCOUNTZ)]
        void CSMain (uint3 id : SV_DispatchThreadID)
        {
         float2 uvSS = id.xy / _RTSize;  // [0,RTSize-1] -> screen [0,1] uv
        
         float posNDCZ = _ScreenDepthTex.SampleLevel(PointClampSampler, uvSS, 0.f).r;    // raw depth
        
         float3 absoluteWolrdPos = ComputeWorldSpacePosition(uvSS, posNDCZ, UNITY_MATRIX_I_VP);  // reconstructed world-space position
        }
        
      • 重建反射后的世界坐标
        if(absoluteWolrdPos.y < _ReflectPlaneHeight) return;    // discard pixels below the reflection plane
        float3 reflectPosWS = absoluteWolrdPos;
        reflectPosWS.y = -(reflectPosWS.y - _ReflectPlaneHeight) + _ReflectPlaneHeight; // mirror Y about the plane: shift so the plane is y = 0, negate, shift back
        

      • 构建并测试屏幕空间uv

        // The mirrored position can project outside the screen, so test it.
        float4 reflectPosCS = mul(UNITY_MATRIX_VP, float4(reflectPosWS, 1.f));   // posWS -> posCS
        float2 reflectPosNDCxy = reflectPosCS.xy / reflectPosCS.w;  // posCS -> posNDC
        if(abs(reflectPosNDCxy.x) > 1.f || abs(reflectPosNDCxy.y) > 1.f) return; // NDC xy range is [-1,1]
        float2 reflectPosSSxy = reflectPosNDCxy * 0.5 + 0.5;   // ndc->ss
        
        // On D3D-style platforms the uv origin is at the top of the screen.
        #if defined UNITY_UV_STARTS_AT_TOP
         reflectPosSSxy.y = 1 - reflectPosSSxy.y;
        #endif
        
        uint2 reflectSSUVID = reflectPosSSxy * _RTSize;  // texel index derived from the corrected screen uv
        
      • 输出
        // Scatter: write this pixel's scene color to its mirrored location.
        _ResultRT[reflectSSUVID] = float4(_ScreenColorTex.SampleLevel(LinearClampSampler, uvSS, 0).rgb, 1);
        

        可以看到这存在一定的遮挡问题(物体的反射渲染顺序错误)。如下图所示,造成这一现象的原因是两个不同的像素反射后都位于同一UV

      • 深度顺序矫正

        很显然,这里可以用到深度缓冲区,写入深度更小的像素(dx平台因为z值翻转,需要写入深度更大的像素)。这样会导致无法反射sky box,后续需要在PS中合并

        // Depth-order correction: when several source pixels scatter to the
        // same reflected texel, keep the one closest to the camera. With
        // reversed-Z the closer pixel has the LARGER depth value.
        // (The original #else branch referenced undefined names rePosCS /
        // reSSUVID; they must match the variables declared above.)
        #if defined UNITY_REVERSED_Z
        if(reflectPosCS.z / reflectPosCS.w <= _ResultDepthRT[reflectSSUVID]) return;
        #else
        if(reflectPosCS.z / reflectPosCS.w >= _ResultDepthRT[reflectSSUVID]) return;
        #endif
        
        _ResultDepthRT[reflectSSUVID] = reflectPosCS.z / reflectPosCS.w;
        
      • 边缘缺失

        在某些视角下,若反射所需要的信息在渲染后的RT外,会生成不少空白像素,这里有两种解决办法:拉伸uv和2d sdf

        考虑到性能,笔者使用SDF。具体原理是:sdf 作为不透明度

        // Distance from a point (in [-1,1] screen coords) to the screen
        // rectangle, used as an opacity mask so reflections fade at the edges.
        // NOTE(review): the canonical box SDF is
        // length(max(d,0)) + min(max(d.x,d.y),0); this variant subtracts the
        // interior term inside length(). Only |mask| near the border feeds the
        // smoothstep below, so it works, but confirm the formula is intended.
        float SDF(float2 pos)
        {
         float2 distance = abs(pos) - float2(1, 1);
         return length(max(0.f, distance) - min(max(distance.x, distance.y), 0.f));
        }
        
        float mask = SDF(uvSS * 2.f - 1.f);
        mask = smoothstep(0.f, _FadeOutEdge, abs(mask));   // soften the falloff so the edge is not harsh
        _ResultRT[reflectSSUVID] = float4(_ScreenColorTex.SampleLevel(LinearClampSampler, uvSS, 0).rgb, mask);
        
      • 填充空洞

        如下图所示,反射后的平面会存在许多黑点空洞。这是因为计算世界坐标进行了透视投影的计算(近大远小),导致最后的像素索引进行了偏移(如本该渲染到(1,1)的,却渲染到(1,2))

        解决方法有两个:计算像素明度 和 根据像素的透明度,这里笔者选择透明度

        // Fills 1-texel holes left by the scatter pass: perspective projection
        // shifts some source pixels off their target texel (e.g. a pixel meant
        // for (1,1) lands on (1,2)). A neighbor whose alpha is at least 0.5
        // higher than a texel's own alpha is treated as valid and copied in.
        // NOTE(review): neighboring threads read/write overlapping texels with
        // no synchronization, and id.xy + uint2(0,-1) wraps at row/column 0
        // (unsigned arithmetic) — visually acceptable here, but worth knowing.
        [numthreads(THREADCOUNTX, THREADCOUNTY, THREADCOUNTZ)]
        void FillHoles (uint3 id : SV_DispatchThreadID)
        {
         // gather the 4-neighborhood
         float4 center = _ResultRT[id.xy];
         float4 top = _ResultRT[id.xy + uint2(0, 1)];
         float4 bottom = _ResultRT[id.xy + uint2(0, -1)];
         float4 right = _ResultRT[id.xy + uint2(1, 0)];
         float4 left = _ResultRT[id.xy + uint2(-1, 0)];
        
         // pick the most opaque (non-hole) sample in the neighborhood
         float4 best = center;
         best = top.a > best.a + 0.5 ? top : best;
         best = bottom.a > best.a + 0.5 ? bottom : best;
         best = right.a > best.a + 0.5 ? right : best;
         best = left.a > best.a + 0.5 ? left : best;
        
         // write the best sample over any texel that reads as a hole
         _ResultRT[id.xy] = best.a > center.a + 0.5 ? best : center;
         _ResultRT[id.xy + uint2(0, 1)] = best.a > top.a + 0.5 ? best : top;
         _ResultRT[id.xy + uint2(0, -1)] = best.a > bottom.a + 0.5 ? best : bottom;
         _ResultRT[id.xy + uint2(1, 0)] = best.a > right.a + 0.5 ? best : right;
         _ResultRT[id.xy + uint2(-1, 0)] = best.a > left.a + 0.5 ? best : left;
        }
        

      • 闪烁

        没有找到好的解决方案,对于水体可以使用折射来遮羞

      • skybox 截断

        目前的实现还存在一个问题:当观察角度过于低时会绘制截断的skybox

        • 解决之道:跳过skybox
        // Skip pixels whose depth is (almost) at the far plane — the skybox —
        // so a clipped skybox is never reflected; the sky contribution is
        // added back later in the pixel shader instead.
        void CSMain (uint3 id : SV_DispatchThreadID)
        {
           float2 uvSS = id.xy / _RTSize;  // [0,RTSize-1] -> screen [0,1] uv
           float posNDCZ = _ScreenDepthTex.SampleLevel(PointClampSampler, uvSS, 0.f).r;    // raw depth
           if(Linear01Depth(posNDCZ, _ZBufferParams) > 0.99) return; 
        }
        
    3. Render Feature

    反射是有一定的模糊效果的,这里使用Dual Blur

    using System;
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Rendering.Universal;
    using UnityEditor.Rendering.Universal;
    
    // Renderer feature that computes screen-space planar reflection (SSPR)
    // with a compute shader and then dual-blurs the result with a material.
    public class SSPRPassFeature : ScriptableRendererFeature
    {
      [System.Serializable]
      public class PassSetting
      {
          [Tooltip("显示于frame debugger")]
          public readonly string m_ProfilerTag = "SSRP Pass";
    
          [Tooltip("Pass执行位置")]
          public RenderPassEvent passEvent = RenderPassEvent.AfterRenderingTransparents;
    
          [Tooltip("compute shader")]
          public ComputeShader CS = null;
    
          [Tooltip("Pixel Shader")]
          public Material material = null;
      }
    
      class SSPRRenderPass : ScriptableRenderPass
      {
          // profiler tag will show up in frame debugger
          private const string m_ProfilerTag = "SSPR Pass";
    
          // pass settings and the volume component driving this pass
          private PassSetting m_passSetting;
          private SSPRVolume m_SSPRVolume;
    
          private ComputeShader m_CS;
          private Material m_Material;
    
          // resolution of the reflection RT
          private int RTSizeWidth;
          private int RTSizeHeight;
    
          struct RTID
          {
              public static RenderTargetIdentifier m_ResultRT;
              public static RenderTargetIdentifier m_ResultDepthRT;
              public static RenderTargetIdentifier m_TempRT;
          }
    
          struct ShaderID
          {
              public static readonly int RTSizeID = Shader.PropertyToID("_RTSize");
              public static readonly int ReflectPlaneHeightID = Shader.PropertyToID("_ReflectPlaneHeight");
              public static readonly int FadeOutEdgeID = Shader.PropertyToID("_FadeOutEdge");
              public static readonly int ReflectColorRTID = Shader.PropertyToID("_ResultRT");
              public static readonly int ResultDepthRTID = Shader.PropertyToID("_ResultDepthRT");
              public static readonly int SceneColorRTID = Shader.PropertyToID("_ScreenColorTex");
              public static readonly int SceneDepthRTID = Shader.PropertyToID("_ScreenDepthTex");
    
              public static readonly int BlurIntensityID = Shader.PropertyToID("_BlurIntensity");
              public static readonly int TempRTID = Shader.PropertyToID("_TempRT");
          }
          private struct BlurLevelShaderID
          {
              internal int downLevelID;
              internal int upLevelID;
          }
          private BlurLevelShaderID[] blurLevel;
          private static int m_maxBlurLevel = 16;
    
          struct Dispatch
          {
              // thread-group counts per axis
              public static int ThreadGroupCountX;
              public static int ThreadGroupCountY;
              public static int ThreadGroupCountZ;
              // threads per group; must match [numthreads] in the compute shader
              public static int ThreadCountX;
              public static int ThreadCountY;
              public static int ThreadCountZ;
          }
    
          // kernel indices resolved from the compute shader
          struct Kernel
          {
              public static int ClearRT;
              public static int CSMain;
              public static int FillHoles;
          }
    
          // caches settings and resolves the compute kernels
          public SSPRRenderPass(SSPRPassFeature.PassSetting passSetting) 
          {
              this.m_passSetting = passSetting;
    
              renderPassEvent = m_passSetting.passEvent;
    
              this.m_CS = m_passSetting.CS;
              this.m_Material = m_passSetting.material;
    
              // Guard: FindKernel on an unassigned compute shader would throw
              // here in Create(), before Execute can log a readable error.
              if (m_CS != null)
              {
                  Kernel.ClearRT = m_CS.FindKernel("ClearRT");
                  Kernel.CSMain = m_CS.FindKernel("CSMain");
                  Kernel.FillHoles = m_CS.FindKernel("FillHoles");
              }
          }
    
          public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
          {
              // fetch the SSPR settings from the global volume stack
              var POSTStack = VolumeManager.instance.stack;
              m_SSPRVolume = POSTStack.GetComponent<SSPRVolume>();
    
              float aspect = (float)Screen.height / Screen.width;   // screen aspect ratio
              // threads per group; mirrors THREADCOUNT* in the compute shader
              Dispatch.ThreadCountX = 8;
              Dispatch.ThreadCountY = 8;
              Dispatch.ThreadCountZ = 1;
              // one thread per texel of the reflection RT
              Dispatch.ThreadGroupCountY = m_SSPRVolume.m_RTSize.value / Dispatch.ThreadCountY;    // RT height
              Dispatch.ThreadGroupCountX = Mathf.RoundToInt(Dispatch.ThreadGroupCountY / aspect); // RT width
              Dispatch.ThreadGroupCountZ = 1;
              // final reflection RT resolution
              this.RTSizeWidth = Dispatch.ThreadGroupCountX * Dispatch.ThreadCountX;
              this.RTSizeHeight = Dispatch.ThreadGroupCountY * Dispatch.ThreadCountY;
    
              // the alpha channel doubles as the edge-fade mask
              RenderTextureDescriptor descriptor = new RenderTextureDescriptor(this.RTSizeWidth, this.RTSizeHeight, RenderTextureFormat.ARGB32);
              if (m_SSPRVolume.m_EnableHDR.value == true)
              {
                  // 16 bits per channel to support HDR reflections
                  descriptor.colorFormat = RenderTextureFormat.ARGBHalf;
              }
              descriptor.enableRandomWrite = true;    // required for UAV (compute) writes on D3D
    
              // allocate the reflection color and depth RTs
              cmd.GetTemporaryRT(ShaderID.ReflectColorRTID, descriptor);
              RTID.m_ResultRT = new RenderTargetIdentifier(ShaderID.ReflectColorRTID);
              descriptor.colorFormat = RenderTextureFormat.R16;    // the depth RT only needs the r channel
              cmd.GetTemporaryRT(ShaderID.ResultDepthRTID, descriptor);
              RTID.m_ResultDepthRT = new RenderTargetIdentifier(ShaderID.ResultDepthRTID);
    
              blurLevel = new BlurLevelShaderID[m_maxBlurLevel];
              for (int t = 0; t < m_maxBlurLevel; ++t)
              {
                  blurLevel[t] = new BlurLevelShaderID
                  {
                      downLevelID = Shader.PropertyToID("_BlurMipDown" + t),
                      upLevelID = Shader.PropertyToID("_BlurMipU" + t)
                  };
              }
          }
    
          public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
          {
              if (m_CS == null)
              {
                  Debug.LogError("Custom:SSPR compute shader missing");
                  return;   // dispatching below would throw a NullReferenceException
              }
    
              if (m_SSPRVolume.m_Enable.value == true)
              {
                  // Grab a command buffer. We put the actual execution of the pass inside of a profiling scope
                  CommandBuffer cmd = CommandBufferPool.Get();
    
                  using (new ProfilingScope(cmd, new ProfilingSampler(m_ProfilerTag)))
                  {
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.ClearRT, ShaderID.ReflectColorRTID, RTID.m_ResultRT);
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.ClearRT, ShaderID.ResultDepthRTID, RTID.m_ResultDepthRT);
                      cmd.DispatchCompute(this.m_CS, Kernel.ClearRT, Dispatch.ThreadGroupCountX, Dispatch.ThreadGroupCountY, Dispatch.ThreadGroupCountZ);
    
                      cmd.SetComputeVectorParam(this.m_CS, ShaderID.RTSizeID, new Vector2(this.RTSizeWidth, this.RTSizeHeight));
                      cmd.SetComputeFloatParam(this.m_CS, ShaderID.ReflectPlaneHeightID, m_SSPRVolume.m_ReflectPlaneHeight.value);
                      cmd.SetComputeFloatParam(this.m_CS, ShaderID.FadeOutEdgeID, m_SSPRVolume.m_FadeOutEdge.value);
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.CSMain, ShaderID.ReflectColorRTID, RTID.m_ResultRT);
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.CSMain, ShaderID.ResultDepthRTID, RTID.m_ResultDepthRT);
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.CSMain, ShaderID.SceneColorRTID, new RenderTargetIdentifier("_CameraOpaqueTexture"));
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.CSMain, ShaderID.SceneDepthRTID, new RenderTargetIdentifier("_CameraDepthTexture"));
                      cmd.DispatchCompute(this.m_CS, Kernel.CSMain, Dispatch.ThreadGroupCountX, Dispatch.ThreadGroupCountY, Dispatch.ThreadGroupCountZ);
    
                      cmd.SetComputeTextureParam(this.m_CS, Kernel.FillHoles, ShaderID.ReflectColorRTID, RTID.m_ResultRT);
                      cmd.DispatchCompute(this.m_CS, Kernel.FillHoles, Dispatch.ThreadGroupCountX, Dispatch.ThreadGroupCountY, Dispatch.ThreadGroupCountZ);
                  }
    
                  if (m_Material == null)
                  {
                      Debug.LogError("Custom:SSPR shader missing");
                  }
                  else
                  {
                      m_Material.SetFloat(ShaderID.BlurIntensityID, m_SSPRVolume.m_BlurIntensity.value);
    
                      RenderTextureDescriptor descriptor = new RenderTextureDescriptor(this.RTSizeWidth, this.RTSizeHeight, RenderTextureFormat.Default);
                      descriptor.width /= m_SSPRVolume.m_Downsample.value;
                      descriptor.height /= m_SSPRVolume.m_Downsample.value;
                      //descriptor.depthBufferBits = 0;     // depth is unused; precision could be set to 0
                      cmd.GetTemporaryRT(ShaderID.TempRTID, descriptor);
                      RTID.m_TempRT = new RenderTargetIdentifier(ShaderID.TempRTID);
    
                      Blit(cmd, RTID.m_ResultRT, RTID.m_TempRT);
    
                      // dual blur: downsample chain (material pass 1), then upsample chain (pass 2)
                      RenderTargetIdentifier lastDown = RTID.m_TempRT;
                      for (uint i = 0; i < m_SSPRVolume.m_PassLoop.value; ++i)
                      {
                          int midDown = blurLevel[i].downLevelID;
                          int midUp = blurLevel[i].upLevelID;
                          cmd.GetTemporaryRT(midDown, descriptor, FilterMode.Bilinear);
                          cmd.GetTemporaryRT(midUp, descriptor, FilterMode.Bilinear);
                          Blit(cmd, lastDown, midDown, m_Material, 1);
                          lastDown = midDown;
    
                          descriptor.width = Mathf.Max(descriptor.width / 2, 1);
                          descriptor.height = Mathf.Max(descriptor.height / 2, 1);
                      }
    
                      int lastUp = blurLevel[m_SSPRVolume.m_PassLoop.value - 1].downLevelID;
                      for (int i = m_SSPRVolume.m_PassLoop.value - 2; i >= 0; --i)
                      {
                          int midUp = blurLevel[i].upLevelID;
                          cmd.Blit(lastUp, midUp, m_Material, 2);
                          lastUp = midUp;
                      }
    
                      cmd.Blit(lastUp, RTID.m_ResultRT, m_Material, 2);
                  }
                  context.ExecuteCommandBuffer(cmd);
                  CommandBufferPool.Release(cmd);
              }
          }
    
          public override void OnCameraCleanup(CommandBuffer cmd)
          {
              if(cmd == null) throw new ArgumentNullException("cmd");
    
              // Release the RTs this pass allocated. The original released
              // _ScreenDepthTex (which it never allocated) and leaked both the
              // reflection depth RT and the blur temp RT.
              cmd.ReleaseTemporaryRT(ShaderID.ReflectColorRTID);
              cmd.ReleaseTemporaryRT(ShaderID.ResultDepthRTID);
              cmd.ReleaseTemporaryRT(ShaderID.TempRTID);
          }
      }
    
      public PassSetting m_Setting = new PassSetting();
      SSPRRenderPass m_SSPRPass;
    
      public override void Create()
      {
          m_SSPRPass = new SSPRRenderPass(m_Setting);
      }
    
      public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
      {
          // can queue up multiple passes after each other
          renderer.EnqueuePass(m_SSPRPass);
      }
    }
    
    4. Shader

    最终效果还是得用material(pixel shader)实现(不过不用置于物体上)

    Shader "URP/reflect"
    {
      Properties
      {
          [HideInInspector] _MainTex("", 2D) = "white" {}
      }
    
      SubShader
      {
          Tags
          {
              "RenderPipeline"="UniversalRenderPipeline"
              "Queue"="Overlay"
          }
          Cull Off
          ZWrite Off
          ZTest Always
    
          HLSLINCLUDE
          #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    
          CBUFFER_START(UnityPerMaterial)
          float _BlurIntensity;
          float4 _MainTex_TexelSize;
          CBUFFER_END
    
          TEXTURE2D(_MainTex);            SAMPLER(sampler_MainTex);
          TEXTURE2D(_ResultRT);
          sampler LinearClampSampler;
    
           struct VSInput
           {
               float4 positionOS : POSITION;
               float4 normalOS : NORMAL;
               float2 uv : TEXCOORD;
           };
    
           struct PSInput
           {
               float4 positionCS : SV_POSITION;
               float2 uv:TEXCOORD;
               float4 uv01 : TEXCOORD1;
               float4 uv23 : TEXCOORD2;
               float4 uv45 : TEXCOORD3;
               float4 uv67 : TEXCOORD4;
               float4 positionSS : TEXCOORD5;
               float3 positionWS : TEXCOORD6;
               float3 viewDirWS : TEXCOORD7;
               float3 normalWS : NORMAL;
           };
          ENDHLSL
    
          pass
          {
              NAME "Copy SSRP"
              Tags
              {
                  "LightMode"="UniversalForward"
                  "RenderType"="Overlay"
              }
    
              HLSLPROGRAM
              #pragma vertex VS
              #pragma fragment PS
    
              PSInput VS(VSInput vsInput)
              {
                  PSInput vsOutput;
    
                  VertexPositionInputs vertexPos = GetVertexPositionInputs(vsInput.positionOS);
                  vsOutput.positionWS = vertexPos.positionWS;
                  vsOutput.positionCS = vertexPos.positionCS;
                  vsOutput.positionSS = ComputeScreenPos(vsOutput.positionCS);
    
                  VertexNormalInputs vertexNormal = GetVertexNormalInputs(vsInput.normalOS);
                  vsOutput.normalWS = vertexNormal.normalWS;
    
                  return vsOutput;
              }
    
              half4 PS(PSInput psInput) : SV_Target
              {
                  half2 screenUV = psInput.positionSS.xy / psInput.positionSS.w;
                  float4 SSPRResult = SAMPLE_TEXTURE2D(_ResultRT, LinearClampSampler, screenUV);
                  SSPRResult.xyz *= SSPRResult.w;
    
                  return SSPRResult;
              }
              ENDHLSL
          }
    
          Pass
          {
              NAME "Down Samp[le"
    
              HLSLPROGRAM
              #pragma vertex VS
              #pragma fragment PS
    
              PSInput VS(VSInput vsInput)
              {
                  PSInput vsOutput;
    
                  vsOutput.positionCS = TransformObjectToHClip(vsInput.positionOS);
    
                  // 在D3D平台下,若开启抗锯齿,_TexelSize.y会变成负值,需要进行oneminus,否则会导致图像上下颠倒
                  #ifdef UNITY_UV_STARTS_AT_TOP
                  if(_MainTex_TexelSize.y < 0)
                      vsInput.uv.y = 1 - vsInput.uv.y;
                  #endif
    
                  vsOutput.uv = vsInput.uv;
                  vsOutput.uv01.xy = vsInput.uv + float2(1.f, 1.f) * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv01.zw = vsInput.uv + float2(-1.f, -1.f) * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv23.xy = vsInput.uv + float2(1.f, -1.f) * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv23.zw = vsInput.uv + float2(-1.f, 1.f) * _MainTex_TexelSize.xy * _BlurIntensity;
    
                  return vsOutput;
              }
    
              float4 PS(PSInput psInput) : SV_TARGET
              {
                  float4 outputColor = 0.f;
    
                  outputColor += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv.xy) * 0.5;
    
                  outputColor += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv01.xy) * 0.125;
                  outputColor += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv01.zw) * 0.125;
                  outputColor += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv23.xy) * 0.125;
                  outputColor += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv23.zw) * 0.125;
    
                  return outputColor;
              }
              ENDHLSL
          }
    
          Pass
          {
              NAME "Up Sample"
    
              HLSLPROGRAM
              #pragma vertex VS
              #pragma fragment PS
    
              PSInput VS(VSInput vsInput)
              {
                  PSInput vsOutput;
    
                  vsOutput.positionCS = TransformObjectToHClip(vsInput.positionOS);
    
                  #ifdef UNITY_UV_STARTS_AT_TOP
                  if(_MainTex_TexelSize.y < 0.f)
                      vsInput.uv.y = 1 - vsInput.uv.y;
                  #endif
    
                  vsOutput.uv = vsInput.uv;
                  // 1/12
                  vsOutput.uv01.xy = vsInput.uv + float2(0, 1) * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv01.zw = vsInput.uv + float2(0, -1) * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv23.xy = vsInput.uv + float2(1, 0) * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv23.zw = vsInput.uv + float2(-1, 0) * _MainTex_TexelSize.xy * _BlurIntensity;
                  // 1/6
                  vsOutput.uv45.xy = vsInput.uv + float2(1, 1) * 0.5 * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv45.zw = vsInput.uv + float2(-1, -1) * 0.5 * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv67.xy = vsInput.uv + float2(1, -1) * 0.5 * _MainTex_TexelSize.xy * _BlurIntensity;
                  vsOutput.uv67.zw = vsInput.uv + float2(-1, 1) * 0.5 * _MainTex_TexelSize.xy * _BlurIntensity;
    
                  return vsOutput;
              }
    
              float4 PS(PSInput psInput) : SV_TARGET
              {
                  // 8-tap blur pass for the reflection RT. The four
                  // axis-aligned taps (uv01/uv23) contribute 1/12 each and
                  // the four diagonal half-offset taps (uv45/uv67) 1/6 each,
                  // so the kernel weights sum to exactly 1 and brightness
                  // is preserved. All offsets were precomputed in the VS.
                  float4 blurred = 0.f;

                  // Axis-aligned ring, weight 1/12 per tap.
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv01.xy) / 12;
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv01.zw) / 12;
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv23.xy) / 12;
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv23.zw) / 12;

                  // Diagonal ring, weight 1/6 per tap.
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv45.xy) / 6;
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv45.zw) / 6;
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv67.xy) / 6;
                  blurred += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv67.zw) / 6;

                  return blurred;
              }
              ENDHLSL
          }
      }
    } 
    

将反射加入水中

  • 思路:屏幕空间平面反射图像 + 天空盒反射,并扭曲模糊

  • 实现

    // Water reflection: screen-space planar reflection capture plus the
    // environment (sky box) contribution, each scaled by its user weight.
    half3 GetReflect(float4 positionSS, half3 normalWS, half3 viewDirWS)
    {
      // Perspective divide -> normalized screen UV.
      float2 screenUV = positionSS.xy / positionSS.w;

      // Distort the reflection lookup with the surface normal.
      // _ScreenParams.zw is (1 + 1/width, 1 + 1/height), so subtracting 1
      // yields one texel in UV space.
      float2 texel = _ScreenParams.zw - 1.h;
      float2 distortedUV = screenUV + normalWS.xy * max(0.f, _ReflectDistortIntensity) * texel;

      half3 planarColor = SAMPLE_TEXTURE2D(_ResultRT, sampler_ResultRT, distortedUV).rgb * max(0.0001f, _SSRPColorScale);
      // The mip level picks how blurry the sky box reflection appears.
      half3 envColor = GetEnvironmentColor(viewDirWS, normalWS, _EnvironmentMipMapLevel) * max(0.0001f, _EnvironmentColorScale);

      return planarColor + envColor;
    }
    
  • 效果

焦散

  • 思路:在UE中使用后处理实现过,这里尝试一种新方法,重构世界坐标并采样焦散贴图,最后控制焦散出现区域

  • 纹理

  • 实现

使用Panner让两层uv朝相反方向平移,以模拟焦散的运动

    // Caustics: reconstruct the world position of the opaque scene under the
    // water from the depth buffer, sample two copies of the caustic texture
    // panning in opposite directions, keep min() of both (the cheap classic
    // caustic trick), then fade the result out with depth so caustics only
    // appear in shallow water.
    half3 GetCaustic(half depthFade, float2 positionSSNor)
    {
      half3 result = half3(0.h, 0.h, 0.h);
      _CausticTiling = max(0.0001h, _CausticTiling); // guard the division below
      _CausticRange = max(0.1h, _CausticRange);
      _ShoreCausticTransparent = max(0.h, _ShoreCausticTransparent);
      _CausticTint = max(0.h, _CausticTint);

      // FIX: the raw depth must stay in full float precision. A half has only
      // a 10-bit mantissa, which is far too coarse for depth-buffer values
      // and makes the world-space reconstruction band/swim visibly.
      float rawDepth = SampleSceneDepth(positionSSNor);
      // Rebuild the world-space position of the scene behind the water.
      float3 positionWS = ComputeWorldSpacePosition(positionSSNor, rawDepth, UNITY_MATRIX_I_VP);

      // Exponential falloff: 1 at zero depth, fading out with depthFade.
      half causticArea = saturate(exp(-depthFade / _CausticRange));

      float2 uv = positionWS.xz / _CausticTiling;
      // Two layers panning in opposite directions fake the caustic motion.
      float2 uv1 = Panner(uv, _Time.y / 10, half2(_CausticSpeed.x, _CausticSpeed.x));
      float2 uv2 = Panner(-uv, _Time.y / 10, half2(_CausticSpeed.y, _CausticSpeed.y));
      half3 causticTex1 = SAMPLE_TEXTURE2D(_CausticTex, sampler_CausticTex, uv1).rgb;
      half3 causticTex2 = SAMPLE_TEXTURE2D(_CausticTex, sampler_CausticTex, uv2).rgb;
      // min() keeps brightness only where the bright bands of both overlap.
      result = min(causticTex1, causticTex2) * _CausticTint;
      result *= causticArea;

      return result;
    }
    
    half3 causticColor = GetCaustic(defaultDepthFade, positionSSNor);
    surfaceData.albedo += causticColor;
    
  • 效果

浪花

  • 参考了很多游戏中的风格化水体效果,发现很少实现波峰浪花的,仅仅实现了近岸浪花,这里偷个懒也只实现近岸浪花吧

  • 实现
    FoamMask基于深度实现,使用Foam Cutoff来控制Foam区域

    // Near-shore foam: a depth-based mask limits foam to shallow water, a
    // panning foam texture is hard-cut (step) against that mask, and the
    // result is tinted and attenuated by the same mask.
    half3 GetFoam(half depthFade, float2 uv)
    {
      half3 result = half3(0.h, 0.h, 0.h);

      // Sanitize user parameters; /10 rescales them to a friendlier range.
      _FoamDepthFade = max(0.0001h, _FoamDepthFade) / 10.h;
      _FoamTiling = max(0.0001h, _FoamTiling) / 10.h;

      // Restrict foam to the shore: mask is ~1 at zero depth and falls off
      // as depthFade grows. NOTE(review): smoothstep is invoked with a
      // variable lower edge and a constant x of 1 — unconventional argument
      // order, but it is how this port shapes the falloff; do not "fix" it.
      half foamMask = depthFade / _FoamDepthFade + 0.1h;
      foamMask = smoothstep(foamMask, 1.h - _FoamFade, 1.h);
      foamMask = 1.h - foamMask;

      // Sample the scrolling foam texture (Foam Cutoff controls coverage).
      float2 foamUV = Panner(uv * _FoamTiling, _Time.y / 10.h, half2(_FoamSpeed, _FoamSpeed));
      half foamTex = SAMPLE_TEXTURE2D(_FoamTex, sampler_FoamTex, foamUV).r;
      // Hard threshold: a weaker mask (deeper water) cuts away more foam.
      half cutOff = foamMask * _FoamCutoff;
      foamTex = step(cutOff, foamTex);

      // Tint, apply the tint's alpha, and fade with the shore mask.
      half4 temp = _FoamTint * foamTex;
      result = temp.rgb * temp.a * foamMask;

      return result;
    }
    
    half3 foamColor = GetFoam(defaultDepthFade, positionWS.xz);
    surfaceData.albedo += foamColor;
    
  • 效果

Wave

  • 老朋友了GerstnerWave,考虑到移动端的性能,最多叠加4个波

  • 实现

    // Displaces an object-space vertex by one Gerstner wave and accumulates
    // the analytic tangent/binormal derivatives so the caller can rebuild
    // the surface normal. `direction` is packed in [0,1] and remapped to
    // [-1,1] below.
    void GerstnerWave(inout float3 positionOS, inout float3 tangentWS, inout float3 binormalWS, half direction)
    {
      // FIX: use full-precision PI. The truncated 3.14 skews both the
      // direction remap and the wavenumber (error ~0.05%).
      const float PI_F = 3.14159265f;

      direction = direction * 2 - 1;
      float2 d = normalize(float2(cos(PI_F * direction), sin(PI_F * direction)));
      float k = 2 * PI_F / _WaveLength; // angular wavenumber: 2*pi / wavelength
      float f = k * (dot(d, positionOS.xz) - _G * _Time.y); // wave phase
      // NOTE(review): canonical Gerstner phase speed is sqrt(g/k); here _G is
      // used directly as a tunable speed — confirm that is intentional.
      float a = _Steepness / k; // amplitude; dividing by k keeps the crest from looping over itself

      // d/dx of the displaced position.
      tangentWS += float3(
      -d.x * d.x * (_Steepness * sin(f)),
      d.x * (_Steepness * cos(f)),
      -d.x * d.y * (_Steepness * sin(f))
      );

      // d/dz of the displaced position.
      binormalWS += float3(
      -d.x * d.y * (_Steepness * sin(f)),
      d.y * (_Steepness * cos(f)),
      -d.y * d.y * (_Steepness * sin(f))
      );

      // Circular particle motion: horizontal along d, vertical along y.
      positionOS += float3(
      d.x * (a * cos(f)),
      a * sin(f),
      d.y * (a * cos(f))
      );
    }
    
    // Sums four Gerstner waves (one per _WindDirection component — capped at
    // four for mobile) and rebuilds the wave normal from the accumulated
    // tangent/binormal.
    void GerstnerWave4(inout float3 positionOS, inout float3 normalWS)
    {
      // FIX 1: normalWS must be `inout` — it was previously passed by value,
      // so the `normalWS += ...` result was silently discarded and the
      // caller's normal was never updated.
      _Steepness = max(0.h, _Steepness);
      _WaveLength = max(0.0001h, _WaveLength);
      _G = max(0.h, _G);

      float3 tangent = float3(1, 0, 0);
      float3 binormal = float3(0, 0, 1);

      GerstnerWave(positionOS, tangent, binormal, _WindDirection.x);
      GerstnerWave(positionOS, tangent, binormal, _WindDirection.y);
      GerstnerWave(positionOS, tangent, binormal, _WindDirection.z);
      GerstnerWave(positionOS, tangent, binormal, _WindDirection.w);

      // FIX 2: cross(binormal, tangent) — for the initial frame
      // (0,0,1)x(1,0,0) = (0,1,0), i.e. an upward normal. The previous
      // cross(tangent, binormal) produced the downward (0,-1,0) vector.
      normalWS += cross(binormal, tangent);
    }
    
    // VS中
    GerstnerWave4(i.positionOS, i.normalOS);
    
  • 效果

最终效果

项目链接

URPStylizedWater

reference

Stylized Water Shader

The Secrets of Colour Interpolation

Simulating Waves Using The Water Waves Asset

https://zhuanlan.zhihu.com/p/357714920

UnityURP-MobileScreenSpacePlanarReflection

Unity URP实现屏幕空间平面反射(SSPR)


他们曾如此骄傲的活过,贯彻始终