Shader "Unlit/ARKitBackground/AfterOpaques" { Properties { _textureY ("TextureY", 2D) = "white" {} _textureCbCr ("TextureCbCr", 2D) = "black" {} _HumanStencil ("HumanStencil", 2D) = "black" {} _HumanDepth ("HumanDepth", 2D) = "black" {} _EnvironmentDepth ("EnvironmentDepth", 2D) = "black" {} } SubShader { Name "ARKit Background (After Opaques)" Tags { "Queue" = "Background" "RenderType" = "Background" "ForceNoShadowCasting" = "True" } Pass { Name "ARKit Background Occlusion Handling" Cull Off ZTest GEqual ZWrite On Lighting Off LOD 100 Tags { "LightMode" = "Always" } HLSLPROGRAM #pragma vertex vert #pragma fragment frag #pragma multi_compile_local __ ARKIT_BACKGROUND_URP #pragma multi_compile_local __ ARKIT_HUMAN_SEGMENTATION_ENABLED ARKIT_ENVIRONMENT_DEPTH_ENABLED #if ARKIT_BACKGROUND_URP #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl" #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl" #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl" #define ARKIT_TEXTURE2D_HALF(texture) TEXTURE2D(texture) #define ARKIT_SAMPLER_HALF(sampler) SAMPLER(sampler) #define ARKIT_TEXTURE2D_FLOAT(texture) TEXTURE2D(texture) #define ARKIT_SAMPLER_FLOAT(sampler) SAMPLER(sampler) #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) SAMPLE_TEXTURE2D(texture,sampler,texcoord) #define ARKIT_DECLARE_CAMERA_DEPTH_TEXTURE(texture) TEXTURE2D_X(texture); SAMPLER(sampler##texture) #define ARKIT_SAMPLE_CAMERA_DEPTH_TEXTURE(texture, uv) SampleSceneDepth(uv) #else // Legacy RP #include "UnityCG.cginc" #define real4 half4 #define real4x4 half4x4 #define TransformObjectToHClip UnityObjectToClipPos #define FastSRGBToLinear GammaToLinearSpace #define ARKIT_TEXTURE2D_HALF(texture) UNITY_DECLARE_TEX2D_HALF(texture) #define ARKIT_SAMPLER_HALF(sampler) #define ARKIT_TEXTURE2D_FLOAT(texture) UNITY_DECLARE_TEX2D_FLOAT(texture) #define ARKIT_SAMPLER_FLOAT(sampler) #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) UNITY_SAMPLE_TEX2D(texture,texcoord) #define ARKIT_DECLARE_CAMERA_DEPTH_TEXTURE(texture) UNITY_DECLARE_DEPTH_TEXTURE(texture) #define ARKIT_SAMPLE_CAMERA_DEPTH_TEXTURE(texture, uv) SAMPLE_DEPTH_TEXTURE(texture, uv) #endif struct appdata { float3 position : POSITION; float2 texcoord : TEXCOORD0; }; struct v2f { float4 position : SV_POSITION; float2 texcoord : TEXCOORD0; float2 depthTexCoord : TEXCOORD1; }; struct fragment_output { real4 color : SV_Target; float depth : SV_Depth; }; // Vertex CBUFFER_START(UnityARFoundationPerFrame) // Device display transform is provided by the AR Foundation camera background renderer. float4x4 _UnityDisplayTransform; float _UnityCameraForwardScale; CBUFFER_END v2f vert (appdata v) { #if ARKIT_HUMAN_SEGMENTATION_ENABLED || ARKIT_ENVIRONMENT_DEPTH_ENABLED // Transform the position from object space to clip space. float4 position = TransformObjectToHClip(v.position); // Remap the texture coordinates based on the device rotation. float2 texcoord = mul(float4(v.texcoord, 1.0f, 1.0f), _UnityDisplayTransform).xy; v2f o; o.position = position; o.texcoord = texcoord; o.depthTexCoord = v.texcoord; #else // Don't Subtract depth when ARKit Occlusion is not enabled. 
            CBUFFER_START(ARKitColorTransformations)
            static const real4x4 s_YCbCrToSRGB = real4x4(
                real4(1.0h,  0.0000h,  1.4020h, -0.7010h),
                real4(1.0h, -0.3441h, -0.7141h,  0.5291h),
                real4(1.0h,  1.7720h,  0.0000h, -0.8860h),
                real4(0.0h,  0.0000h,  0.0000h,  1.0000h)
            );
            CBUFFER_END

            ARKIT_TEXTURE2D_HALF(_textureY);
            ARKIT_SAMPLER_HALF(sampler_textureY);
            ARKIT_TEXTURE2D_HALF(_textureCbCr);
            ARKIT_SAMPLER_HALF(sampler_textureCbCr);

            #if !ARKIT_BACKGROUND_URP
            ARKIT_DECLARE_CAMERA_DEPTH_TEXTURE(_CameraDepthTexture);
            #endif

            #if ARKIT_ENVIRONMENT_DEPTH_ENABLED
            ARKIT_TEXTURE2D_FLOAT(_EnvironmentDepth);
            ARKIT_SAMPLER_FLOAT(sampler_EnvironmentDepth);
            #elif ARKIT_HUMAN_SEGMENTATION_ENABLED
            ARKIT_TEXTURE2D_HALF(_HumanStencil);
            ARKIT_SAMPLER_HALF(sampler_HumanStencil);
            ARKIT_TEXTURE2D_FLOAT(_HumanDepth);
            ARKIT_SAMPLER_FLOAT(sampler_HumanDepth);
            #endif // ARKIT_ENVIRONMENT_DEPTH_ENABLED || ARKIT_HUMAN_SEGMENTATION_ENABLED

            inline float ConvertDistanceToDepth(float d)
            {
                // Account for scale.
                d = _UnityCameraForwardScale > 0.0 ? _UnityCameraForwardScale * d : d;

                // Clip any distances smaller than the near clip plane, and compute
                // the depth value from the distance.
                return (d < _ProjectionParams.y) ? 0.0f : ((1.0f / _ZBufferParams.z) * ((1.0f / d) - _ZBufferParams.w));
            }
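            // Note: this is the inverse of Unity's LinearEyeDepth helper, which
            // computes eyeDepth = 1 / (_ZBufferParams.z * z + _ZBufferParams.w).
            // Solving for the non-linear buffer value z given an eye-space
            // distance d gives z = ((1 / d) - _ZBufferParams.w) / _ZBufferParams.z,
            // which is the expression returned above.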
            fragment_output frag (v2f i)
            {
                fragment_output o;

                // Sample the video textures (in YCbCr).
                real4 ycbcr = real4(ARKIT_SAMPLE_TEXTURE2D(_textureY, sampler_textureY, i.texcoord).r,
                                    ARKIT_SAMPLE_TEXTURE2D(_textureCbCr, sampler_textureCbCr, i.texcoord).rg,
                                    1.0h);

                // Convert from YCbCr to sRGB.
                real4 videoColor = mul(s_YCbCrToSRGB, ycbcr);

            #if !UNITY_COLORSPACE_GAMMA
                // If rendering in linear color space, convert from sRGB to RGB.
                videoColor.xyz = FastSRGBToLinear(videoColor.xyz);
            #endif // !UNITY_COLORSPACE_GAMMA

                // Assume the background depth is the back of the depth clipping volume.
                // This is because the fragment shader will only run in places where
                // geometry was occluded.
                float depthValue = 0.0f;
                float cameraDepthValue = ARKIT_SAMPLE_CAMERA_DEPTH_TEXTURE(_CameraDepthTexture, i.depthTexCoord);

            #if ARKIT_ENVIRONMENT_DEPTH_ENABLED
                // Sample the environment depth (in meters).
                float envDistance = ARKIT_SAMPLE_TEXTURE2D(_EnvironmentDepth, sampler_EnvironmentDepth, i.texcoord).r;

                // Convert the distance to depth.
                depthValue = ConvertDistanceToDepth(envDistance);
            #elif ARKIT_HUMAN_SEGMENTATION_ENABLED
                // Check the human stencil, and skip non-human pixels.
                if (ARKIT_SAMPLE_TEXTURE2D(_HumanStencil, sampler_HumanStencil, i.texcoord).r > 0.5h)
                {
                    // Sample the human depth (in meters).
                    float humanDistance = ARKIT_SAMPLE_TEXTURE2D(_HumanDepth, sampler_HumanDepth, i.texcoord).r;

                    // Convert the distance to depth.
                    depthValue = ConvertDistanceToDepth(humanDistance);
                }
            #endif // end fragment shader variants for human body and environment depth occlusion

                // Keep the opaque geometry wherever it is at least as close as the
                // AR depth (0.0 is the far plane here), and only overwrite color
                // and depth where the AR depth is closer.
                if (cameraDepthValue >= depthValue)
                {
                    discard;
                }

                o.color = videoColor;
                o.depth = depthValue;
                return o;
            }

            ENDHLSL
        }
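        // The pass below renders the camera video itself. It mirrors the
        // occlusion pass above, but uses ZTest LEqual, never reads the scene
        // depth texture, and writes its color and depth unconditionally.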
        Pass
        {
            Name "AR Camera Background (ARKit)"
            Cull Off
            ZTest LEqual
            ZWrite On
            Lighting Off
            LOD 100
            Tags { "LightMode" = "Always" }

            HLSLPROGRAM

            #pragma vertex vert
            #pragma fragment frag

            #pragma multi_compile_local __ ARKIT_BACKGROUND_URP
            #pragma multi_compile_local __ ARKIT_HUMAN_SEGMENTATION_ENABLED ARKIT_ENVIRONMENT_DEPTH_ENABLED

            #if ARKIT_BACKGROUND_URP

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"

            #define ARKIT_TEXTURE2D_HALF(texture) TEXTURE2D(texture)
            #define ARKIT_SAMPLER_HALF(sampler) SAMPLER(sampler)
            #define ARKIT_TEXTURE2D_FLOAT(texture) TEXTURE2D(texture)
            #define ARKIT_SAMPLER_FLOAT(sampler) SAMPLER(sampler)
            #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) SAMPLE_TEXTURE2D(texture,sampler,texcoord)

            #else // Legacy RP (Built-in Render Pipeline)

            #include "UnityCG.cginc"

            #define real4 half4
            #define real4x4 half4x4
            #define TransformObjectToHClip UnityObjectToClipPos
            #define FastSRGBToLinear GammaToLinearSpace

            #define ARKIT_TEXTURE2D_HALF(texture) UNITY_DECLARE_TEX2D_HALF(texture)
            #define ARKIT_SAMPLER_HALF(sampler)
            #define ARKIT_TEXTURE2D_FLOAT(texture) UNITY_DECLARE_TEX2D_FLOAT(texture)
            #define ARKIT_SAMPLER_FLOAT(sampler)
            #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) UNITY_SAMPLE_TEX2D(texture,texcoord)

            #endif

            struct appdata
            {
                float3 position : POSITION;
                float2 texcoord : TEXCOORD0;
            };

            struct v2f
            {
                float4 position : SV_POSITION;
                float2 texcoord : TEXCOORD0;
            };

            struct fragment_output
            {
                real4 color : SV_Target;
                float depth : SV_Depth;
            };

            CBUFFER_START(UnityARFoundationPerFrame)
            // Device display transform is provided by the AR Foundation camera background renderer.
            float4x4 _UnityDisplayTransform;
            float _UnityCameraForwardScale;
            CBUFFER_END

            v2f vert (appdata v)
            {
                // Transform the position from object space to clip space.
                float4 position = TransformObjectToHClip(v.position);

                // Remap the texture coordinates based on the device rotation.
                float2 texcoord = mul(float4(v.texcoord, 1.0f, 1.0f), _UnityDisplayTransform).xy;

                v2f o;
                o.position = position;
                o.texcoord = texcoord;
                return o;
            }

            CBUFFER_START(ARKitColorTransformations)
            static const real4x4 s_YCbCrToSRGB = real4x4(
                real4(1.0h,  0.0000h,  1.4020h, -0.7010h),
                real4(1.0h, -0.3441h, -0.7141h,  0.5291h),
                real4(1.0h,  1.7720h,  0.0000h, -0.8860h),
                real4(0.0h,  0.0000h,  0.0000h,  1.0000h)
            );
            CBUFFER_END

            ARKIT_TEXTURE2D_HALF(_textureY);
            ARKIT_SAMPLER_HALF(sampler_textureY);
            ARKIT_TEXTURE2D_HALF(_textureCbCr);
            ARKIT_SAMPLER_HALF(sampler_textureCbCr);

            inline float ConvertDistanceToDepth(float d)
            {
                // Account for scale.
                d = _UnityCameraForwardScale > 0.0 ? _UnityCameraForwardScale * d : d;

                // Clip any distances smaller than the near clip plane, and compute
                // the depth value from the distance.
                return (d < _ProjectionParams.y) ? 0.0f : ((1.0f / _ZBufferParams.z) * ((1.0f / d) - _ZBufferParams.w));
            }

            #if ARKIT_ENVIRONMENT_DEPTH_ENABLED
            ARKIT_TEXTURE2D_FLOAT(_EnvironmentDepth);
            ARKIT_SAMPLER_FLOAT(sampler_EnvironmentDepth);
            #elif ARKIT_HUMAN_SEGMENTATION_ENABLED
            ARKIT_TEXTURE2D_HALF(_HumanStencil);
            ARKIT_SAMPLER_HALF(sampler_HumanStencil);
            ARKIT_TEXTURE2D_FLOAT(_HumanDepth);
            ARKIT_SAMPLER_FLOAT(sampler_HumanDepth);
            #endif // ARKIT_ENVIRONMENT_DEPTH_ENABLED || ARKIT_HUMAN_SEGMENTATION_ENABLED

            fragment_output frag (v2f i)
            {
                fragment_output o;

                // Sample the video textures (in YCbCr).
                real4 ycbcr = real4(ARKIT_SAMPLE_TEXTURE2D(_textureY, sampler_textureY, i.texcoord).r,
                                    ARKIT_SAMPLE_TEXTURE2D(_textureCbCr, sampler_textureCbCr, i.texcoord).rg,
                                    1.0h);

                // Convert from YCbCr to sRGB.
                real4 videoColor = mul(s_YCbCrToSRGB, ycbcr);

            #if !UNITY_COLORSPACE_GAMMA
                // If rendering in linear color space, convert from sRGB to RGB.
                videoColor.xyz = FastSRGBToLinear(videoColor.xyz);
            #endif // !UNITY_COLORSPACE_GAMMA

                // Assume the background depth is the back of the depth clipping volume.
                float depthValue = 0.0f;

            #if ARKIT_ENVIRONMENT_DEPTH_ENABLED
                // Sample the environment depth (in meters).
                float envDistance = ARKIT_SAMPLE_TEXTURE2D(_EnvironmentDepth, sampler_EnvironmentDepth, i.texcoord).r;

                // Convert the distance to depth.
                depthValue = ConvertDistanceToDepth(envDistance);
            #elif ARKIT_HUMAN_SEGMENTATION_ENABLED
                // Check the human stencil, and skip non-human pixels.
                if (ARKIT_SAMPLE_TEXTURE2D(_HumanStencil, sampler_HumanStencil, i.texcoord).r > 0.5h)
                {
                    // Sample the human depth (in meters).
                    float humanDistance = ARKIT_SAMPLE_TEXTURE2D(_HumanDepth, sampler_HumanDepth, i.texcoord).r;

                    // Convert the distance to depth.
                    depthValue = ConvertDistanceToDepth(humanDistance);
                }
            #endif // end fragment shader variants for human body and environment depth occlusion

                o.color = videoColor;
                o.depth = depthValue;
                return o;
            }

            ENDHLSL
        }
    }
}
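// Usage note: this shader is driven by AR Foundation rather than used on
// materials directly. The camera background and occlusion managers are
// expected to bind _textureY/_textureCbCr, the stencil/depth textures,
// _UnityDisplayTransform, and the ARKIT_* keywords at runtime; the Properties
// block above only supplies placeholder textures.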