Skip to content

Instantly share code, notes, and snippets.

@d4rkc0d3r
Last active February 3, 2026 03:40
Show Gist options
  • Select an option

  • Save d4rkc0d3r/886be3b6c233349ea6f8b4a7fcdacab3 to your computer and use it in GitHub Desktop.

Select an option

Save d4rkc0d3r/886be3b6c233349ea6f8b4a7fcdacab3 to your computer and use it in GitHub Desktop.
Sample shader showing how to fix up the unity_CameraInvProjection matrix so it works correctly
Shader "d4rkpl4y3r/Debug/Inverse Projection Matrix"
{
Properties
{
// 0 = use the fixed-up unity_CameraInvProjection, 1 = use a true inverse of UNITY_MATRIX_P (ground truth).
_UseTrueInverse("Use True Inverse", Range(0, 1)) = 0
}
SubShader
{
Tags { "Queue"="Geometry" }
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
// Material property mirrored into the CG program; acts as a lerp factor between the two matrices.
float _UseTrueInverse;
// Camera depth texture, declared via Unity's macro so it works for both stereo and mono rendering.
UNITY_DECLARE_DEPTH_TEXTURE(_CameraDepthTexture);
struct v2f
{
float4 pos : SV_POSITION;
UNITY_VERTEX_OUTPUT_STEREO
};
// Forward declarations; definitions are below the fragment shader.
float4x4 inverse(float4x4 input);
float4 SVPositionToClipPos(float4 pos);
// thanks lyuma & cnlohr for the base of this.
// it needed some modifications accounting for flipped projection & mirrors
//
// Transforms a clip space position into view space using
// unity_CameraInvProjection plus empirically found fixups. The result is
// NOT perspective-divided; callers must divide by .w themselves.
float4 ClipToViewPos(float4 clipPos)
{
// Perspective divide to get normalized device coordinates.
float4 normalizedClipPos = float4(clipPos.xyz / clipPos.w, 1);
// Undo the D3D reversed-z, [0,1] depth convention: flip z and remap it to
// [-1,1], the OpenGL-style range that unity_CameraInvProjection expects.
normalizedClipPos.z = 1 - normalizedClipPos.z;
normalizedClipPos.z = normalizedClipPos.z * 2 - 1;
float4x4 invP = unity_CameraInvProjection;
// do projection flip on this, found empirically
// (_ProjectionParams.x is -1 with a flipped projection, 1 otherwise)
invP._24 *= _ProjectionParams.x;
// this is needed for mirrors to work properly, found empirically
invP._42 *= -1;
float4 viewPos = mul(invP, normalizedClipPos);
// and the y coord needs to flip for flipped projection, found empirically
viewPos.y *= _ProjectionParams.x;
return viewPos;
}
// the same as the previous function, but it precalculates all the operations as one matrix
// Composition (right to left): flipZ (z -> 1-z), scaleZ ([0,1] -> [-1,1]),
// unity_CameraInvProjection, flipY (y *= _ProjectionParams.x), followed by
// the same two empirically found per-entry fixups as ClipToViewPos.
float4x4 CreateClipToViewMatrix()
{
// z -> 1 - z (undo reversed depth); note HLSL float4x4 literals are row-major here.
float4x4 flipZ = float4x4(1, 0, 0, 0,
0, 1, 0, 0,
0, 0, -1, 1,
0, 0, 0, 1);
// z -> 2z - 1 ([0,1] depth range to [-1,1])
float4x4 scaleZ = float4x4(1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 2, -1,
0, 0, 0, 1);
float4x4 invP = unity_CameraInvProjection;
// y -> y * _ProjectionParams.x (undo flipped projection, if any)
float4x4 flipY = float4x4(1, 0, 0, 0,
0, _ProjectionParams.x, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1);
float4x4 result = mul(scaleZ, flipZ);
result = mul(invP, result);
result = mul(flipY, result);
// the two per-entry fixups from ClipToViewPos, baked into the combined matrix
result._24 *= _ProjectionParams.x;
result._42 *= -1;
return result;
}
// this shader is made for PC VR & desktop in dx11 mode, don't expect it to work anywhere else
//
// Reconstructs the world position behind each pixel from the depth texture
// and visualizes it; the top third of the screen instead draws a debug grid
// comparing the true inverse projection with the fixed-up constructed one.
float4 frag(v2f i) : SV_Target
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
// invert the actual projection matrix as ground truth
float4x4 trueInvP = inverse(UNITY_MATRIX_P);
// fixup the provided inverse projection matrix
// this is much faster than doing full matrix inversion
float4x4 constructedInvP = CreateClipToViewMatrix();
// get the clip position and sample the depth texture
float4 clipPos = SVPositionToClipPos(i.pos);
float4 uv = ComputeScreenPos(clipPos);
float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, uv.xy / uv.w);
// _UseTrueInverse is a material property in [0,1]; 0 selects the constructed matrix.
float4x4 invP = lerp(constructedInvP, trueInvP, _UseTrueInverse);
// construct the clip space position from SV_Position & the depth texture
// then simply multiply it by the inverse projection matrix
float4 viewPos = mul(invP, float4(clipPos.xy / clipPos.w, depth, 1));
// don't forget to do the perspective divide
viewPos = float4(viewPos.xyz / viewPos.w, 1);
// there you go, world space position is just one more matrix multiplication away
float3 worldPos = mul(UNITY_MATRIX_I_V, viewPos).xyz;
// some basic debug visualization of the different matrices and their differences
// (SV_Position y starts at the top on D3D, so this covers the top third of the screen)
if (i.pos.y < _ScreenParams.y / 3)
{
// each matrix entry is drawn as a 12x12 pixel cell
const int size = 12;
uint2 pixelPos = i.pos / size;
// a 10x10 cell tile: two 4x4 matrices per axis plus a 1-cell gap after each
pixelPos = pixelPos % 10;
// which of the 2x2 arrangement of matrices this cell belongs to
bool2 isSecondMatrix = pixelPos / 5;
pixelPos = pixelPos - isSecondMatrix * 5;
// the 5th row/column of each tile is the separator between matrices
if (any(pixelPos == 4))
return 0;
// bottom-right quadrant is unused, draw it white
if (isSecondMatrix.x && isSecondMatrix.y)
return 1;
// quadrants: trueInvP, constructedInvP, and their difference
float4x4 mat = trueInvP;
if (isSecondMatrix.y)
mat = constructedInvP;
if (isSecondMatrix.x)
mat = trueInvP - constructedInvP;
float entry = mat[pixelPos.x][pixelPos.y];
// color code (checked in this order): exact 0 = light blue, ~0 = dark blue,
// > 0.01 = green, < -0.01 = red, small negative = dark red, small positive = dark green
if (entry == 0)
return float4(0.02, 0.02, 1, 1);
if (abs(entry) < 0.00001)
return float4(0, 0, 0.5, 1);
if (entry > 0.01)
return float4(0, 1, 0, 1);
if (entry < -0.01)
return float4(1, 0, 0, 1);
if (entry < 0)
return float4(0.5, 0, 0, 1);
return float4(0, 0.5, 0, 1);
}
// visualize the reconstructed world position as a repeating color pattern
return float4(GammaToLinearSpace(saturate(frac(worldPos * 10))), 1);
}
// Converts an SV_Position pixel coordinate back into a clip space position.
// The returned position has w == 1, i.e. it is already perspective-divided in xy.
float4 SVPositionToClipPos(float4 pos)
{
    // Map pixel coordinates to NDC in [-1, 1]; clip space y points up while
    // screen space y points down, hence the sign flip on y.
    float2 ndc = (pos.xy / _ScreenParams.xy) * 2 - 1;
    ndc *= int2(1, -1);
    float4 clipPos = float4(ndc, pos.z, 1);
#ifdef UNITY_SINGLE_PASS_STEREO
    // In single pass stereo both eyes share one double-wide target, so the
    // right eye's x range has to be shifted back into [-1, 1].
    clipPos.x -= 2 * unity_StereoEyeIndex;
#endif
    return clipPos;
}
// Standard vertex shader: transforms the vertex and sets up the stereo/instancing
// state the fragment shader's UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX relies on.
v2f vert (appdata_base v)
{
v2f o;
// instance ID must be set up before the stereo output macro below
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_OUTPUT(v2f, o);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
o.pos = UnityObjectToClipPos(v.vertex);
return o;
}
// General 4x4 matrix inverse via the adjugate method: build the matrix of
// signed 3x3 cofactors, transpose it, and divide by the determinant.
// Assumes the input is invertible (determinant != 0).
float4x4 inverse(float4x4 input)
{
// determinant of the 3x3 submatrix formed by the three given row swizzles of `input`
#define minor(a,b,c) determinant(float3x3(input.a, input.b, input.c))
// cofactor (i,j) excludes row i and column j and carries sign (-1)^(i+j)
float4x4 cofactors = float4x4(
minor(_22_23_24, _32_33_34, _42_43_44),
-minor(_21_23_24, _31_33_34, _41_43_44),
minor(_21_22_24, _31_32_34, _41_42_44),
-minor(_21_22_23, _31_32_33, _41_42_43),
-minor(_12_13_14, _32_33_34, _42_43_44),
minor(_11_13_14, _31_33_34, _41_43_44),
-minor(_11_12_14, _31_32_34, _41_42_44),
minor(_11_12_13, _31_32_33, _41_42_43),
minor(_12_13_14, _22_23_24, _42_43_44),
-minor(_11_13_14, _21_23_24, _41_43_44),
minor(_11_12_14, _21_22_24, _41_42_44),
-minor(_11_12_13, _21_22_23, _41_42_43),
-minor(_12_13_14, _22_23_24, _32_33_34),
minor(_11_13_14, _21_23_24, _31_33_34),
-minor(_11_12_14, _21_22_24, _31_32_34),
minor(_11_12_13, _21_22_23, _31_32_33)
);
#undef minor
return transpose(cofactors) / determinant(input);
}
ENDCG
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment