I am generating normals from the depth texture like this in Unity.

```
// Unprojects a screen UV + depth-buffer value back to a position via the
// inverse view-projection matrix (UNITY_MATRIX_I_VP maps clip -> world).
// depth is expected to be the value straight from the depth buffer.
float3 reconstructPosition(float2 uv, float depth)
{
// Map UV [0,1] to clip-space XY [-1,1]; V is flipped here —
// NOTE(review): this flip is platform-dependent (D3D-style UV origin), verify per target.
float2 ndc = float2(uv.x, 1.0f - uv.y) * 2.0f - 1.0f;
float4 clipPos = float4(ndc, depth, 1.0f);
float4 homogeneous = mul(UNITY_MATRIX_I_VP, clipPos);
// Perspective divide to get the cartesian position.
return homogeneous.xyz / homogeneous.w;
}
// Derives a normal from the depth buffer by reconstructing three nearby
// positions (center, one pixel right, one pixel up) and crossing the edges.
// Returns the normal remapped from [-1,1] to [0,1] for storage in a texture.
//
// BUG FIX: the depth samples were being run through Linear01Depth before
// reconstruction. reconstructPosition multiplies by UNITY_MATRIX_I_VP, which
// expects the RAW (non-linear) depth-buffer value — linearizing first skews
// the reconstructed positions and therefore the normals. Sample raw depth.
//
// Since UNITY_MATRIX_I_VP is the inverse *view-projection* matrix, the
// reconstructed positions — and hence this normal — are already in world
// space (no extra camera-to-world transform is needed).
float3 NormalFromDepth(float4 screenPos)
{
// Pixel footprint in UV units: _TexelSize.xy == (1/width, 1/height),
// which equals the previous 1.0 / _TexelSize.zw.
float2 texel = _CameraDepthTexture_TexelSize.xy;
float2 uv0 = screenPos.xy / screenPos.w; // center
float2 uv1 = uv0 + float2(texel.x, 0.0); // one pixel right
float2 uv2 = uv0 + float2(0.0, texel.y); // one pixel up

// Raw depth-buffer values — do NOT linearize before unprojecting.
float depth0 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, uv0);
float depth1 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, uv1);
float depth2 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, uv2);

float3 P0 = reconstructPosition(uv0, depth0);
float3 P1 = reconstructPosition(uv1, depth1);
float3 P2 = reconstructPosition(uv2, depth2);

// Cross the two surface tangents to get the normal.
float3 normal = cross(P2 - P0, P1 - P0);
// Flip Z to match the original code's handedness convention —
// NOTE(review): verify this flip against your target encoding/convention.
normal.z = -normal.z;
// Normalize, then remap [-1,1] -> [0,1] for output.
normal = normalize(normal) * 0.5 + 0.5;
return normal;
}
```

This gives me the normal in camera space, but I wonder how I can convert the resulting normal into world space.