I have a very simple vertex/pixel shader for rendering a bunch of instances with a very simple lighting model.
When testing, I noticed that the instances were becoming dimmer as the scaling of the world transform increased. I determined that this was because the value of float3 normal = mul(input.Normal, WorldInverseTranspose); was shrinking as the world transform's scale grew, although its direction still appeared to be correct. To address this, I had to add normal = normalize(normal);.
I do not, for the life of me, understand why. WorldInverseTranspose is built from the full world transform (SetValueTranspose(Matrix.Invert(world * modelTransforms[mesh.ParentBone.Index]))), and the calculation appears to be correct as written.
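To put a number on the shrinking (my own worked example, assuming for simplicity that the world transform is just a uniform scale by a factor s): the inverse transpose of s·I is (1/s)·I, so mul(input.Normal, WorldInverseTranspose) comes out as input.Normal / s. For a unit input normal that has length 1/s, which drops as the scale grows, and since the diffuse term is a dot product against that vector, the lighting dims accordingly.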
Why does the value require normalization?

The shader code is below:
float4 CalculatePositionInWorldViewProjection(float4 position, matrix world, matrix view, matrix projection)
{
float4 worldPosition = mul(position, world);
float4 viewPosition = mul(worldPosition, view);
return mul(viewPosition, projection);
}
VertexShaderOutput VS(VertexShaderInput input)
{
VertexShaderOutput output;
matrix instanceWorldTransform = mul(World, transpose(input.InstanceTransform));
output.Position = CalculatePositionInWorldViewProjection(input.Position, instanceWorldTransform, View, Projection);
float3 normal = mul(input.Normal, WorldInverseTranspose);
normal = normalize(normal); // without this, the lighting dims as the world scale increases
float lightIntensity = -dot(normal, DiffuseLightDirection);
output.Color = float4(saturate(DiffuseColor * DiffuseIntensity).xyz * lightIntensity, 1.0f);
output.TextureCoordinate = SpriteSheetBoundsToTextureCoordinate(input.TextureCoordinate, input.SpriteSheetBounds);
return output;
}
float4 PS(VertexShaderOutput input) : SV_Target
{
return Texture.Sample(Sampler, input.TextureCoordinate) * input.Color;
}