internal/graphicsdriver/opengl, metal, directx: skip multiplying a scale when a color matrix is used

Hajime Hoshi 2022-10-01 18:14:22 +09:00
parent 4203a3b68a
commit b457dc3307
3 changed files with 3 additions and 7 deletions
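Rationale (a hedged reading of the change, not part of the original commit message): on the color-matrix path the CPU side is expected to pass a per-vertex color scale of (1, 1, 1, 1), so multiplying by that scale in the fragment shader is an identity operation and can be dropped. Below is a minimal GLSL sketch of the resulting structure; it reuses names from the OpenGL shader in this commit (color_matrix_body, color_matrix_translation, varying_color_scale, USE_COLOR_MATRIX), while the surrounding shader, texture0, and varying_tex are simplified, hypothetical stand-ins.

// Hypothetical, simplified fragment shader; only the USE_COLOR_MATRIX
// branching mirrors the real code in the hunks below.
precision mediump float;

uniform sampler2D texture0;            // assumed sampler name
uniform mat4 color_matrix_body;
uniform vec4 color_matrix_translation;
varying vec2 varying_tex;              // assumed varying name
varying vec4 varying_color_scale;

void main(void) {
	vec4 color = texture2D(texture0, varying_tex);
#if defined(USE_COLOR_MATRIX)
	// Un-premultiply alpha; when alpha is 0, the added (1.0 - sign) term avoids division by zero.
	color.rgb /= color.a + (1.0 - sign(color.a));
	color = (color_matrix_body * color) + color_matrix_translation;
	// Premultiply alpha again.
	color.rgb *= color.a;
	// `color *= varying_color_scale;` is omitted here: the scale is assumed
	// to be (1, 1, 1, 1) whenever a color matrix is used.
	// Clamp the output.
	color.rgb = min(color.rgb, color.a);
#else
	// Without a color matrix, the scale carries the actual per-vertex tint.
	color *= varying_color_scale;
#endif
	gl_FragColor = color;
}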

directx (HLSL pixel shader):

@ -221,8 +221,7 @@ float4 PSMain(PSInput input) : SV_TARGET {
color = mul(color_matrix_body, color) + color_matrix_translation;
// Premultiply alpha
color.rgb *= color.a;
- // Apply color scale.
- color *= input.color;
+ // Do not apply the color scale as the scale should always be (1, 1, 1, 1) when a color matrix is used.
// Clamp the output.
color.rgb = min(color.rgb, color.a);
return color;

metal (MSL fragment shader):

@ -197,7 +197,6 @@ struct FragmentShaderImpl {
c.rgb /= c.a + (1.0 - sign(c.a));
c = (color_matrix_body * c) + color_matrix_translation;
c.rgb *= c.a;
- c *= v.color;
c.rgb = min(c.rgb, c.a);
} else {
c *= v.color;

opengl (GLSL fragment shader):

@ -237,14 +237,12 @@ void main(void) {
color = (color_matrix_body * color) + color_matrix_translation;
// Premultiply alpha
color.rgb *= color.a;
- // Apply color scale.
- color *= varying_color_scale;
+ // Do not apply the color scale as the scale should always be (1, 1, 1, 1) when a color matrix is used.
// Clamp the output.
color.rgb = min(color.rgb, color.a);
# else
- // Apply color scale.
+ // Apply the color scale.
color *= varying_color_scale;
// No clamping needed as the color matrix shader is used then.
# endif // defined(USE_COLOR_MATRIX)
gl_FragColor = color;