Hi All !
I use glslang tool for compilation GLSL/HLSL to SPIR-V.
We have two identical HLSL and GLSL vertex shaders. vkCreateGraphicsPipelines returns VK_ERROR_INITIALIZATION_FAILED, if I use SPIR-V code from HLSL, but if I use SPIR-V code from GLSL it works correctly.
Note: bug happens only AMD GPU! (AMD R7 240, AMD R7 350X) I use the last drivers 09.2018. On nVidia/Intel it works correctly.
HLSL Vertex Shader:
#ifndef NUM_BONES
#define NUM_BONES 47
#endif
#ifndef NUM_WEIGHT
#define NUM_WEIGHT 4
#endif
typedef int4 BlendIndicesType;
cbuffer : register(b0) {
uniform row_major float4x4 ModelViewProjection;
uniform row_major float4x4 MatTexture;
uniform float4 LightDir; //Light position in world space
uniform row_major float3x4 bones[NUM_BONES];
};
struct VS {
layout(location = 0) float4 Pos : POSITION;
layout(location = 1) float4 BlendWeights : BLENDWEIGHT;
layout(location = 7) BlendIndicesType BlendIndices : BLENDINDICES;
layout(location = 2) float3 Normal : NORMAL;
layout(location = 😎 float2 Tex0 : TEXCOORD0;
};
struct PS {
float4 Pos : POSITION;
layout(location = 0) float2 Tex0 : TEXCOORD0;
layout(location = 1) float3 Normal : TEXCOORD1;
layout(location = 2) float3 LightDir : TEXCOORD2;
layout(location = 3) float4 shadowCrd: TEXCOORD3;
};
PS vsMain(in VS In)
{
PS Out;
float4 Pos = float4(0.0f, 0.0f, 0.0f, 1.0f);
float3 Normal = float3(0.0f, 0.0f, 0.0f);
for(int i = 0; i < NUM_WEIGHT; ++i) {
const int boneIndex = In.BlendIndices;
Pos.xyz += (mul(bones[boneIndex], In.Pos).xyz * In.BlendWeights);
Normal.xyz += (mul((float3x3)bones[boneIndex], In.Normal) * In.BlendWeights);
}
Out.Pos = mul(In.Pos, ModelViewProjection);
Out.Tex0 = In.Tex0;
Out.shadowCrd = mul(Pos, MatTexture);
Out.LightDir.xyz = -LightDir.xyz;
return Out;
}
glsl Vertex Shader:
//
#version 450 core
#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable
#ifndef NUM_BONES
#define NUM_BONES 47
#endif
#ifndef NUM_WEIGHT
#define NUM_WEIGHT 4
#endif
layout (std140, binding = 0) uniform UBODecl {
mat4 ModelViewProjection;
vec4 LightDir;
mat4 MatTexture;
vec4 bones[3 * NUM_BONES];
} UBO;
layout(location = 0) in vec4 POSITION; // position in projection space
layout(location = 2) in vec3 NORMAL;
layout(location = 😎 in vec2 TEXCOORD0; // texture coordinate
layout(location = 1) in vec4 BLENDWEIGHT;
layout(location = 7) in ivec4 BLENDINDICES;
layout(location = 0) out vec2 Tex0;
layout(location = 1) out vec3 Normal;
layout(location = 2) out vec3 LightDir;
layout(location = 3) out vec4 shadowCrd;
void main()
{
vec4 Pos = vec4(0.0, 0.0, 0.0, 1.0);
vec4 position = vec4(POSITION.xyz, 1.0);
Normal = vec3(0.0, 0.0, 0.0);
ivec4 BoneIndices = ivec4(BLENDINDICES);
vec4 BlendWeight = BLENDWEIGHT;
for(int i = 0; i < NUM_WEIGHT; ++i) {
int index = 3 * BoneIndices;
float weight = BlendWeight;
vec3 v;
v.x = dot(UBO.bones[index + 0], position);
v.y = dot(UBO.bones[index + 1], position);
v.z = dot(UBO.bones[index + 2], position);
Pos.xyz += (v.xyz * weight);
v.x = dot(UBO.bones[index + 0].xyz, NORMAL);
v.y = dot(UBO.bones[index + 1].xyz, NORMAL);
v.z = dot(UBO.bones[index + 2].xyz, NORMAL);
Normal.xyz += (v.xyz * weight);
}
gl_Position = UBO.ModelViewProjection * Pos;
Tex0 = TEXCOORD0;
shadowCrd = UBO.MatTexture * Pos;
LightDir = -UBO.LightDir.xyz;
}
What do I do wrong ?
I can provide additional information.
I attached The simple project.
Solved! Go to Solution.
hi xhuang, I have fixed hlsl shader, we need use SV_POSITION semantic insteaded of POSITION, now it works correctly:
struct VS {
float4 Pos : SV_POSITION;
....
};
struct PS {
float4 Pos : SV_POSITION;
};
Hello, thanks for reporting this. I will triage the problem soon and get you back.
hi xhuang, I have fixed hlsl shader, we need use SV_POSITION semantic insteaded of POSITION, now it works correctly:
struct VS {
float4 Pos : SV_POSITION;
....
};
struct PS {
float4 Pos : SV_POSITION;
};
That's great!
If you have any further questions/problems to discuss, please let me know, thanks.