SHADE_Y3/TempShaderFolder/TestCubeVs.glsl

#version 450
#extension GL_KHR_vulkan_glsl : enable
//#include "ShaderDescriptorDefinitions.glsl"
layout(location = 0) in vec3 aVertexPos;
layout(location = 1) in vec2 aUV;
layout(location = 2) in vec3 aNormal;
layout(location = 3) in vec3 aTangent;
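// Per-instance world (model) matrix; a mat4 vertex attribute occupies four
// consecutive locations (4, 5, 6 and 7), so the next free location is 8.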
layout(location = 4) in mat4 worldTransform;
//layout(std140, push_constant) uniform TestPushConstant
//{
//    mat4 pvMat;
//    vec4 lightPosition;
//    vec4 eyePosition;
//    vec4 ambientColor;
//    vec4 lightColor;
//
//} testPushConstant;
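// Vertex-to-fragment interface block; only the vertex colour is passed
// through for now, the lighting-related outputs remain commented out.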
layout(location = 0) out struct
{
    //mat3 BTN;
    vec4 vertColor;
    //vec3 localSpacePosition;
    //vec2 uv;
    //vec3 localLightPosition;
    //vec3 localEyePosition;
} Out;
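// Per-view camera data bound at set 2, binding 0: the camera position and
// the view-projection matrix used to build gl_Position below.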
layout(set = 2, binding = 0) uniform CameraData
{
    vec4 position;
    mat4 vpMat;
} cameraData;
void main()
{
    //const float gamma = testPushConstant.eyePosition.w;
    //mat4 W2L = inverse(worldTransform);
    //// Since the attributes are instanced, we want the light and eye (camera) positions in local space
    //Out.localLightPosition = vec3(W2L * vec4(testPushConstant.lightPosition.xyz, 1.0f));
    //Out.localEyePosition = vec3(W2L * vec4(testPushConstant.eyePosition.xyz, 1.0f));
    //vec3 biTangent = normalize(cross(aNormal, aTangent));
    //gl_Position = testPushConstant.pvMat * worldTransform * vec4(aVertexPos, 1.0);
    //// The sampled normal is in tangent space, so we need this matrix to convert
    //// the normal-map texel back into local space later on.
    //Out.BTN = mat3(aTangent, biTangent, aNormal);
    //Out.localSpacePosition = aVertexPos;
    //Out.uv = aUV;
    // render NDC first
    //gl_Position = vec4(aVertexPos, 1.0f);
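    // Transform the local-space vertex into clip space: the per-instance
    // worldTransform takes it to world space, then cameraData.vpMat applies
    // the view-projection transform.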
    gl_Position = cameraData.vpMat * worldTransform * vec4(aVertexPos, 1.0f);
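    // Temporary debug output: visualise the local-space position as a colour.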
    Out.vertColor = vec4(aVertexPos, 1.0f);
}