VS_NORMALMAP_INPUT:
//Vertex shader input
struct VS_NORMALMAP_INPUT
{
    float3 Pos     : POSITION;
    float3 Normal  : NORMAL;
    float3 Tangent : TANGENT;
    float2 UV      : TEXCOORD0;
};
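
The plain texture vertex shader further down takes VS_BASIC_INPUT and returns VS_BASIC_OUTPUT, which are defined elsewhere in the effect file and not reproduced here. A minimal sketch of what they need to contain for that shader to compile (member names assumed, only the data that shader actually reads is shown):

//Vertex shader input/output for the plain texture shader (assumed definitions)
struct VS_BASIC_INPUT
{
    float3 Pos : POSITION;
    float2 UV  : TEXCOORD0;
};

struct VS_BASIC_OUTPUT
{
    float4 ProjPos : SV_POSITION;
    float2 UV      : TEXCOORD0;
};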

//Vertex shader output
struct VS_LIGHTING_OUTPUT
{
    float4 ProjPos     : SV_POSITION;   // 2D "projected" position for vertex (required output for vertex shader)
    float3 WorldPos    : POSITION;
    float3 WorldNormal : NORMAL;
    float3 Tangent     : TANGENT;
    float2 UV          : TEXCOORD0;
};

Vertex Shader:
//Vertex shader
VS_BASIC_OUTPUT VS_PlainTexture(VS_BASIC_INPUT vIn)
{
    VS_BASIC_OUTPUT vOut;

    float4 modelPos = float4(vIn.Pos, 1.0f);
    float4 worldPos = mul(modelPos, WorldMatrix);
    float4 viewPos  = mul(worldPos, ViewMatrix);
    vOut.ProjPos    = mul(viewPos, ProjMatrix);

    vOut.UV = vIn.UV;

    return vOut;
}

VS_LIGHTING_OUTPUT LightingTransformTex(VS_NORMALMAP_INPUT vIn)
{
    VS_LIGHTING_OUTPUT vOut;

    // Use world matrix passed from C++ to transform the input model vertex position into world space
    float4 modelPos = float4(vIn.Pos, 1.0f); // Promote to 1x4 so we can multiply by 4x4 matrix, put 1.0 in 4th element for a point (0.0 for a vector)
    float4 worldPos = mul(modelPos, WorldMatrix);
    vOut.WorldPos = worldPos.xyz;

    // Use camera matrices to further transform the vertex from world space into view space (camera's point of view) and finally into 2D "projection" space for rendering
    float4 viewPos = mul(worldPos, ViewMatrix);
    vOut.ProjPos = mul(viewPos, ProjMatrix);

    // Transform the vertex normal from model space into world space (almost same as first lines of code above)
    float4 modelNormal = float4(vIn.Normal, 0.0f); // Set 4th element to 0.0 this time as normals are vectors
    vOut.WorldNormal = mul(modelNormal, WorldMatrix).xyz;
    vOut.Tangent = vIn.Tangent;

    // Pass texture coordinates (UVs) on to the pixel shader, the vertex shader doesn't need them
    vOut.UV = vIn.UV;

    return vOut;
}
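
The vertex shaders above and the pixel shader below refer to a number of global variables, textures and samplers that are declared elsewhere in the .fx file and set from C++. They are not part of this listing; the following is a minimal sketch of plausible declarations, using the names the shader code refers to (the exact types and sampler state settings are assumptions):

// Per-frame / per-model variables set from C++ (assumed declarations)
float4x4 WorldMatrix;
float4x4 ViewMatrix;
float4x4 ProjMatrix;
float4x4 SpotlightViewMatrix;
float4x4 SpotlightProjMatrix;

float3 CameraPos;
float3 LightPos1;
float3 LightColour1;
float3 LightPos2;
float3 LightColour2;
float3 SpotlightPos;
float3 SpotlightFacing;
float3 SpotlightColour;
float  SpotlightCosAngle;
float3 AmbientColour;
float  SpecularPower;
float  ParallaxDepth;
float  DepthAdjust;   // Used by the commented-out shadow depth adjustment in the pixel shader

// Textures and samplers (assumed declarations)
Texture2D DiffuseMap;
Texture2D NormalMap;
Texture2D ShadowMap1;
SamplerState Trilinear
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = Wrap;
    AddressV = Wrap;
};
SamplerState PointClamp
{
    Filter = MIN_MAG_MIP_POINT;
    AddressU = Clamp;
    AddressV = Clamp;
};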

Pixel Shader:
float4 ShadowMapTex(VS_LIGHTING_OUTPUT vOut) : SV_Target
{
    // Renormalise the interpolated normal and tangent, then build the matrix that converts between tangent and model space
    float3 modelNormal = normalize(vOut.WorldNormal);
    float3 modelTangent = normalize(vOut.Tangent);
    float3 modelBiTangent = cross(modelNormal, modelTangent);
    float3x3 invTangentMatrix = float3x3(modelTangent, modelBiTangent, modelNormal);

    // Parallax mapping: offset the texture coordinates in the direction the camera views the surface
    float3 CameraDir = normalize(CameraPos - vOut.WorldPos.xyz);
    float3x3 invWorldMatrix = transpose(WorldMatrix);
    float3 cameraModelDir = normalize(mul(CameraDir, invWorldMatrix));
    float3x3 tangentMatrix = transpose(invTangentMatrix);
    float2 textureOffsetDir = mul(cameraModelDir, tangentMatrix);
    float texDepth = ParallaxDepth * (NormalMap.Sample(Trilinear, vOut.UV).a - 0.5f);
    float2 offsetTexCoord = vOut.UV + texDepth * textureOffsetDir;

    // Get the normal from the normal map and convert it into world space
    float3 textureNormal = 2.0f * NormalMap.Sample(Trilinear, offsetTexCoord) - 1.0f;
    float3 worldNormal = normalize(mul(mul(textureNormal, invTangentMatrix), WorldMatrix));

    // Light 1 - diffuse and specular (point light, attenuated by distance)
    float3 Light1Dir = normalize(LightPos1 - vOut.WorldPos.xyz);
    float3 Light1Dist = length(LightPos1 - vOut.WorldPos.xyz);
    float3 DiffuseLight1 = LightColour1 * max(dot(worldNormal.xyz, Light1Dir), 0) / Light1Dist;
    float3 halfway = normalize(Light1Dir + CameraDir);
    float3 SpecularLight1 = DiffuseLight1 * pow(max(dot(worldNormal.xyz, halfway), 0), SpecularPower);

    // Light 2 - diffuse and specular
    float3 Light2Dir = normalize(LightPos2 - vOut.WorldPos.xyz);
    float3 Light2Dist = length(LightPos2 - vOut.WorldPos.xyz);
    float3 DiffuseLight2 = LightColour2 * max(dot(worldNormal.xyz, Light2Dir), 0) / Light2Dist;
    halfway = normalize(Light2Dir + CameraDir);
    float3 SpecularLight2 = DiffuseLight2 * pow(max(dot(worldNormal.xyz, halfway), 0), SpecularPower);

    // Light 3 - spotlight with shadow map. Contributes nothing unless the pixel is inside the cone and not in shadow
    float3 diffuseLight3 = 0.0f;
    float3 specularLight3 = 0.0f;
    float4 SpotlightViewPos = mul(float4(vOut.WorldPos, 1.0f), SpotlightViewMatrix);
    float4 SpotlightProjPos = mul(SpotlightViewPos, SpotlightProjMatrix);
    float3 SpotlightDir = normalize(SpotlightPos - vOut.WorldPos.xyz);
    if (dot(SpotlightFacing, -SpotlightDir) > SpotlightCosAngle) //**** This condition needs to be written as the first exercise to get spotlights working
    {
        // Convert the pixel's position as seen by the spotlight into shadow map UVs
        float2 shadowUV = 0.5f * SpotlightProjPos.xy / SpotlightProjPos.w + float2(0.5f, 0.5f);
        shadowUV.y = 1.0f - shadowUV.y;

        float depthFromLight = SpotlightProjPos.z / SpotlightProjPos.w;// - DepthAdjust; //*** Adjustment so polygons don't shadow themselves
        if (depthFromLight < ShadowMap1.Sample(PointClamp, shadowUV).r)
        {
            float3 SpotlightDist = length(SpotlightPos - vOut.WorldPos.xyz);
            diffuseLight3 = SpotlightColour * max(dot(worldNormal.xyz, SpotlightDir), 0) / SpotlightDist;
            float3 halfway = normalize(SpotlightDir + CameraDir);
            specularLight3 = diffuseLight3 * pow(max(dot(worldNormal.xyz, halfway), 0), SpecularPower);
        }
    }

    // Sum the lighting from all lights
    float3 DiffuseLight = AmbientColour + DiffuseLight1 + DiffuseLight2 + diffuseLight3;
    float3 SpecularLight = SpecularLight1 + SpecularLight2 + specularLight3;

    // Combine lighting with texture colours
    float4 DiffuseMaterial = DiffuseMap.Sample(Trilinear, offsetTexCoord);
    float3 SpecularMaterial = DiffuseMaterial.a;

    float4 combinedColour;
    combinedColour.rgb = DiffuseMaterial * DiffuseLight + SpecularMaterial * SpecularLight;
    combinedColour.a = 1.0f; // No alpha processing in this shader, so just set it to 1

    return combinedColour;
}
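
The exampleTechnique parameter used by CModel::Load below refers to an effect technique whose vertex shader consumes this vertex data. A minimal sketch of such a technique declaration in the .fx file, pairing the two shaders above (the technique name and shader profiles are assumptions, using standard D3D10 effect syntax):

technique10 NormalMapShadowTex
{
    pass P0
    {
        SetVertexShader( CompileShader( vs_4_0, LightingTransformTex() ) );
        SetGeometryShader( NULL );
        SetPixelShader( CompileShader( ps_4_0, ShadowMapTex() ) );
    }
}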

Input Layout:
bool CModel::Load( const string& fileName, ID3D10EffectTechnique* exampleTechnique, bool tangents /*= false*/ ) // The commented out bit is the default parameter (can't write it here, only in the declaration)
{
    // Release any existing geometry in this object
    ReleaseResources();

    // Use CImportXFile class (from another application) to load the given file. The import code is wrapped in the namespace 'gen'
    gen::CImportXFile mesh;
    if (mesh.ImportFile( fileName.c_str() ) != gen::kSuccess)
    {
        return false;
    }

    // Get first sub-mesh from loaded file
    gen::SSubMesh subMesh;
    if (mesh.GetSubMesh( 0, &subMesh, tangents ) != gen::kSuccess)
    {
        return false;
    }

    // Create vertex element list & layout. We need a vertex layout to say what data we have per vertex in this model (e.g. position, normal, uv, etc.)
    // In previous projects the element list was a manually typed in array as we knew what data we would provide. However, as we can load models with
    // different vertex data this time we need flexible code. The array is built up one element at a time: ask the import class if it loaded normals,
    // if so then add a normal line to the array, then ask if it loaded UVs... etc.
    unsigned int numElts = 0;
    unsigned int offset = 0;

    // Position is always required
    m_VertexElts[numElts].SemanticName = "POSITION";   // Semantic in HLSL (what is this data for)
    m_VertexElts[numElts].SemanticIndex = 0;           // Index to add to semantic (a count for this kind of data, when using multiple of the same type, e.g. TEXCOORD0, TEXCOORD1)
    m_VertexElts[numElts].Format = DXGI_FORMAT_R32G32B32_FLOAT; // Type of data - this one will be a float3 in the shader. Most data communicated as though it were colours
    m_VertexElts[numElts].AlignedByteOffset = offset;  // Offset of element from start of vertex data (e.g. if we have position (float3) then uv (float2) then normal, the normal's offset is 5 floats = 5*4 = 20)
    m_VertexElts[numElts].InputSlot = 0;               // For when using multiple vertex buffers (e.g. instancing - an advanced topic)
    m_VertexElts[numElts].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA; // Use this value for most cases (only changed for instancing)
    m_VertexElts[numElts].InstanceDataStepRate = 0;    // --"--
    offset += 12;
    ++numElts;

    // Repeat for each kind of vertex data
    if (subMesh.hasNormals)
    {
        m_VertexElts[numElts].SemanticName = "NORMAL";
        m_VertexElts[numElts].SemanticIndex = 0;
        m_VertexElts[numElts].Format = DXGI_FORMAT_R32G32B32_FLOAT;
        m_VertexElts[numElts].AlignedByteOffset = offset;
        m_VertexElts[numElts].InputSlot = 0;
        m_VertexElts[numElts].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA;
        m_VertexElts[numElts].InstanceDataStepRate = 0;
        offset += 12;
        ++numElts;
    }

    if (subMesh.hasTangents)
    {
        m_VertexElts[numElts].SemanticName = "TANGENT";
        m_VertexElts[numElts].SemanticIndex = 0;
        m_VertexElts[numElts].Format = DXGI_FORMAT_R32G32B32_FLOAT;
        m_VertexElts[numElts].AlignedByteOffset = offset;
        m_VertexElts[numElts].InputSlot = 0;
        m_VertexElts[numElts].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA;
        m_VertexElts[numElts].InstanceDataStepRate = 0;
        offset += 12;
        ++numElts;
    }

    if (subMesh.hasTextureCoords)
    {
        m_VertexElts[numElts].SemanticName = "TEXCOORD";
        m_VertexElts[numElts].SemanticIndex = 0;
        m_VertexElts[numElts].Format = DXGI_FORMAT_R32G32_FLOAT;
        m_VertexElts[numElts].AlignedByteOffset = offset;
        m_VertexElts[numElts].InputSlot = 0;
        m_VertexElts[numElts].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA;
        m_VertexElts[numElts].InstanceDataStepRate = 0;
        offset += 8;
        ++numElts;
    }

    if (subMesh.hasVertexColours)
    {
        m_VertexElts[numElts].SemanticName = "COLOR";
        m_VertexElts[numElts].SemanticIndex = 0;
        m_VertexElts[numElts].Format = DXGI_FORMAT_R8G8B8A8_UNORM; // A RGBA colour with 1 byte (0-255) per component
        m_VertexElts[numElts].AlignedByteOffset = offset;
        m_VertexElts[numElts].InputSlot = 0;
        m_VertexElts[numElts].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA;
        m_VertexElts[numElts].InstanceDataStepRate = 0;
        offset += 4;
        ++numElts;
    }
    m_VertexSize = offset;

    // Given the vertex element list, pass it to DirectX to create a vertex layout. We also need to pass an example of a technique that will
    // render this model. We will only be able to render this model with techniques that have the same vertex input as the example we use here
    D3D10_PASS_DESC PassDesc;
    exampleTechnique->GetPassByIndex( 0 )->GetDesc( &PassDesc );
    Device->CreateInputLayout( m_VertexElts, numElts, PassDesc.pIAInputSignature, PassDesc.IAInputSignatureSize, &m_VertexLayout );
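
    // For comparison, a fixed element array of the kind used in previous projects (position + normal + UV only) would
    // look something like this - the flexible code above builds the same information one element at a time:
    //   D3D10_INPUT_ELEMENT_DESC fixedElts[] =
    //   {
    //       { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0,  D3D10_INPUT_PER_VERTEX_DATA, 0 },
    //       { "NORMAL",   0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D10_INPUT_PER_VERTEX_DATA, 0 },
    //       { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT,    0, 24, D3D10_INPUT_PER_VERTEX_DATA, 0 },
    //   };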

    // Create the vertex buffer and fill it with the loaded vertex data
    m_NumVertices = subMesh.numVertices;
    D3D10_BUFFER_DESC bufferDesc;
    bufferDesc.BindFlags = D3D10_BIND_VERTEX_BUFFER;
    bufferDesc.Usage = D3D10_USAGE_DEFAULT;               // Not a dynamic buffer
    bufferDesc.ByteWidth = m_NumVertices * m_VertexSize;  // Buffer size
    bufferDesc.CPUAccessFlags = 0;                        // Indicates that CPU won't access this buffer at all after creation
    bufferDesc.MiscFlags = 0;
    D3D10_SUBRESOURCE_DATA initData;                      // Initial data
    initData.pSysMem = subMesh.vertices;
    if (FAILED( Device->CreateBuffer( &bufferDesc, &initData, &m_VertexBuffer )))
    {
        return false;
    }
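
Once Load has succeeded, the layout and vertex buffer it created are used at render time. A minimal sketch of that usage in a hypothetical CModel::Render function, assuming the same Device pointer and member names as above (the topology and the plain Draw call are assumptions; an indexed model would set an index buffer and call DrawIndexed instead):

void CModel::Render( ID3D10EffectTechnique* technique )
{
    // Select this model's vertex layout and vertex buffer
    Device->IASetInputLayout( m_VertexLayout );
    UINT stride = m_VertexSize;
    UINT offset = 0;
    Device->IASetVertexBuffers( 0, 1, &m_VertexBuffer, &stride, &offset );
    Device->IASetPrimitiveTopology( D3D10_PRIMITIVE_TOPOLOGY_TRIANGLELIST );

    // Render with each pass of the given technique (it must have the same vertex input as the technique passed to Load)
    D3D10_TECHNIQUE_DESC techDesc;
    technique->GetDesc( &techDesc );
    for (UINT pass = 0; pass < techDesc.Passes; ++pass)
    {
        technique->GetPassByIndex( pass )->Apply( 0 );
        Device->Draw( m_NumVertices, 0 );
    }
}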