I am struggling with CgFX in MotionBuilder because of the lack of output from its errors. If a compiler error happens then sure, I can read what went wrong, but right now it feels like I'm working against the binding to the UI.
Here is the shader, where I sample the color from a texture:
float4x4 WorldITXf : WorldInverseTranspose;
float4x4 WorldViewProjXf : WorldViewProjection;
// Exposing parameters to UI,
float3 DiffuseColor : DIFFUSE
<
string UIName = "Diffuse Color";
string UIWidget = "ColorPicker";
> = {0.66, 0.66, 0.66};
// The texture object will not show up in MoBu, so there is no point giving it UI names etc., but we can do that for the sampler,
texture DiffuseTexture <string ResourceType = "2D";>;
sampler2D DiffuseSampler <string UIName = "Diffuse Texture";> = sampler_state
{
    Texture = <DiffuseTexture>;
    MinFilter = Linear;
    MagFilter = Linear;
    AddressU = WRAP;
    AddressV = WRAP;
    AddressW = WRAP;
};
// Defining structures we will be using,
// Application => Vertex data,
struct appdata {
    float3 Position: POSITION;
    float2 UV: TEXCOORD0;
    float4 Normal: NORMAL;
    // Tangent and Binormal both require that the mesh has been UV mapped,
    // We can pick whichever TEXCOORD we want, just make sure MoBu's widget
    // passes the correct data to the right TEXCOORD...
    float4 Tangent: TEXCOORD6;
    float4 Binormal: TEXCOORD7;
};
// Vertex => Fragment shader data
struct vert {
    float4 Position: POSITION;
    float2 UV: TEXCOORD0;
    float3 Normal: TEXCOORD1;
    float3 Tangent: TEXCOORD2;
    float3 Binormal: TEXCOORD3;
};
vert ShaderVertex(appdata IN)
{
    // Initialize the vert output to all zero,
    vert OUT = (vert)0;
    // Get the vertex position from object->world->view->projection space,
    // all using one multiplication!
    float4 position = float4(IN.Position.xyz, 1.0f);
    OUT.Position = mul(WorldViewProjXf, position);
    // Just passing the UV through from the application data,
    OUT.UV = IN.UV;
    // Get world-space normals by multiplying the object-space normals by
    // the world inverse transpose matrix,
    OUT.Normal = normalize(mul(WorldITXf, IN.Normal).xyz);
    OUT.Tangent = normalize(mul(WorldITXf, IN.Tangent).xyz);
    OUT.Binormal = normalize(mul(WorldITXf, IN.Binormal).xyz);
    return OUT;
}
float4 ShaderPixel(vert IN) : COLOR
{
    // Hardcoding a light vector,
    float3 lightDirection0 = normalize(float3(0.5f, 0.7f, 0.3f));
    // Use the color sampled from the texture,
    float4 diffuseColor = tex2D(DiffuseSampler, IN.UV);
    // float4 diffuseColor = float4(DiffuseColor.xyz, 1.0); // Attempting to use this will cause errors
    // Simple diffuse light, surface normal dot light direction * color
    float NoL = saturate(dot(IN.Normal, lightDirection0));
    float3 color = diffuseColor.xyz * NoL;
    return float4(color, 1.0f);
}
// Techniques will be selectable from a drop-down in MotionBuilder.
technique SimpleDiffuse
{
    // This is a single pass shader; we can name the passes anything we
    // would like.
    pass FirstPass
    {
        VertexProgram = compile gp4vp ShaderVertex();
        FragmentProgram = compile gp4fp ShaderPixel();
    }
}
The shader above works; it samples a color from a texture. Happy days.
If I instead attempt to use the parameter DiffuseColor (the commented-out line in ShaderPixel), MotionBuilder just throws "Error: the parameter used is invalid" message boxes at me until I kill the program with Task Manager.
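To be explicit, the only change is inside ShaderPixel, swapping the texture sample for the exposed parameter while the texture and sampler declarations stay in the file. This is the version that triggers the endless error boxes:
float4 ShaderPixel(vert IN) : COLOR
{
    // Hardcoding a light vector,
    float3 lightDirection0 = normalize(float3(0.5f, 0.7f, 0.3f));
    // Use the exposed UI parameter instead of sampling the texture,
    // float4 diffuseColor = tex2D(DiffuseSampler, IN.UV);
    float4 diffuseColor = float4(DiffuseColor.xyz, 1.0f);
    // Simple diffuse light, surface normal dot light direction * color
    float NoL = saturate(dot(IN.Normal, lightDirection0));
    float3 color = diffuseColor.xyz * NoL;
    return float4(color, 1.0f);
}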
If I comment out the texture and the sampler, I am allowed to use the DiffuseColor parameter.
But what is causing this error? I can see situations where I would want both a texture and a float3 parameter, and want to use both in the same technique.
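For reference, what I would eventually like to end up with is roughly this, with the texture sample tinted by the UI color (an untested sketch, assuming the binding issue can be resolved):
float4 ShaderPixel(vert IN) : COLOR
{
    // Hardcoding a light vector,
    float3 lightDirection0 = normalize(float3(0.5f, 0.7f, 0.3f));
    // Tint the sampled texture color with the exposed UI color,
    float4 textureColor = tex2D(DiffuseSampler, IN.UV);
    float3 diffuseColor = textureColor.xyz * DiffuseColor;
    // Simple diffuse light, surface normal dot light direction * color
    float NoL = saturate(dot(IN.Normal, lightDirection0));
    return float4(diffuseColor * NoL, 1.0f);
}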