int(boneIndex[i]) always returns an incorrect result on my ATI HD2400PRO:
finalMatrix += boneWeight[i]*boneMat[int(boneIndex[i])];
Here is my workaround:
// Workaround for the broken int(float) cast in the ATI HD2400PRO driver:
// instead of casting, linearly search the 32 possible bone indices and
// record the match into a true integer vector, which the driver then
// indexes correctly when looking up boneMat.
ivec3 boneIndexAti = ivec3(0,0,0);
// NOTE(review): assumes every bone index is in [0, 31]; an out-of-range
// value silently falls back to bone 0 — confirm that is acceptable.
// The three loops are kept separate (rather than nesting over a dynamic
// component index) deliberately, to avoid re-triggering driver quirks.
for(int i=0;i<32;i++)
if (boneIndex[0]==i) boneIndexAti[0]=i;
for(int i=0;i<32;i++)
if (boneIndex[1]==i) boneIndexAti[1]=i;
for(int i=0;i<32;i++)
if (boneIndex[2]==i) boneIndexAti[2]=i;
// Accumulate the skinning matrix from the three weighted bone matrices.
for (int i = 0; i < 3; i++)
finalMatrix += boneWeight[i]*boneMat[boneIndexAti[i]];
Is there any way to make the int(float) conversion itself work correctly?
Another question:
Here is my texturing fragment shader:
// Selects one of eight bound textures per fragment, driven by the
// per-vertex selector texNum (encoded as a 0..1 float, scaled by 255).
// GLSL 1.x forbids dynamically indexing a sampler array in a fragment
// shader, so an explicit branch per sampler is required here.
uniform sampler2D myTexture0;
uniform sampler2D myTexture1;
uniform sampler2D myTexture2;
uniform sampler2D myTexture3;
uniform sampler2D myTexture4;
uniform sampler2D myTexture5;
uniform sampler2D myTexture6;
uniform sampler2D myTexture7;
varying float texNum;
void main(void)
{
// Decode the texture slot: undo the 1/255 vertex-attribute encoding;
// the +0.001 bias guards against interpolation/rounding error before floor().
float texNum2 = floor(texNum*255.0-1.0+0.001);
vec2 uv = gl_TexCoord[0].st;
// else-if cascade: at most one comparison chain runs per fragment
// (the original's independent ifs evaluated all eight every time).
// The trailing else guarantees gl_FragColor is always written; the
// original left it undefined for out-of-range selectors.
if (texNum2 == 0.0)
    gl_FragColor = texture2D( myTexture0, uv );
else if (texNum2 == 1.0)
    gl_FragColor = texture2D( myTexture1, uv );
else if (texNum2 == 2.0)
    gl_FragColor = texture2D( myTexture2, uv );
else if (texNum2 == 3.0)
    gl_FragColor = texture2D( myTexture3, uv );
else if (texNum2 == 4.0)
    gl_FragColor = texture2D( myTexture4, uv );
else if (texNum2 == 5.0)
    gl_FragColor = texture2D( myTexture5, uv );
else if (texNum2 == 6.0)
    gl_FragColor = texture2D( myTexture6, uv );
else
    gl_FragColor = texture2D( myTexture7, uv );
}
Is there any way to do it faster and/or more elegantly (e.g. a texture atlas or array texture instead of eight samplers)?