First off, why does the first of the following two snippets crash, while the second one doesn't?
Code: Select all
unsigned short __attribute__((aligned(16))) g_font_clut[16];

void clut_color( long color )
{
    /* note: the array address is cast to unsigned short here */
    unsigned short* clut = (unsigned short*)(((unsigned short)g_font_clut)|0x40000000);
    int i;
    for (i = 0; i < 16; i++)
        *(clut++) = (i << 12) | (color & 0xFFF);
}
Code: Select all
unsigned int __attribute__((aligned(16))) g_font_clut[16];

void clut_color( long color )
{
    /* note: the array address is cast to unsigned int here */
    unsigned short* clut = (unsigned short*)(((unsigned int)g_font_clut)|0x40000000);
    int i;
    for (i = 0; i < 16; i++)
        *(clut++) = (i << 12) | (color & 0xFFF);
}
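The only differences I can spot are the array's element type and the integer type the address is cast to before OR-ing in the uncached bit. If it helps, here's a hypothetical debug helper (dump_clut_addr is made up, and I'm assuming pointers on the PSP are 32-bit) showing what I'd print to compare the two casts:
Code: Select all
#include <stdio.h>

extern unsigned short g_font_clut[16];

/* Hypothetical helper, not in my real code: print what each cast turns
   the array address into. Assumption: addresses are 32-bit, so a cast
   to unsigned short would keep only the low 16 bits. */
void dump_clut_addr(void)
{
    unsigned int full  = (unsigned int)g_font_clut;    /* full 32-bit address */
    unsigned int trunc = (unsigned short)g_font_clut;  /* low 16 bits only?   */
    printf("int cast:   %08x -> %08x uncached\n", full,  full  | 0x40000000);
    printf("short cast: %08x -> %08x uncached\n", trunc, trunc | 0x40000000);
}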
Then the other problem: my texture isn't drawn correctly, and I've dug into it far enough to establish that every index the GU reads from the texture is 0, even though when I simply iterate over the texture array and print each index as an int to a text file, the values come out correctly. So the problem must lie either in the texture upload or in the texture mode parameters. By the way, I load the texture data from a file.
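For reference, a simplified sketch of the kind of dump I mean (the file path and fprintf are just illustrative, and I'm assuming GU_PSM_T4 data is packed two 4-bit indices per byte, low nibble first):
Code: Select all
#include <stdio.h>

extern unsigned char g_font_tex[256*128];

/* Illustrative only: walk the texture bytes and print the two 4-bit
   indices packed in each one. The path "ms0:/indices.txt" is just an
   example. */
void dump_indices(void)
{
    FILE* f = fopen("ms0:/indices.txt", "w");
    int i;
    if (!f) return;
    for (i = 0; i < 256*128; i++)
        fprintf(f, "%d %d ", g_font_tex[i] & 0x0F, (g_font_tex[i] >> 4) & 0x0F);
    fclose(f);
}
Here is the GU setup in question: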
Code: Select all
unsigned char __attribute__((aligned(16))) g_font_tex[256*128];

void drawstuff()
{
    sceGuClutMode(GU_PSM_4444,0,0xff,0);      // 16-bit palette entries
    sceGuClutLoad((16/8),g_font_clut);        // upload 2 blocks of 8 entries (16)
    sceGuTexMode(GU_PSM_T4,0,0,0);            // 4-bit indexed image
    sceGuTexImage(0,256,256,256,g_font_tex);
    sceGuTexFunc(GU_TFX_REPLACE,GU_TCC_RGBA);
    sceGuTexFilter(GU_NEAREST,GU_NEAREST);
    sceGuBlendFunc(GU_ADD,GU_SRC_ALPHA,GU_ONE_MINUS_SRC_ALPHA,0,0);
    sceGuEnable(GU_BLEND);
    ...
}
EDIT: It's not a problem of the texture cache or the DCache, as I just verified.
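(For reference, this is the kind of flushing I mean, assuming sceKernelDcacheWritebackAll and sceGuTexFlush are the right calls for that:)
Code: Select all
#include <psputils.h> /* sceKernelDcacheWritebackAll */
#include <pspgu.h>    /* sceGuTexFlush */

/* Write CPU-cached data back to RAM and invalidate the GE texture
   cache before drawing. sceGuTexFlush is issued inside the active
   display list. */
static void flush_caches(void)
{
    sceKernelDcacheWritebackAll();
    sceGuTexFlush();
}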
And one thing I'd like clarified: what parameters am I actually supposed to pass to sceGuTexImage for a 4-bit indexed texture like the one above? Are the texture width and the buffer width really both 256, or should one or the other be 128 (the width in bytes)?
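To make that concrete, these are the variants I'm unsure between (the parameter order being (mipmap, width, height, buffer width, data), if I read pspgu.h right):
Code: Select all
sceGuTexImage(0, 256, 256, 256, g_font_tex); // both widths in pixels?
sceGuTexImage(0, 256, 256, 128, g_font_tex); // buffer width in bytes (4bpp)?
sceGuTexImage(0, 128, 256, 128, g_font_tex); // both as byte widths?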
Thanks for your help :)