• Advertisement
Sign in to follow this  

problem by loading a shader

This topic is 4617 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

Hi, my program compiles, but at execution it crashes with "the application has to close...". I can't find the error — can you help me? This is my code:
// Loads GLSL vertex and fragment shader sources from two files and hands
// them to the GL shader objects.  SLEN caps the number of source lines
// that can be stored.
class loadShader
{
	private :
		int stringslen[SLEN] ;       // length of each stored source line
		int nbString ;               // number of lines actually loaded
		const char *strings[SLEN] ;  // one pointer per source line
	public :
		// fileName1: vertex shader file, fileName2: fragment shader file.
		loadShader(char *fileName1, char *fileName2) ;
		// Fills strings/stringslen from fileName and sets nbString.
		void loadStrings(char *fileName) ;
};

// Builds, compiles and links a GLSL program from a vertex shader file
// (fileName1) and a fragment shader file (fileName2), then makes the
// program current.
loadShader::loadShader(char *fileName1, char *fileName2)
{
	unsigned int vsh, fsh ; // vertex shader handle, fragment shader handle

	vsh = glCreateShader(GL_VERTEX_SHADER);
	fsh = glCreateShader(GL_FRAGMENT_SHADER);

	// BUG FIX: was loadStrings(fileName) — an undeclared identifier.
	loadStrings(fileName1) ;
	glShaderSource(vsh, nbString, strings, stringslen);
	// (removed `strings[0]='\0';` — it nulled out a source pointer for no
	// reason; loadStrings repopulates the array on the next call anyway)

	loadStrings(fileName2) ;
	glShaderSource(fsh, nbString, strings, stringslen);

	glCompileShader(vsh);
	glCompileShader(fsh);

	// Program object names are full GLuints; a short could truncate one.
	unsigned int ph = glCreateProgram(); // program handle
	glAttachShader(ph, vsh);
	glAttachShader(ph, fsh); // was missing the comma

	glLinkProgram(ph);       // was missing the closing parenthesis
	glUseProgram(ph);
}

void loadShader::loadStrings(char *fileName)
{
	FILE *f ;
	f=fopen(fileName, "r") ;
	nbString=0;
	while(feof(f) !=0 )
	{
		stringslen[nbString]=fscanf(f, "%s", &strings[nbString]);
		nbString++ ;
	}
}



[Edited by - TheSeb on June 3, 2005 6:41:26 AM]

Share this post


Link to post
Share on other sites
Advertisement
Dude, in the example you have posted, you're creating an array of pointers, not pointer(s) to array(s) of chars...
Here's how to fix your code; note that this will crash if your file path is incorrect.


// Second take: load each whole shader file into a single heap buffer
// (stream) instead of an array of per-line pointers.
class loadShader
{
private :
int streamLength;  // byte length of the loaded file
char *stream;      // heap buffer holding the whole file (new[]'d)
public :
// fileName1: vertex shader file, fileName2: fragment shader file.
loadShader(char *fileName1, char *fileName2) ;
// Loads fileName into stream/streamLength; caller delete[]s stream.
void loadStrings(char *fileName) ;
};

// Builds a GLSL program from a vertex shader file (fileName1) and a
// fragment shader file (fileName2), then makes the program current.
loadShader::loadShader(char *fileName1, char *fileName2)
{
unsigned int vsh, fsh ; // vertex shader handle, fragment shader handle

vsh = glCreateShader(GL_VERTEX_SHADER);
fsh = glCreateShader(GL_FRAGMENT_SHADER);

loadStrings(fileName1) ; // was: fileName — an undeclared identifier
// glShaderSource's last parameter is const GLint*, not an int value.
glShaderSource(vsh, 1, (const GLcharARB **)&stream, &streamLength);

delete[] stream;

loadStrings(fileName2) ;
glShaderSource(fsh, 1, (const GLcharARB **)&stream, &streamLength);
delete[] stream;

glCompileShader(vsh);
glCompileShader(fsh);

// Program object names are full GLuints; a short could truncate one.
unsigned int ph = glCreateProgram(); // program handle
glAttachShader(ph, vsh);
glAttachShader(ph, fsh); // was missing the comma

glLinkProgram(ph);       // was missing the closing parenthesis
glUseProgram(ph);
}

// Reads the whole of fileName into a freshly allocated, NUL-terminated
// buffer (stream) and records its length (streamLength).  On open
// failure both are reset, so the caller never reuses a stale pointer
// that was already delete[]d.
void loadShader::loadStrings(char *fileName)
{
ifstream fileInputStream(fileName, ifstream::in | ifstream::binary);

// BUG FIX: previously a failed open left stream/streamLength untouched,
// and the constructor would then pass a dangling pointer to GL.
stream = NULL;
streamLength = 0;

if(fileInputStream.is_open())
{
fileInputStream.seekg(0, ios::end);

streamLength = fileInputStream.tellg();
stream = new char[streamLength + 1];
memset(stream, 0, streamLength + 1); // 0, not NULL: memset takes an int

fileInputStream.seekg(0, ios::beg);
fileInputStream.read(stream, streamLength);
fileInputStream.close();
}
}


Share this post


Link to post
Share on other sites
Alright, since I'm in a good mood I figured I'd help a brother out; I took the liberty in rewriting your shader class, I hope you like what I have done :)
PS: I haven't compiled this one, so there might be few errors here and there :P


// Wrapper around a GLSL program: vertex shader (vsh), fragment shader
// (fsh) and linked program (lsh) object handles, 0 meaning "none".
class Shader
{
private :
// Reads fileName into a new[]'d NUL-terminated buffer; caller delete[]s
// it.  Returns NULL when the file cannot be opened.
char* loadString(char *fileName) ;

unsigned int vsh,  // vertex shader object handle
fsh,               // fragment shader object handle
lsh;               // linked program object handle
public :
Shader() ;
// Compiles fileName1 (vertex) + fileName2 (fragment) and links them.
bool loadShaders(char *fileName1, char *fileName2);
// Binds the program; false if the shaders were never fully built.
bool useShaders();
// Deletes any live GL objects and zeroes the handles.
void clearShaders();
};

// A fresh Shader owns no GL objects; every handle starts as the
// reserved "null object" name 0.
Shader::Shader()
{
vsh = fsh = lsh = 0;
}

// Releases whichever GL objects are still alive and resets every handle
// to 0 so the instance can be reused.
void Shader::clearShaders()
{
unsigned int *handles[] = { &vsh, &fsh, &lsh };

for(int i = 0; i < 3; ++i)
{
if(*handles[i] != 0)
{
glDeleteObjectARB(*handles[i]);
*handles[i] = 0;
}
}
}

// Makes the linked program current.  Returns false, without touching GL
// state, when any of the three objects is missing.
bool Shader::useShaders()
{
if(vsh && lsh && fsh)
{
glUseProgramObjectARB(lsh); // was missing the semicolon
return true;
}

return false;
}

// Compiles fileName1 as the vertex shader and fileName2 as the fragment
// shader, then links them into a program.  Returns false — after
// releasing any partially created objects — on any failure.
bool Shader::loadShaders(char *fileName1, char *fileName2)
{
char *shaderContent = loadString(fileName1); // was: fileName (undeclared)
int length = shaderContent ? strlen(shaderContent) : 0;
int errorLog = GL_FALSE; // was assigned without ever being declared

if(length == 0)
{
delete[] shaderContent; // an empty file still allocated a buffer
return false;
}

vsh = glCreateShader(GL_VERTEX_SHADER);

// glShaderSource takes a pointer to the length, not the length itself.
glShaderSource(vsh, 1, (const GLcharARB **)&shaderContent, &length);
glCompileShader(vsh);
delete[] shaderContent; // GL copies the source, safe to free now

// was: shaderID (undeclared) — query the shader we just compiled
glGetObjectParameterivARB(vsh, GL_OBJECT_COMPILE_STATUS_ARB, &errorLog);

if(errorLog == GL_FALSE )
{
clearShaders();
return false;
}

lsh = glCreateProgramObjectARB();
glAttachObjectARB(lsh, vsh);

// was: fileName again — it would have loaded the vertex file twice
shaderContent = loadString(fileName2);
length = shaderContent ? strlen(shaderContent) : 0;

if(length == 0)
{
delete[] shaderContent;
clearShaders();
return false;
}

fsh = glCreateShader(GL_FRAGMENT_SHADER);

glShaderSource(fsh, 1, (const GLcharARB **)&shaderContent, &length);
delete[] shaderContent;
glCompileShader(fsh);

glGetObjectParameterivARB(fsh, GL_OBJECT_COMPILE_STATUS_ARB, &errorLog);

if(errorLog == GL_FALSE )
{
clearShaders();
return false;
}
glAttachObjectARB(lsh, fsh);
glLinkProgram(lsh);

return true;
}

char* loadShader::loadString(char *fileName)
{
ifstream fileInputStream(fileName, ifstream::in | ifstream::binary);
char *stream = NULL;
int streamLength = 0;

if(fileInputStream.is_open())
{
fileInputStream.seekg(0, ios::end);

streamLength = fileInputStream.tellg();
stream = new char[streamLength + 1];
memset(stream, NULL, streamLength + 1);

fileInputStream.seekg(0, ios::beg);
fileInputStream.read(stream, streamLength);
fileInputStream.close();
}

return stream;
}



Share this post


Link to post
Share on other sites
There is also the shader classes I posted here, which wraps away alot of the GLSL around it all (and soon to be included in a book once I get to that bit, heh... infact, I might change the loading routine before I do that)

Share this post


Link to post
Share on other sites
Hi, thanks for your answers, but I still have a problem: the program seems to crash here:
vsh = glCreateShader(GL_VERTEX_SHADER);
where vsh is an unsigned int
I don't understand why it doesn't work...

Share this post


Link to post
Share on other sites
a Windows error, something like: "the application has encountered a problem and has to close"

Share this post


Link to post
Share on other sites
vsh is equal to 0, but I don't know if that is normal; the other variables have strange values, but that is because the debugger stops at the first line and crashes just after.

Share this post


Link to post
Share on other sites
Je voudrais bien savoir le type de carte graphique que vous avez, je doute bien qu'elle supporte les vertex et pixel shaders 2.0 ce qui explique bien le mal fonctionnement de ton programme.
Aller, ciao.

Share this post


Link to post
Share on other sites
i have an ATI radeon 9600 SE with 5.3 catalyst, it works well with renderMonkey 1.6.
PS : why do you speak in french ?

Share this post


Link to post
Share on other sites
Quote:
Original post by TheSeb
i have an ATI radeon 9600 SE with 5.3 catalyst, it works well with renderMonkey 1.6.
PS : why do you speak in french ?



I dunno, I figured I might score some chicks from around here...j/k [wink]
Hmm I have the same card on my developing machine at work so I'm positive it supports PS/VS 2.0.
Bah, have you tried the_phantom set of utilities to load your shaders?

Share this post


Link to post
Share on other sites
i would like to do it myself before, nobody else knows of what it comes from ? maybe something which is not in my code ? (i'm just supposing)

Share this post


Link to post
Share on other sites
Quote:
Original post by JavaCoolDude
Alright, since I'm in a good mood I figured I'd help a brother out; I took the liberty in rewriting your shader class, I hope you like what I have done :)
PS: I haven't compiled this one, so there might be few errors here and there :P

*** Source Snippet Removed ***


Well... I'm not sure but I'll give it a try...
Maybe, for some reason, not releasing the memory inside loadString could be causing these errors... try making the stream var a member of the Shader class like this:


// Variant of the Shader class where the file buffer (stream) is a
// member instead of a loadString return value.
class Shader
{
private :
// Loads fileName into the member `stream`; NULL when the open fails.
void loadString(char *fileName);
char *stream;      // new[]'d, NUL-terminated file contents (or NULL)

unsigned int vsh,  // vertex shader object handle (0 = none)
fsh,               // fragment shader object handle (0 = none)
lsh;               // linked program object handle (0 = none)
public :
Shader() ;
// Compiles fileName1 (vertex) + fileName2 (fragment) and links them.
bool loadShaders(char *fileName1, char *fileName2);
// Binds the program; false if the shaders were never fully built.
bool useShaders();
// Deletes any live GL objects and zeroes the handles.
void clearShaders();
};

// A fresh instance owns no GL objects and has no source buffer loaded.
Shader::Shader()
{
vsh = fsh = lsh = 0;
stream = NULL;
}

// Releases whichever GL objects are still alive and resets every handle
// to 0 so the instance can be reused.
void Shader::clearShaders()
{
unsigned int *handles[] = { &vsh, &fsh, &lsh };

for(int i = 0; i < 3; ++i)
{
if(*handles[i] != 0)
{
glDeleteObjectARB(*handles[i]);
*handles[i] = 0;
}
}
}

// Makes the linked program current.  Returns false, without touching GL
// state, when any of the three objects is missing.
bool Shader::useShaders()
{
if(vsh && lsh && fsh)
{
glUseProgramObjectARB(lsh); // was missing the semicolon
return true;
}

return false;
}

// Compiles fileName1 as the vertex shader and fileName2 as the fragment
// shader, then links them into a program.  The source text lives in the
// member buffer `stream`.  Returns false — after releasing any
// partially created objects — on any failure.
bool Shader::loadShaders(char *fileName1, char *fileName2)
{
int length=0, errorLog=GL_FALSE;

loadString(fileName1); // was: fileName — an undeclared identifier
length = stream ? strlen(stream) : 0;

if(length == 0)
{
delete [] stream; // an empty file still allocated a 1-byte buffer
stream = NULL;
return false;
}

vsh = glCreateShader(GL_VERTEX_SHADER);

// glShaderSource takes a pointer to the length, not the length itself.
glShaderSource(vsh, 1, (const GLcharARB **)&stream, &length);
glCompileShader(vsh);
delete [] stream; // GL copies the source, safe to free now
stream = NULL;

// was: shaderID (undeclared) — query the shader we just compiled
glGetObjectParameterivARB(vsh, GL_OBJECT_COMPILE_STATUS_ARB, &errorLog);

if(errorLog == GL_FALSE )
{
clearShaders();
return false;
}

lsh = glCreateProgramObjectARB();
glAttachObjectARB(lsh, vsh);

// was: fileName again — it would have loaded the vertex file twice
loadString(fileName2);
length = stream ? strlen(stream) : 0;

if(length == 0)
{
delete [] stream;
stream = NULL;
clearShaders();
return false;
}

fsh = glCreateShader(GL_FRAGMENT_SHADER);

glShaderSource(fsh, 1, (const GLcharARB **)&stream, &length);
delete [] stream;
stream = NULL;
glCompileShader(fsh);

glGetObjectParameterivARB(fsh, GL_OBJECT_COMPILE_STATUS_ARB, &errorLog);

if(errorLog == GL_FALSE )
{
clearShaders();
return false;
}
glAttachObjectARB(lsh, fsh);
glLinkProgram(lsh);

return true;
}

void loadShader::loadString(char *fileName)
{
ifstream fileInputStream(fileName, ifstream::in | ifstream::binary);
int streamLength = 0;

if(fileInputStream.is_open())
{
fileInputStream.seekg(0, ios::end);

streamLength = fileInputStream.tellg();
stream = new char[streamLength+1];
memset(stream, NULL, streamLength+1);

fileInputStream.seekg(0, ios::beg);
fileInputStream.read(stream, streamLength);
fileInputStream.close();
}
}




Share this post


Link to post
Share on other sites
Quote:
Original post by TheSeb
i would like to do it myself before, nobody else knows of what it comes from ? maybe something which is not in my code ? (i'm just supposing)


ok, my last shot in the dark, how are you setting up the function pointers for the extension?

@_GLoom_
I very much doubt thats going to be the problem, if you look at the classes I linked to above I use a local stream object and dont have any problems like that.

Share this post


Link to post
Share on other sites
Quote:
Original post by _the_phantom_
ok, my last shot in the dark, how are you setting up the function pointers for the extension?

@_GLoom_
I very much doubt thats going to be the problem, if you look at the classes I linked to above I use a local stream object and dont have any problems like that.


Yep, I think you're right... it's just that I didn't see other potential problems in that code =P

Share this post


Link to post
Share on other sites
In the example code that I hacked together in a rush I certainly do release the char stream after everytime I allocate memory for it.
The problem lies somewhere else, I'm positive.

Share this post


Link to post
Share on other sites
Quote:
Original post by JavaCoolDude
In the example code that I hacked together in a rush I certainly do release the char stream after everytime I allocate memory for it.
The problem lies somewhere else, I'm positive.


ahhh, I hadn't seen the first code you posted, only the second one... sorry :)

Share this post


Link to post
Share on other sites
Quote:
Original post by _the_phantom_
Quote:
Original post by TheSeb
i would like to do it myself before, nobody else knows of what it comes from ? maybe something which is not in my code ? (i'm just supposing)


ok, my last shot in the dark, how are you setting up the function pointers for the extension?

@_GLoom_
I very much doubt thats going to be the problem, if you look at the classes I linked to above I use a local stream object and dont have any problems like that.


Sorry, what do you mean by "the function pointers for the extension"?
If you are talking about what I think, I'm using GLEW: I have just put glew.h in the include directory of Visual C++ 6, I have also put glew.lib in the lib directory, and I use it with this line:
#pragma comment( lib, "glew32.lib")
and I have put the GLEW .dll in the system32 directory.
I have done nothing else. Did I forget something?

Share this post


Link to post
Share on other sites
Now I'm not familiar with GLEW since I'm diehard GLEE fan, but isn't there an init function that you have to call right after creating your GL context before using the said extensions?

Share this post


Link to post
Share on other sites
Indeed, there is a call to glewInit();
and now I see the result :-)
Thanks, guys, for your help and your code (it helped me to make mine).

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement