Jump to content
  • Advertisement
Sign in to follow this  
meh

OpenGL Eeek, Pbuffer just not wanting to initialise.

This topic is 5157 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

Hey chaps, I'm currently trying to setup some off screen render effects to perform post render effects on my scene using pbuffers. Having taken the whitepapers from NVIDIA and ATI I've come up with the following initialisation routine.
	// Pixel-format requirements for an off-screen pbuffer that can be bound
	// as an RGBA texture (WGL_ARB_pbuffer + WGL_ARB_render_texture).
	int attr[] =
	{
		WGL_SUPPORT_OPENGL_ARB, TRUE,		// pbuffer will be used with gl
		WGL_DRAW_TO_PBUFFER_ARB, TRUE,		// enable render to pbuffer
		WGL_BIND_TO_TEXTURE_RGBA_ARB, TRUE, // pbuffer will be used as a texture
		WGL_RED_BITS_ARB, 8,				// at least 8 bits for RED channel
		WGL_GREEN_BITS_ARB, 8,				// at least 8 bits for GREEN channel
		WGL_BLUE_BITS_ARB, 8,				// at least 8 bits for BLUE channel
		WGL_ALPHA_BITS_ARB, 8,				// at least 8 bits for ALPHA channel
		WGL_DEPTH_BITS_ARB, 24,				// at least 24 bits for depth buffer
		// NOTE(review): some drivers expose no pixel format that combines
		// double buffering with BIND_TO_TEXTURE; if no format is found,
		// try dropping this pair first -- confirm against your driver.
		WGL_DOUBLE_BUFFER_ARB, TRUE,		// we want double buffering
		0									// zero terminates the list
	};

	unsigned int Count = 0;
	int PixelFormat = 0;
	// find a pixel format that meets the above requirements.
	// BUGFIX: also check the call's own return value, not just Count --
	// on failure wglChoosePixelFormatARB may leave Count untouched.
	if( !wglChoosePixelFormatARB( m_hDC, attr, NULL, 1, &PixelFormat, &Count ) || Count == 0 )
	{
		g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to find an acceptable pixel format" );
		return S_FALSE;
	}

	// setup the pbuffer so it can be used as a texture
	int pAttrib[] =
	{
		WGL_TEXTURE_FORMAT_ARB,	WGL_TEXTURE_RGBA_ARB,	// RGBA texture format
		WGL_TEXTURE_TARGET_ARB, WGL_TEXTURE_2D_ARB,		// bind as GL_TEXTURE_2D
		WGL_MIPMAP_TEXTURE_ARB, 0,						// no mipmap storage
		WGL_PBUFFER_LARGEST_ARB, 0,						// fail rather than allocate a smaller pbuffer
		0												// zero terminates the list
	};

	// allocate the pbuffer at half the frame resolution.
	// BUGFIX: validate the handle *before* asking for its DC/RC -- the
	// original called wglGetPbufferDCARB on a possibly-NULL handle.
	m_hRenderTexture = wglCreatePbufferARB( m_hDC, PixelFormat, (int)m_uiWidth/2, (int)m_uiHeight/2, pAttrib );
	if( !m_hRenderTexture )
	{
		g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to create Pbuffer" );
		return S_FALSE;
	}

	// fetch a device context for the pbuffer and build a GL context on it;
	// both can fail independently of pbuffer creation, so check them too
	m_hRenderTextureDC = wglGetPbufferDCARB( m_hRenderTexture );
	m_hRenderTextureRC = wglCreateContext( m_hRenderTextureDC );
	if( !m_hRenderTextureDC || !m_hRenderTextureRC )
	{
		g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to create Pbuffer DC/RC" );
		return S_FALSE;
	}

	// make a texture object for binding the pbuffer to
	glGenTextures(1, &m_uiRenderTextureID);
	glBindTexture(GL_TEXTURE_2D, m_uiRenderTextureID);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

	return S_OK;
The main issue with this code is that it fails to create the pbuffer even though the values passed to the function are all correct (at least according to the whitepapers). If I take the code down to the minimum that the NVIDIA whitepaper says is required, I still have the same problem.
	// Minimal pixel-format requirements per the NVIDIA pbuffer whitepaper:
	// just "drawable as pbuffer" and "bindable as RGBA texture".
	int attr[] =
	{
		WGL_DRAW_TO_PBUFFER_ARB, TRUE,		// enable render to pbuffer
		WGL_BIND_TO_TEXTURE_RGBA_ARB, TRUE, // pbuffer will be used as a texture
		0									// zero terminates the list
	};

	unsigned int Count = 0;
	int PixelFormat = 0;
	// find a pixel format that meets the above requirements.
	// BUGFIX: also check the call's own return value, not just Count
	if( !wglChoosePixelFormatARB( m_hDC, attr, NULL, 1, &PixelFormat, &Count ) || Count == 0 )
	{
		g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to find an acceptable pixel format" );
		return S_FALSE;
	}

	// setup the pbuffer so it can be used as a texture
	int pAttrib[] =
	{
		WGL_TEXTURE_FORMAT_ARB,	WGL_TEXTURE_RGBA_ARB,	// RGBA texture format
		WGL_TEXTURE_TARGET_ARB, WGL_TEXTURE_2D_ARB,		// bind as GL_TEXTURE_2D
		0												// zero terminates the list
	};

	// allocate the pbuffer at half the frame resolution.
	// BUGFIX: validate the handle *before* asking for its DC/RC -- the
	// original called wglGetPbufferDCARB on a possibly-NULL handle.
	m_hRenderTexture = wglCreatePbufferARB( m_hDC, PixelFormat, (int)m_uiWidth/2, (int)m_uiHeight/2, pAttrib );
	if( !m_hRenderTexture )
	{
		g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to create Pbuffer" );
		return S_FALSE;
	}

	// fetch a device context for the pbuffer and build a GL context on it;
	// both can fail independently of pbuffer creation, so check them too
	m_hRenderTextureDC = wglGetPbufferDCARB( m_hRenderTexture );
	m_hRenderTextureRC = wglCreateContext( m_hRenderTextureDC );
	if( !m_hRenderTextureDC || !m_hRenderTextureRC )
	{
		g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to create Pbuffer DC/RC" );
		return S_FALSE;
	}

	// make a texture object for binding the pbuffer to
	glGenTextures(1, &m_uiRenderTextureID);
	glBindTexture(GL_TEXTURE_2D, m_uiRenderTextureID);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

	return S_OK;
Both versions fail to create the pbuffer. The card supports all the extensions needed it just seems nothing works. :( Any help is appreciated, Charles

Share this post


Link to post
Share on other sites
Advertisement
Well, just to confuse me further, I deleted the whole thing and rewrote it.


// Pixel-format requirements for an off-screen, texture-bindable pbuffer.
int attr[] =
{
WGL_SUPPORT_OPENGL_ARB, TRUE, // P-buffer will be used with OpenGL
WGL_DRAW_TO_PBUFFER_ARB, TRUE, // Enable render to p-buffer
WGL_BIND_TO_TEXTURE_RGBA_ARB, TRUE, // P-buffer will be used as a texture
WGL_RED_BITS_ARB, 8, // At least 8 bits for RED channel
WGL_GREEN_BITS_ARB, 8, // At least 8 bits for GREEN channel
WGL_BLUE_BITS_ARB, 8, // At least 8 bits for BLUE channel
WGL_ALPHA_BITS_ARB, 8, // At least 8 bits for ALPHA channel
WGL_DEPTH_BITS_ARB, 16, // At least 16 bits for depth buffer
// BUGFIX: comment previously said "don't require double buffering"
// while the value requested it; comment now matches the code.
WGL_DOUBLE_BUFFER_ARB, TRUE, // We want double buffering
0 // Zero terminates the list
};

unsigned int Count = 0;
int PixelFormat = 0;
// Find a matching pixel format; check the call itself as well as Count.
if( !wglChoosePixelFormatARB( m_hDC, attr, NULL, 1, &PixelFormat, &Count ) || Count == 0 )
{
g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to find an acceptable pixel format" );
return S_FALSE; // BUGFIX: stray double semicolon removed
}

int pAttr[] =
{
WGL_TEXTURE_FORMAT_ARB, WGL_TEXTURE_RGBA_ARB, // Our p-buffer will have a texture format of RGBA
WGL_TEXTURE_TARGET_ARB, WGL_TEXTURE_2D_ARB, // Our texture target will be GL_TEXTURE_2D
0 // Zero terminates the list
};

// Allocate the pbuffer at half resolution.
// BUGFIX: the original tested `if( m_hRenderTexture )` -- inverted --
// so success was reported as failure and failure slipped through.
// Also validate the handle BEFORE fetching its DC/RC.
m_hRenderTexture = wglCreatePbufferARB( m_hDC, PixelFormat, m_uiWidth/2, m_uiHeight/2, pAttr );
if( !m_hRenderTexture )
{
g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to create Pbuffer" );
return S_FALSE;
}

m_hRenderTextureDC = wglGetPbufferDCARB( m_hRenderTexture );
m_hRenderTextureRC = wglCreateContext( m_hRenderTextureDC );
if( !m_hRenderTextureDC || !m_hRenderTextureRC )
{
g_ProgramLog.Log( LOG_WARNING, "Pbuffer Error: Unable to create Pbuffer DC/RC" );
return S_FALSE;
}


Is the section I've changed. Prizes for anyone that can spot the bit I did wrong last time because this works fine! Maybe its my tired eyes but apart from a couple of variable names everything is identical.

It's nothing to do with the depth-buffer precision. :)

Share this post


Link to post
Share on other sites
That'll teach you to post in the middle of me editing. ;)

Depth bits are now up to 24 and it still works fine. o_O

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement
×

Important Information

By using GameDev.net, you agree to our community Guidelines, Terms of Use, and Privacy Policy.

We are the game development community.

Whether you are an indie, hobbyist, AAA developer, or just trying to learn, GameDev.net is the place for you to learn, share, and connect with the games industry. Learn more About Us or sign up!

Sign me up!