• Advertisement
Sign in to follow this  

Help! Using FFMPEG + OpenGL Data compatibility

This topic is 2945 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

Hello all! (Excuse me for my english level) I made a class, called "CMovieStream2" that play Video Files using FFMPEG.
#pragma once

#ifdef __cplusplus
extern "C" {
#endif

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"

#ifdef __cplusplus
}
#endif

#pragma comment( lib, "avcodec.lib")
#pragma comment( lib, "avformat.lib") 
#pragma comment( lib, "swscale.lib") 
#pragma comment( lib, "avutil.lib") 

class CMovieStream2
{
public:
	CMovieStream2(void);
	bool OpenAVI(LPCTSTR szFile);
	void GrabAVIFrame();
	int Update(DWORD milliseconds);
public:
	virtual ~CMovieStream2(void);
public:
	uint8_t 			*data;
	int					width;
	int					height;
	int					nwidth;
	int					nheight;
	DWORD dwNext;
	double dNext;
	double dDur;
	DWORD dwDur;
	DWORD dwFrame;
	DWORD dwFrameSys;
	void SetZeroCount(void);
	void CloseAVI(void);
	bool bLoad;
	bool bLoop;
	bool bResize;
	bool bActive;
	void SetResize(void);
	void ClearResize(void);
private:
	LPSTR chFile;
	int CreateContext();
	void DestroyContext();
	AVFormatContext *pFormatCtx;
    int             videoStream;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVFrame         *pFrame; 
    AVFrame         *pFrameRGB;
    AVPacket        packet;
	AVStream* video_st;
	struct SwsContext *img_convert_ctx;
    int             frameFinished;
    int             numBytes;
    uint8_t         *buffer;
	double dTimeBase;
	double dPts;
};


#include "MovieStream2.h"

CMovieStream2::CMovieStream2(void)
: bLoad(false)
, bLoop(false)
, bResize(false)
{
	bActive = true;
	data = 0;
	dwNext = 0;
	dNext = 0;
	width = 0;
	height = 0;
	nwidth = 0;
	nheight = 0;
	dwFrame = 0;
	dwFrameSys = 0;
	dPts = 0.0;
}

CMovieStream2::~CMovieStream2(void)
{
}

bool CMovieStream2::OpenAVI(LPCTSTR szFile)
{
	UnicodeToAnsi(szFile, &chFile);
	if(!CreateContext()) return 0;
	pFrameRGB=avcodec_alloc_frame();
    if(pFrameRGB==NULL)
	{
		printf("FFMPEG: Can't Alloc Frame! \"%s\"\n", chFile);
		DestroyContext();
        return 0;
	}

	nwidth = GetPowerOf2(pCodecCtx->width), nheight = GetPowerOf2(pCodecCtx->height);

    numBytes=avpicture_get_size(PIX_FMT_RGB24, nwidth, nheight);
    buffer=new uint8_t[numBytes];

    avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24, nwidth, nheight);

	SetResize();
	dwFrame=0;
	dwFrameSys = 0;
	dwNext=0;
	dNext=0;
	dPts=0;
	bLoad = true;
	bActive = true;
	return true;
}

#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)

void CMovieStream2::GrabAVIFrame()
{
	if(!bActive) return;
	int nDifFrame = dwFrame - dwFrameSys;
	if(nDifFrame > 0)
	{
		for(int k = 0; k < nDifFrame; k++)
		{
			if(av_read_frame(pFormatCtx, &packet)<0) 
			{
				bActive = false;
				break;
			}
			if(k != (nDifFrame-1)) av_free_packet(&packet);
		}
		if(!bActive) 
		{
			if(bLoop) SetZeroCount();
			return;
		}
		if(packet.stream_index==videoStream)
		{
			dPts = 0;
			// Decode video frame
			avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, 
				packet.data, packet.size);

			if(packet.dts == AV_NOPTS_VALUE 
			   && pFrame->opaque && *(uint64_t*)pFrame->opaque != AV_NOPTS_VALUE) 
			{
				dPts = double(*(uint64_t *)pFrame->opaque);
			} 
			else if(packet.dts != AV_NOPTS_VALUE) 
			{
				dPts = double(packet.dts);
			} 
			else 
			{
				// No se sabe el tiempo, entonces se asigna al tiempo actual
				dPts = 0.0;
			}
			dPts *= av_q2d(video_st->time_base);
			if(dPts == 0.0) dPts = dNext;

			// Did we get a video frame?
			if(frameFinished)
			{
				sws_scale (img_convert_ctx, pFrame->data, pFrame->linesize,
							0, pCodecCtx->height,
							pFrameRGB->data,pFrameRGB->linesize);
				data = pFrameRGB->data[0];
			}
		}
		av_free_packet(&packet);
	}
}

int CMovieStream2::Update(DWORD milliseconds)
{
	dwNext += milliseconds;
	dNext = double(dwNext)/1000;	// Convertir a segundos
	dwFrame = DWORD(dNext/dTimeBase);
	dwFrameSys = DWORD(dPts/dTimeBase);
	return 0;
}
void CMovieStream2::SetZeroCount(void)
{
	dwFrame=0;
	dwFrameSys = 0;
	dwNext=0;
	dNext=0;
	dPts=0;
	DestroyContext();
	CreateContext();
	bActive = true;
}

void CMovieStream2::CloseAVI(void)
{
	if(bLoad) 
	{
		DestroyContext();
		delete [] buffer;
		if(pFrameRGB != NULL) av_free(pFrameRGB);
	}
	bLoad = false;
	if(bResize) ClearResize();
}

void CMovieStream2::SetResize(void)
{
	static int sws_flags = SWS_BICUBIC;
	img_convert_ctx = sws_getContext(pCodecCtx->width, 
									pCodecCtx->height,
									pCodecCtx->pix_fmt,
									nwidth, 
									nheight,
									PIX_FMT_RGB24,
									sws_flags, NULL, NULL, NULL);
	bResize = true;
}

void CMovieStream2::ClearResize(void)
{
	sws_freeContext(img_convert_ctx);
	bResize = false;
}

// ** Funciones privadas
int CMovieStream2::CreateContext(void)
{
	if(av_open_input_file(&pFormatCtx, chFile, NULL, 0, NULL)!=0)
	{
		printf("FFMPEG: Can't open file! \"%s\"\n", chFile);
        return 0;
	}

    if(av_find_stream_info(pFormatCtx)<0)
	{
		printf("FFMPEG: Couldn't find stream information! \"%s\"\n", chFile);
        return 0;
	}

    dump_format(pFormatCtx, 0, chFile, false);
    videoStream=-1;
	int i;
    for(i=0; i<int(pFormatCtx->nb_streams); i++)
        if(pFormatCtx->streams->codec->codec_type==CODEC_TYPE_VIDEO)
        {
            videoStream=i;
            break;
        }
    if(videoStream==-1)
	{
		printf("FFMPEG: Didn't find a video stream! \"%s\"\n", chFile);
        return 0; 
	}

    // Get a pointer to the codec context for the video stream
	video_st=pFormatCtx->streams[videoStream];
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;

    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL)
	{
		printf("FFMPEG: Codec not found! \"%s\"\n", chFile);
        return 0;
	}

    if(avcodec_open(pCodecCtx, pCodec)<0)
	{
		printf("FFMPEG: Could not open codec! \"%s\"\n", chFile);
        return 0;
	}

    pFrame=avcodec_alloc_frame();

	dTimeBase = av_q2d(video_st->time_base);
	dDur = double(video_st->duration)*dTimeBase;
	dwDur = DWORD(dDur*1000);

	GrabAVIFrame();

	return 1;
}

void CMovieStream2::DestroyContext(void)
{
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    av_close_input_file(pFormatCtx);
}


A basical sintax for play video is the following:
main()
{
	CMovieStream2* movie = new MovieStream2();
	movie->OpenAVI(_T("file.avi"));
	movie->bLoop = false; // Or true
	.
	.
	.
	DWORD dwTick  = GetTick();
	DWORD dwTickLast = dwTick;
	while(1)
	{
		dwTick  = GetTick();
		movie->Update(dwTick-dwTickLast);
		movie->GrabAVIFrame();
		/*Here I use movie->data for glTexImage2D
		using movie->nwidth and movie->nheight as 
		Power-of-two dimmentions
		*/
		dwTickLast = dwTick;
	}
}



Well.. in some cases.. I need to play two videos at time, but all time (and some times when I play only one video) the image show rarely, like pixeled and very ugly. I don't know how to fix this. Maybe this error is caused by the "time control" written by me in the function CMovieStream2::GrabAVIFrame(), but, I don't kow how to fix this! Any Body can help me??!! PD: If somebody know how to play video files with OpenGL if can be compiled in Windows and linux (in both), Please, Reply. I need play video files! Thanks

Share this post


Link to post
Share on other sites
Advertisement
just wanted to say big thanks for this - it is very helpful!
only thing I had to do to get it to work for me was change:

avcodec_register_all();

to:
av_register_all();

in OpenAVI

ps: I will post a reply if I run into the issue you are having.

Share this post


Link to post
Share on other sites
Well... I've solved this issue (finally.... ufff!)
Merry Chrismas! I'll give you my CMovieStream2 Code!

MovieStream2.h

#ifdef __cplusplus
extern "C" {
#endif

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"

#ifdef __cplusplus
}
#endif

#pragma comment( lib, "avcodec.lib")
#pragma comment( lib, "avformat.lib")
#pragma comment( lib, "swscale.lib")
#pragma comment( lib, "avutil.lib")

class CMovieStream2
{
public:
CMovieStream2(void);
bool OpenAVI(LPCTSTR szFile);
void GrabAVIFrame();
int Update(DWORD milliseconds);
public:
virtual ~CMovieStream2(void);
public:
uint8_t *data;
int width;
int height;
int nwidth;
int nheight;
DWORD dwNext;
double dNext;
double dDur;
DWORD dwDur;
DWORD dwFrame;
DWORD dwFrameSys;
void SetZeroCount(void);
void CloseAVI(void);
bool bLoad;
bool bLoop;
bool bResize;
bool bActive;
void SetResize(void);
void ClearResize(void);
private:
LPSTR chFile;
int CreateContext();
void DestroyContext();
AVFormatContext *pFormatCtx;
int videoStream;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame;
AVFrame *pFrameRGB;
AVPacket packet;
AVStream* video_st;
struct SwsContext *img_convert_ctx;
int frameFinished;
int numBytes;
uint8_t *buffer;
double dTimeBase;
double dPts;
};




MovieStream2.cpp

#include "MovieStream2.h"

CMovieStream2::CMovieStream2(void)
: bLoad(false)
, bLoop(false)
, bResize(false)
{
bActive = true;
data = 0;
dwNext = 0;
dNext = 0;
width = 0;
height = 0;
nwidth = 0;
nheight = 0;
dwFrame = 0;
dwFrameSys = 0;
dPts = 0.0;
}

CMovieStream2::~CMovieStream2(void)
{
}

bool CMovieStream2::OpenAVI(LPCTSTR szFile)
{
UnicodeToAnsi(szFile, &chFile);
if(!CreateContext()) return 0;
pFrameRGB=avcodec_alloc_frame();
if(pFrameRGB==NULL)
{
printf("FFMPEG: Can't Alloc Frame! \"%s\"\n", chFile);
DestroyContext();
return 0;
}

nwidth = GetPowerOf2(pCodecCtx->width), nheight = GetPowerOf2(pCodecCtx->height);

numBytes=avpicture_get_size(PIX_FMT_RGB24, nwidth, nheight);
buffer=new uint8_t[numBytes];

avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24, nwidth, nheight);

SetResize();
dwFrame=0;
dwFrameSys = 0;
dwNext=0;
dNext=0;
dPts=0;
bLoad = true;
bActive = true;
return true;
}

//#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)

void CMovieStream2::GrabAVIFrame()
{
if(!bActive) return;
int nDifFrame = dwFrame - dwFrameSys;
if(nDifFrame > 0)
{
//for(int k = 0; k < nDifFrame; k++)
int k = 0;
while(k < nDifFrame)
{
if(av_read_frame(pFormatCtx, &packet)<0)
{
bActive = false;
break;
}
if(packet.stream_index==videoStream)
{
dPts = 0;
// Decode video frame
avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
packet.data, packet.size);

if(packet.dts == AV_NOPTS_VALUE
&& pFrame->opaque && *(uint64_t*)pFrame->opaque != AV_NOPTS_VALUE)
{
dPts = double(*(uint64_t *)pFrame->opaque);
}
else if(packet.dts != AV_NOPTS_VALUE)
{
dPts = double(packet.dts);
}
else
{
// No se sabe el tiempo, entonces se asigna al tiempo actual
dPts = 0.0;
}
dPts *= av_q2d(video_st->time_base);
if(dPts == 0.0) dPts = dNext;

// Did we get a video frame?
if(frameFinished)
{
k++;
}
}
if(k != (nDifFrame-1)) av_free_packet(&packet);
}
if(!bActive)
{
if(bLoop) SetZeroCount();
return;
}
sws_scale (img_convert_ctx, pFrame->data, pFrame->linesize,
0, pCodecCtx->height,
pFrameRGB->data,pFrameRGB->linesize);
data = pFrameRGB->data[0];
av_free_packet(&packet);
}
}

int CMovieStream2::Update(DWORD milliseconds)
{
dwNext += milliseconds;
dNext = double(dwNext)/1000; // Convertir a segundos
dwFrame = DWORD(dNext/dTimeBase);
dwFrameSys = DWORD(dPts/dTimeBase);
return 0;
}
void CMovieStream2::SetZeroCount(void)
{
dwFrame=0;
dwFrameSys = 0;
dwNext=0;
dNext=0;
dPts=0;
DestroyContext();
CreateContext();
bActive = true;
}

void CMovieStream2::CloseAVI(void)
{
if(bLoad)
{
DestroyContext();
delete [] buffer;
if(pFrameRGB != NULL) av_free(pFrameRGB);
}
bLoad = false;
if(bResize) ClearResize();
}

void CMovieStream2::SetResize(void)
{
static int sws_flags = SWS_BICUBIC;
img_convert_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
nwidth,
nheight,
PIX_FMT_RGB24,
sws_flags, NULL, NULL, NULL);
bResize = true;
}

void CMovieStream2::ClearResize(void)
{
sws_freeContext(img_convert_ctx);
bResize = false;
}

// ** Funciones privadas
int CMovieStream2::CreateContext(void)
{
if(av_open_input_file(&pFormatCtx, chFile, NULL, 0, NULL)!=0)
{
printf("FFMPEG: Can't open file! \"%s\"\n", chFile);
return 0;
}

if(av_find_stream_info(pFormatCtx)<0)
{
printf("FFMPEG: Couldn't find stream information! \"%s\"\n", chFile);
return 0;
}

dump_format(pFormatCtx, 0, chFile, false);
videoStream=-1;
int i;
for(i=0; i<int(pFormatCtx->nb_streams); i++)
if(pFormatCtx->streams->codec->codec_type==CODEC_TYPE_VIDEO)
{
videoStream=i;
break;
}
if(videoStream==-1)
{
printf("FFMPEG: Didn't find a video stream! \"%s\"\n", chFile);
return 0;
}

// Get a pointer to the codec context for the video stream
video_st=pFormatCtx->streams[videoStream];
pCodecCtx=pFormatCtx->streams[videoStream]->codec;

pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
{
printf("FFMPEG: Codec not found! \"%s\"\n", chFile);
return 0;
}

if(avcodec_open(pCodecCtx, pCodec)<0)
{
printf("FFMPEG: Could not open codec! \"%s\"\n", chFile);
return 0;
}

pFrame=avcodec_alloc_frame();

dTimeBase = av_q2d(video_st->time_base);
dDur = double(video_st->duration)*dTimeBase;
dwDur = DWORD(dDur*1000);

GrabAVIFrame();

return 1;
}

void CMovieStream2::DestroyContext(void)
{
av_free(pFrame);
avcodec_close(pCodecCtx);
av_close_input_file(pFormatCtx);
}




And... yeah! You must call "av_register_all" before all! in main() function

Bye!

Share this post


Link to post
Share on other sites
Does anyone know anything about useing audioqueues with ffmpeg on the iphone,
When I try to create a queue I get an error.

Video continues to play and is simply perfect.

ret = avcodec_open(enc, codec);

if (ret < 0) {

NSLog(@"Error: Could not open video decoder: %d", ret);

av_close_input_file(avfContext);

return;

}


if (audio_index >= 0) {

AudioStreamBasicDescription audioFormat;

audioFormat.mFormatID = -1;

audioFormat.mSampleRate = avfContext->streams[audio_index]->codec->sample_rate;

audioFormat.mFormatFlags = 0;

switch (avfContext->streams[audio_index]->codec->codec_id) {

case CODEC_ID_MP3:

audioFormat.mFormatID = kAudioFormatMPEGLayer3;

break;

case CODEC_ID_AAC:

audioFormat.mFormatID = kAudioFormatMPEG4AAC;

audioFormat.mFormatFlags = kMPEG4Object_AAC_Main;

break;

case CODEC_ID_AC3:

audioFormat.mFormatID = kAudioFormatAC3;

break;

default:

break;

}


if (audioFormat.mFormatID != -1) {

audioFormat.mBytesPerPacket = 0;

audioFormat.mFramesPerPacket = avfContext->streams[audio_index]->codec->frame_size;

audioFormat.mBytesPerFrame = 0;

audioFormat.mChannelsPerFrame = avfContext->streams[audio_index]->codec->channels;

audioFormat.mBitsPerChannel = 0;


if (ret = AudioQueueNewOutput(&audioFormat, audioQueueOutputCallback, self, NULL, NULL, 0, &audioQueue)) {

NSLog(@"Error creating audio output queue: %d", ret);

avfContext->streams[audio_index]->discard = AVDISCARD_ALL;

audio_index = -1;

}

else {

for (i = 0; i < AUDIO_BUFFER_QUANTITY; i++) {

NSLog(@"%d packet capacity, %d byte capacity", (int)(avfContext->streams[audio_index]->codec->sample_rate * AUDIO_BUFFER_SECONDS / avfContext->streams[audio_index]->codec->frame_size + 1), (int)(avfContext->streams[audio_index]->codec->bit_rate * AUDIO_BUFFER_SECONDS / 8));

if (ret = AudioQueueAllocateBufferWithPacketDescriptions(audioQueue, avfContext->streams[audio_index]->codec->bit_rate * AUDIO_BUFFER_SECONDS / 8, avfContext->streams[audio_index]->codec->sample_rate * AUDIO_BUFFER_SECONDS / avfContext->streams[audio_index]->codec->frame_size + 1, audioBuffers + i)) {

NSLog(@"Error: Could not allocate audio queue buffer: %d", ret);

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement