Commit 5f8413a3 authored by Christophe Courtaut, committed by Hugo Beauzee-Luyssen

Use smem pts difference between blocks for imem pts

parent 8b6a97ec
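
In short: instead of deriving the imem timestamps from a frame counter and the output fps (video) or from a sample counter and the rate (audio), the smem callbacks now record the pts gap between two consecutive blocks, and the imem lock callbacks advance their running pts by that gap. A minimal sketch of the idea with illustrative names (neither helper exists in the patch):

    #include <QtGlobal>
    #include <cstdint>

    // Producer side (smem audio/video callback): publish the pts gap
    // between the current block and the previous one.
    static quint64 ptsDiffFromSmem( quint64 pts )
    {
        static quint64 previous_pts = pts;   // first block yields a diff of 0
        quint64 diff = pts - previous_pts;
        previous_pts = pts;
        return diff;
    }

    // Consumer side (imem lock callback): accumulate the published gap
    // instead of recomputing the timestamp from a frame or sample counter.
    static void advanceImemPts( int64_t *pts, quint64 &streamPts, quint64 ptsDiff )
    {
        streamPts += ptsDiff;
        *pts = static_cast<int64_t>( streamPts );
    }
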
@@ -45,6 +45,7 @@ VideoFrame::VideoFrame(VideoFrame const & tocopy) : QSharedData( tocopy )
         nboctets = tocopy.nboctets;
         nbpixels = tocopy.nboctets / Pixel::NbComposantes;
+        ptsDiff = tocopy.ptsDiff;
         frame.octets = new quint8[tocopy.nboctets];
         for ( i = 0; i < nboctets; ++i )
@@ -54,6 +55,7 @@ VideoFrame::VideoFrame(VideoFrame const & tocopy) : QSharedData( tocopy )
     {
         nboctets = 0;
         nbpixels = 0;
+        ptsDiff = 0;
         frame.octets = NULL;
     }
 }
@@ -85,6 +87,7 @@ LightVideoFrame::LightVideoFrame(quint32 nboctets)
     m_videoFrame->nboctets = nboctets;
     m_videoFrame->nbpixels = nboctets / Pixel::NbComposantes;
     m_videoFrame->frame.octets = new quint8[nboctets];
+    m_videoFrame->ptsDiff = 0;
 };
 LightVideoFrame::LightVideoFrame(quint8 const * tocopy, quint32 nboctets)
@@ -95,6 +98,7 @@ LightVideoFrame::LightVideoFrame(quint8 const * tocopy, quint32 nboctets)
     m_videoFrame->nboctets = nboctets;
     m_videoFrame->nbpixels = nboctets / Pixel::NbComposantes;
     m_videoFrame->frame.octets = new quint8[nboctets];
+    m_videoFrame->ptsDiff = 0;
     for ( i = 0; i < m_videoFrame->nboctets; ++i )
         m_videoFrame->frame.octets[i] = tocopy[i];
@@ -103,7 +107,7 @@ LightVideoFrame::LightVideoFrame(quint8 const * tocopy, quint32 nboctets)
 LightVideoFrame::~LightVideoFrame()
 {
 };
 VideoFrame const * LightVideoFrame::operator->(void) const
 {
     return ( m_videoFrame.data() );
...
@@ -58,6 +58,7 @@ struct VideoFrame : public QSharedData
     RawVideoFrame frame;
     quint32 nbpixels;
     quint32 nboctets;
+    quint64 ptsDiff;
 };
 class LightVideoFrame
@@ -75,7 +76,7 @@ public:
     VideoFrame const & operator*(void) const;
     VideoFrame* operator->(void);
     VideoFrame& operator*(void);
 private:
     QSharedDataPointer<VideoFrame> m_videoFrame;
...
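
Because ptsDiff lives in the implicitly shared VideoFrame, it follows the frame through copies: the copy constructor above carries it over and the allocating constructors reset it to 0. A small usage sketch (it assumes LightVideoFrame is copyable, which these hunks do not show, and the header file name is assumed):

    #include <QtGlobal>
    #include "LightVideoFrame.h"            // header shown above; file name assumed

    void ptsDiffTravelsWithTheFrame()
    {
        LightVideoFrame frame( 64 );        // allocating ctor above: ptsDiff starts at 0
        frame->ptsDiff = 40000;             // e.g. 40 ms between blocks, in microseconds

        LightVideoFrame copy = frame;       // assumes a copy constructor (not shown here)
        Q_ASSERT( copy->ptsDiff == 40000 ); // the timing info follows the shared data
    }
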
@@ -44,6 +44,7 @@ WorkflowRenderer::WorkflowRenderer() :
     m_waitCond = new QWaitCondition;
     m_renderVideoFrame = new unsigned char[VIDEOHEIGHT * VIDEOWIDTH * Pixel::NbComposantes];
     m_videoEsHandler = new EsHandler;
     m_videoEsHandler->self = this;
     m_videoEsHandler->type = Video;
@@ -52,7 +53,7 @@ WorkflowRenderer::WorkflowRenderer() :
     m_audioEsHandler->self = this;
     m_audioEsHandler->type = Audio;
-    m_nbChannels = 1;
+    m_nbChannels = 2;
     m_rate = 48000;
     sprintf( videoString, "width=%i:height=%i:dar=%s:fps=%s:data=%lld:codec=%s:cat=2:caching=0",
@@ -122,22 +123,26 @@ int WorkflowRenderer::lock( void *datas, int64_t *dts, int64_t *pts, unsigne
 int WorkflowRenderer::lockVideo( WorkflowRenderer* self, int64_t *pts, size_t *bufferSize, void **buffer )
 {
+    quint64 ptsDiff = 0;
     if ( self->m_stopping == false )
     {
         MainWorkflow::OutputBuffers* ret = self->m_mainWorkflow->getSynchroneOutput( MainWorkflow::VideoTrack );
         memcpy( self->m_renderVideoFrame, (*(ret->video))->frame.octets, (*(ret->video))->nboctets );
         self->m_videoBuffSize = (*(ret->video))->nboctets;
+        ptsDiff = (*(ret->video))->ptsDiff;
     }
-    *pts = qRound64( (float)( self->m_pts * 1000000.0f ) / self->m_outputFps );
-    ++self->m_pts;
+    self->m_pts = *pts = ptsDiff + self->m_pts;
+    //qDebug() << "Video pts" << self->m_pts << "diff" << ptsDiff;
+    //*pts = qRound64( (float)( self->m_pts * 1000000.0f ) / self->m_outputFps );
+    //++self->m_pts;
     *buffer = self->m_renderVideoFrame;
     *bufferSize = self->m_videoBuffSize;
     return 0;
 }
 int WorkflowRenderer::lockAudio( WorkflowRenderer* self, int64_t *pts, size_t *bufferSize, void **buffer )
 {
+    quint64 ptsDiff;
     if ( self->m_paused == true )
         return 1;
     if ( self->m_stopping == false )
@@ -151,6 +156,7 @@ int WorkflowRenderer::lockAudio( WorkflowRenderer* self, int64_t *pts, size
         nbSample = self->m_renderAudioSample->nbSample;
         *buffer = self->m_renderAudioSample->buff;
         *bufferSize = self->m_renderAudioSample->size;
+        ptsDiff = self->m_renderAudioSample->ptsDiff;
     }
     else
     {
@@ -163,9 +169,12 @@ int WorkflowRenderer::lockAudio( WorkflowRenderer* self, int64_t *pts, size
         memset( WorkflowRenderer::m_silencedAudioBuffer, 0, buffSize );
         *buffer = WorkflowRenderer::m_silencedAudioBuffer;
         *bufferSize = buffSize;
+        ptsDiff = self->m_pts - self->m_audioPts;
     }
-    *pts = self->m_audioPts * 1000000.0f / self->m_rate;
-    self->m_audioPts += nbSample * self->m_nbChannels;
+    self->m_audioPts = *pts = self->m_audioPts + ptsDiff;
+    //qDebug() << "Audio pts" << self->m_audioPts << "diff" << ptsDiff;
+    //*pts = self->m_audioPts * 1000000.0f / self->m_rate;
+    //self->m_audioPts += nbSample * self->m_nbChannels;
     return 0;
 }
...
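
Both imem lock callbacks now share the same update rule: add the producer-reported gap to the running pts and hand that value to VLC. The only asymmetry is the audio fallback while stopping, where the silent buffer has no smem pts, so the gap is taken from the distance to the video clock. The rule, isolated (the helper name is illustrative, not part of the patch):

    #include <QtGlobal>
    #include <cstdint>

    // Same arithmetic as "self->m_pts = *pts = ptsDiff + self->m_pts" above.
    static inline int64_t advanceBy( quint64 &runningPts, quint64 ptsDiff )
    {
        runningPts += ptsDiff;
        return static_cast<int64_t>( runningPts );
    }

    // Audio fallback while stopping: with ptsDiff = m_pts - m_audioPts the update
    // leaves m_audioPts equal to m_pts, so the audio clock catches up with video.
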
@@ -92,7 +92,7 @@ void AudioClipWorkflow::lock( AudioClipWorkflow* cw, uint8_t** pcm_buffer
 void AudioClipWorkflow::unlock( AudioClipWorkflow* cw, uint8_t* pcm_buffer,
                                 unsigned int channels, unsigned int rate,
                                 unsigned int nb_samples, unsigned int bits_per_sample,
-                                unsigned int size, qint64 pts )
+                                unsigned int size, quint64 pts )
 {
 //    qDebug() << "pts:" << pts << "nb channels" << channels << "rate:" << rate <<
 //            "size:" << size << "nb_samples:" << nb_samples;
@@ -100,12 +100,17 @@ void AudioClipWorkflow::unlock( AudioClipWorkflow* cw, uint8_t* pcm_buffe
     Q_UNUSED( rate );
     Q_UNUSED( bits_per_sample );
     Q_UNUSED( size );
-    Q_UNUSED( pts );
+    static quint64 previous_pts = pts;
+    static quint64 current_pts = pts;
+    //Q_UNUSED( pts );
+    previous_pts = current_pts;
+    current_pts = pts;
     if ( cw->m_buffer->buff != NULL )
     {
         cw->m_buffer->nbSample = nb_samples;
         cw->m_buffer->nbChannels = channels;
+        cw->m_buffer->ptsDiff = current_pts - previous_pts;
     }
     cw->m_renderLock->unlock();
...
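
The function-local statics previous_pts / current_pts are initialized from the first pts, so the first block reports a ptsDiff of 0; being statics, they are also shared by every AudioClipWorkflow instance rather than tracked per workflow. A per-instance variant would keep the bookkeeping in the object instead (this struct and its members are hypothetical, not part of this commit):

    #include <QtGlobal>

    // Per-instance alternative to the function-local statics above (sketch only).
    struct PtsTracker
    {
        quint64 previousPts;
        bool    hasPrevious;

        PtsTracker() : previousPts( 0 ), hasPrevious( false ) {}

        quint64 diff( quint64 pts )
        {
            quint64 d = hasPrevious ? pts - previousPts : 0;
            previousPts = pts;
            hasPrevious = true;
            return d;           // caller would store it in m_buffer->ptsDiff
        }
    };
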
@@ -34,6 +34,7 @@ class AudioClipWorkflow : public ClipWorkflow
         size_t size;
         unsigned int nbSample;
         unsigned int nbChannels;
+        quint64 ptsDiff;
     };
     AudioClipWorkflow( Clip* clip );
     ~AudioClipWorkflow();
@@ -49,7 +50,7 @@ class AudioClipWorkflow : public ClipWorkflow
     static void unlock( AudioClipWorkflow* clipWorkflow, uint8_t* pcm_buffer,
                         unsigned int channels, unsigned int rate,
                         unsigned int nb_samples, unsigned int bits_per_sample,
-                        unsigned int size, qint64 pts );
+                        unsigned int size, quint64 pts );
 };
 #endif // AUDIOCLIPWORKFLOW_H
@@ -99,23 +99,30 @@ void VideoClipWorkflow::lock( VideoClipWorkflow* cw, void** pp_ret, int size
 {
     Q_UNUSED( size );
     cw->m_renderLock->lock();
-    *pp_ret = (*(cw->m_buffer))->frame.pixels;
+    *pp_ret = (*(cw->m_buffer))->frame.octets;
 }
-void VideoClipWorkflow::unlock( VideoClipWorkflow* cw, void* buffer, int width, int height, int bpp, int size )
+void VideoClipWorkflow::unlock( VideoClipWorkflow* cw, void* buffer, int width, int height, int bpp, int size, quint64 pts )
 {
     Q_UNUSED( buffer );
     Q_UNUSED( width );
     Q_UNUSED( height );
     Q_UNUSED( bpp );
     Q_UNUSED( size );
+    static quint64 previous_pts = pts;
+    static quint64 current_pts = pts;
     cw->m_renderLock->unlock();
     cw->m_stateLock->lockForWrite();
+    previous_pts = current_pts;
+    current_pts = pts;
     if ( cw->m_state == Rendering )
     {
         QMutexLocker lock( cw->m_condMutex );
+        (*(cw->m_buffer))->ptsDiff = current_pts - previous_pts;
         cw->m_state = Sleeping;
         cw->m_stateLock->unlock();
@@ -123,6 +130,7 @@ void VideoClipWorkflow::unlock( VideoClipWorkflow* cw, void* buffer, int widt
         QMutexLocker lock2( cw->m_renderWaitCond->getMutex() );
         cw->m_renderWaitCond->wake();
     }
     cw->emit renderComplete( cw );
 //    qDebug() << "Emmiting render completed";
...
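
For the video path, the full route of the new timing information looks like this (sketch, call sites taken from the hunks above):

    // 1. VLC's smem output calls VideoClipWorkflow::unlock( ..., pts ) and the
    //    workflow stores the gap: (*(cw->m_buffer))->ptsDiff = current_pts - previous_pts;
    // 2. MainWorkflow::getSynchroneOutput( MainWorkflow::VideoTrack ) hands that
    //    frame to the renderer.
    // 3. WorkflowRenderer::lockVideo() reads (*(ret->video))->ptsDiff and feeds
    //    self->m_pts = *pts = ptsDiff + self->m_pts  to the imem input.
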
@@ -39,7 +39,7 @@ class VideoClipWorkflow : public ClipWorkflow
         LightVideoFrame* m_buffer;
         virtual void initVlcOutput();
         static void lock( VideoClipWorkflow* clipWorkflow, void** pp_ret, int size );
-        static void unlock( VideoClipWorkflow* clipWorkflow, void* buffer, int width, int height, int bpp, int size );
+        static void unlock( VideoClipWorkflow* clipWorkflow, void* buffer, int width, int height, int bpp, int size, quint64 pts );
 };
 #endif // VIDEOCLIPWORKFLOW_H