Podcast recording on OSX fixed. Details:

The threading logic was changed somewhat. UBQuickTimeFile's run()
function no longer drains the video/audio sample queues in a while
loop. Instead, it runs once, and uses Apple's Dispatch Queues to handle
writing the queued samples.
One dispatch queue was therefore added for each input to the AssetWriter.
Each input is associated with one queue, and the requestMediaDataWhenReady
function ensures that the inputs fetch any available samples whenever
they are able to write them.
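
In the new run(), each AVAssetWriterInput drains its queue from a block
scheduled on its own dispatch queue, roughly like the following sketch
(video input only; the audio input is wired the same way, using
mAudioDispatchQueue, audioQueue and appendSampleBuffer). This mirrors the
committed code rather than reproducing it exactly:

    [mVideoWriterInput requestMediaDataWhenReadyOnQueue:mVideoDispatchQueue
                       usingBlock:^{
        // Invoked by AVFoundation whenever the input can accept more media;
        // pop one queued frame and hand it to the writer input.
        frameQueueMutex.lock();
        if (!mShouldStopCompression &&
            !frameQueue.isEmpty() &&
            [mVideoWriterInput isReadyForMoreMediaData])
        {
            VideoFrame frame = frameQueue.dequeue();
            appendVideoFrame(frame.buffer, frame.timestamp);
        }
        frameQueueMutex.unlock();
    }];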

In testing (short podcasts, recorded repeatedly), this solves all the problems
encountered earlier, such as the program hanging because one input was not
ready, or corrupt files caused (presumably) by missing samples.
Craig Watson 9 years ago
parent fc554baecd
commit 427cdbb68d
  1. src/podcast/quicktime/UBAudioQueueRecorder.cpp (3 changed lines)
  2. src/podcast/quicktime/UBQuickTimeFile.h (15 changed lines)
  3. src/podcast/quicktime/UBQuickTimeFile.mm (123 changed lines)
  4. src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp (5 changed lines)

src/podcast/quicktime/UBAudioQueueRecorder.cpp

@@ -257,7 +257,8 @@ bool UBAudioQueueRecorder::init(const QString& waveInDeviceName)
     int nbBuffers = 6;
     mSampleBufferSize = sAudioFormat.mSampleRate * sAudioFormat.mChannelsPerFrame
-                        * sAudioFormat.mChannelsPerFrame * mBufferLengthInMs / 1000; // 44.1 Khz * stereo * 16bit * buffer length
+                        * sAudioFormat.mBitsPerChannel / 8 * mBufferLengthInMs / 1000;
+    // BufferSize [bytes] = Length [s] * 44100 frames per second [Fr./s] * channels per frame [Ch./Fr.] * bytes per channel [bytes/Ch.]
 
     for (int i = 0; i < nbBuffers; i++)
     {

src/podcast/quicktime/UBQuickTimeFile.h

@@ -89,12 +89,13 @@ class UBQuickTimeFile : public QThread
 signals:
     void audioLevelChanged(quint8 level);
     void compressionSessionStarted();
+    void compressionFinished();
 
 protected:
     void run();
 
 private slots:
-    void appendAudioBuffer(void* pBuffer, long pLength);
+    void enqueueAudioBuffer(void* pBuffer, long pLength);
 
 private:

@@ -102,7 +103,7 @@ class UBQuickTimeFile : public QThread
     void setLastErrorMessage(const QString& error);
 
     void appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp);
+    bool appendSampleBuffer(CMSampleBufferRef sampleBuffer);
 
     QSize mFrameSize;
     QString mVideoFileName;

@@ -125,12 +126,16 @@ class UBQuickTimeFile : public QThread
     volatile bool mShouldStopCompression;
     volatile bool mCompressionSessionRunning;
 
     QString mLastErrorMessage;
     QString mAudioRecordingDeviceName;
 
+    dispatch_queue_t mVideoDispatchQueue;
+    dispatch_queue_t mAudioDispatchQueue;
+
+    static QQueue<CMSampleBufferRef> audioQueue;
+    static QMutex audioQueueMutex;
+    static QMutex audioWriterMutex;
 };
 
 #endif /* UBQUICKTIMEFILE_H_ */

src/podcast/quicktime/UBQuickTimeFile.mm

@@ -41,16 +41,25 @@ QQueue<UBQuickTimeFile::VideoFrame> UBQuickTimeFile::frameQueue;
 QMutex UBQuickTimeFile::frameQueueMutex;
 QWaitCondition UBQuickTimeFile::frameBufferNotEmpty;
 
+QQueue<CMSampleBufferRef> UBQuickTimeFile::audioQueue;
+QMutex UBQuickTimeFile::audioQueueMutex;
+QMutex UBQuickTimeFile::audioWriterMutex;
+
 UBQuickTimeFile::UBQuickTimeFile(QObject * pParent)
     : QThread(pParent)
     , mVideoWriter(0)
     , mVideoWriterInput(0)
     , mAdaptor(0)
+    , mAudioWriterInput(0)
+    , mWaveRecorder(0)
     , mTimeScale(1000)
     , mRecordAudio(true)
     , mShouldStopCompression(false)
     , mCompressionSessionRunning(false)
 {
+    mVideoDispatchQueue = dispatch_queue_create("org.oef.VideoDispatchQueue", NULL);
+    mAudioDispatchQueue = dispatch_queue_create("org.oef.AudioDispatchQueue", NULL);
 }

@@ -66,6 +75,7 @@ bool UBQuickTimeFile::init(const QString& pVideoFileName, const QString& pProfil
     mFrameSize = pFrameSize;
     mVideoFileName = pVideoFileName;
     mRecordAudio = pRecordAudio;
+    //mRecordAudio = false;
 
     if (mRecordAudio)
         mAudioRecordingDeviceName = audioRecordingDevice;

@@ -90,33 +100,36 @@ void UBQuickTimeFile::run()
     mCompressionSessionRunning = true;
     emit compressionSessionStarted();
 
-    do {
-        // Video
-        frameQueueMutex.lock();
-        //frameBufferNotEmpty.wait(&UBQuickTimeFile::frameQueueMutex);
-        frameBufferNotEmpty.wait(&UBQuickTimeFile::frameQueueMutex);
-
-        if (!frameQueue.isEmpty()) {
-            QQueue<VideoFrame> localQueue = frameQueue;
-            frameQueue.clear();
-            frameQueueMutex.unlock();
-
-            while (!localQueue.isEmpty()) {
-                if ([mVideoWriterInput isReadyForMoreMediaData]) {
-                    VideoFrame frame = localQueue.dequeue();
-                    appendVideoFrame(frame.buffer, frame.timestamp);
-                }
-                else
-                    usleep(10000);
-            }
-        }
-        else
-            frameQueueMutex.unlock();
-
-    } while(!mShouldStopCompression);
-
-    endSession();
+    [mVideoWriterInput requestMediaDataWhenReadyOnQueue:mVideoDispatchQueue
+                       usingBlock:^{
+        frameQueueMutex.lock();
+
+        // TODO: monitor performance with and without this
+        if (!mShouldStopCompression &&
+            !frameQueue.isEmpty() &&
+            [mVideoWriterInput isReadyForMoreMediaData])
+        {
+            VideoFrame frame = frameQueue.dequeue();
+            appendVideoFrame(frame.buffer, frame.timestamp);
+        }
+
+        frameQueueMutex.unlock();
+    }];
+
+    if (mRecordAudio) {
+        [mAudioWriterInput requestMediaDataWhenReadyOnQueue:mAudioDispatchQueue
+                           usingBlock:^{
+            audioQueueMutex.lock();
+
+            if (!audioQueue.isEmpty() &&
+                [mAudioWriterInput isReadyForMoreMediaData])
+            {
+                appendSampleBuffer(audioQueue.dequeue());
+            }
+
+            audioQueueMutex.unlock();
+        }];
+    }
 }

@@ -197,7 +210,7 @@ bool UBQuickTimeFile::beginSession()
     if(mWaveRecorder->init(mAudioRecordingDeviceName)) {
         connect(mWaveRecorder, &UBAudioQueueRecorder::newWaveBuffer,
-                this, &UBQuickTimeFile::appendAudioBuffer);
+                this, &UBQuickTimeFile::enqueueAudioBuffer);
 
         connect(mWaveRecorder, SIGNAL(audioLevelChanged(quint8)),
                 this, SIGNAL(audioLevelChanged(quint8)));

@@ -223,7 +236,7 @@ bool UBQuickTimeFile::beginSession()
         AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC],
         AVEncoderBitRateKey : [NSNumber numberWithInteger:128000],
         AVSampleRateKey : [NSNumber numberWithInteger:44100],
-        AVChannelLayoutKey : channelLayoutAsData,
+        //AVChannelLayoutKey : channelLayoutAsData,
         AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:1]
     };

@@ -232,8 +245,6 @@ bool UBQuickTimeFile::beginSession()
         NSCParameterAssert([mVideoWriter canAddInput:mAudioWriterInput]);
         [mVideoWriter addInput:mAudioWriterInput];
-
-        qDebug() << "audio writer input created and added";
     }

@@ -252,14 +263,20 @@ bool UBQuickTimeFile::beginSession()
  */
 void UBQuickTimeFile::endSession()
 {
+    //qDebug() << "Ending session";
     [mVideoWriterInput markAsFinished];
-    [mVideoWriter finishWritingWithCompletionHandler:^{}];
 
-    [mAdaptor release];
-    [mVideoWriterInput release];
-    [mVideoWriter release];
-    [mAudioWriterInput release];
+    [mVideoWriter finishWritingWithCompletionHandler:^{
+        [mAdaptor release];
+        [mVideoWriterInput release];
+
+        if (mAudioWriterInput != 0)
+            [mAudioWriterInput release];
+
+        [mVideoWriter release];
 
-    mAdaptor = nil;
-    mVideoWriterInput = nil;
-    mVideoWriter = nil;
+        mAdaptor = nil;
+        mVideoWriterInput = nil;
+        mVideoWriter = nil;

@@ -270,6 +287,10 @@ void UBQuickTimeFile::endSession()
             mWaveRecorder->deleteLater();
         }
+
+        emit compressionFinished();
+    }];
 }
 
 /**

@@ -277,12 +298,22 @@ void UBQuickTimeFile::endSession()
  */
 void UBQuickTimeFile::stop()
 {
+    //qDebug() << "requested end of recording";
     mShouldStopCompression = true;
+
+    frameQueueMutex.lock();
+    audioQueueMutex.lock();
+
+    endSession();
+
+    frameQueueMutex.unlock();
+    audioQueueMutex.unlock();
 }
 
 /**
- * \brief Create a CVPixelBufferRef from the input adaptor's CVPixelBufferPool
+ * \brief Create and return a CVPixelBufferRef
+ *
+ * The CVPixelBuffer is created from the input adaptor's CVPixelBufferPool
  */
 CVPixelBufferRef UBQuickTimeFile::newPixelBuffer()
 {

@@ -305,6 +336,7 @@ CVPixelBufferRef UBQuickTimeFile::newPixelBuffer()
  */
 void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp)
 {
+    //qDebug() << "appending video frame";
     CMTime t = CMTimeMake((msTimeStamp * mTimeScale / 1000.0), mTimeScale);
 
     bool added = [mAdaptor appendPixelBuffer: pixelBuffer

@@ -329,10 +361,11 @@ void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTime
  * (implemented in the UBAudioQueueRecorder class) and the recording, handled
  * by the AVAssetWriterInput instance mAudioWriterInput.
  */
-void UBQuickTimeFile::appendAudioBuffer(void* pBuffer,
+void UBQuickTimeFile::enqueueAudioBuffer(void* pBuffer,
                                         long pLength)
 {
-    if(!mRecordAudio)
+    if(!mRecordAudio || mShouldStopCompression)
         return;

@@ -370,31 +403,37 @@ void UBQuickTimeFile::appendAudioBuffer(void* pBuffer,
                                         NULL,
                                         &sampleBuffer);
 
-    // Wait until the AssetWriterInput is ready, but no more than 100ms
-    // (bit of a duct tape solution; cleaner solution would be to use a QQueue,
-    // similar to the VideoWriter)
-    int waitTime = 0;
-    while(![mAudioWriterInput isReadyForMoreMediaData] && waitTime < 100) {
-        waitTime += 10;
-        usleep(10000);
-    }
-
-    if ([mAudioWriterInput isReadyForMoreMediaData]) {
-        if(![mAudioWriterInput appendSampleBuffer:sampleBuffer])
-            setLastErrorMessage(QString("Failed to append sample buffer to audio input"));
-    }
-    else
-        setLastErrorMessage(QString("AudioWriterInput not ready. Buffer dropped."));
-
-    CFRelease(sampleBuffer);
-    CFRelease(blockBuffer);
-
-    // The audioQueueBuffers are all freed when UBAudioQueueRecorder::close() is called
-}
+    //qDebug() << "enqueueAudioBuffer, timeStamp = " << timeStamp.value << " / " << timeStamp.timescale;
+
+    audioQueueMutex.lock();
+    audioQueue.enqueue(sampleBuffer);
+    audioQueueMutex.unlock();
+
+    //qDebug() << "buffer enqueued";
+}
+
+bool UBQuickTimeFile::appendSampleBuffer(CMSampleBufferRef sampleBuffer)
+{
+    bool success = [mAudioWriterInput appendSampleBuffer:sampleBuffer];
+
+    if (!success)
+        setLastErrorMessage(QString("Failed to append sample buffer to audio input"));
+
+    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
+
+    CFRelease(sampleBuffer);
+    CFRelease(blockBuffer);
+
+    return success;
+}
 
 /**
  * \brief Print an error message to the terminal, and store it
  */

src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp

@@ -68,7 +68,7 @@ bool UBQuickTimeVideoEncoder::start()
         return false;
     }
 
-    connect(&mQuickTimeCompressionSession, SIGNAL(finished()), this, SLOT(compressionFinished()));
+    connect(&mQuickTimeCompressionSession, SIGNAL(compressionFinished()), this, SLOT(compressionFinished()));
     connect(&mQuickTimeCompressionSession, SIGNAL(audioLevelChanged(quint8)), this, SIGNAL(audioLevelChanged(quint8)));
 
     mQuickTimeCompressionSession.start();

@@ -78,11 +78,8 @@ bool UBQuickTimeVideoEncoder::start()
 bool UBQuickTimeVideoEncoder::stop()
 {
-    if (mQuickTimeCompressionSession.isRunning())
-    {
-        mQuickTimeCompressionSession.stop();
-    }
+    mQuickTimeCompressionSession.stop();
 
     UBQuickTimeFile::frameBufferNotEmpty.wakeAll();
