From 427cdbb68d2453784adb99308b7752dc4ae3874c Mon Sep 17 00:00:00 2001
From: Craig Watson
Date: Mon, 30 Nov 2015 10:55:40 +0100
Subject: [PATCH] Podcast recording on OSX fixed.

Details: The threading logic was reworked. UBQuickTimeFile's run() function
no longer enqueues the video/audio samples in a while loop. Instead, it runs
once and uses Apple's Dispatch Queues to handle enqueuing samples. One
dispatch queue was added for each input of the AssetWriter. Each input is
associated with one queue, and the requestMediaDataWhenReady function ensures
that the inputs fetch any available samples whenever they are able to write
them.

As tested (repeatedly, with short podcasts), this solves all the problems
encountered earlier, such as the program hanging because one input was not
ready, or corrupt files caused (presumably) by missing samples.
---
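For reference, a minimal sketch of the pull model described above, with
illustrative names (not the actual UBQuickTimeFile members): each
AVAssetWriterInput gets its own serial dispatch queue, and
requestMediaDataWhenReadyOnQueue:usingBlock: drains a shared sample queue
whenever the input can accept more data.

    #import <AVFoundation/AVFoundation.h>
    #include <QQueue>
    #include <QMutex>
    #include <QDebug>

    // Shared between the capture callback (producer) and the writer (consumer).
    static QQueue<CMSampleBufferRef> audioSampleQueue;
    static QMutex audioSampleQueueMutex;

    void startPullingAudioSamples(AVAssetWriterInput *audioWriterInput)
    {
        // One serial dispatch queue per AVAssetWriterInput.
        dispatch_queue_t audioDispatchQueue =
            dispatch_queue_create("org.example.AudioDispatchQueue", NULL);

        // AVFoundation invokes the block whenever the input is ready for more
        // data, so the writer pulls samples instead of the recorder pushing
        // them and busy-waiting on isReadyForMoreMediaData.
        [audioWriterInput requestMediaDataWhenReadyOnQueue:audioDispatchQueue
                                                usingBlock:^{
            audioSampleQueueMutex.lock();
            if (!audioSampleQueue.isEmpty() &&
                [audioWriterInput isReadyForMoreMediaData])
            {
                CMSampleBufferRef sample = audioSampleQueue.dequeue();
                if (![audioWriterInput appendSampleBuffer:sample])
                    qWarning() << "Failed to append audio sample buffer";
                CFRelease(sample);
            }
            audioSampleQueueMutex.unlock();
        }];
    }

The same pattern is applied to the video input with its VideoFrame queue;
letting the writer pull samples on its own queue is what removes the old
busy-wait and the hang that occurred when one input never became ready.
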
 .../quicktime/UBAudioQueueRecorder.cpp        |   3 +-
 src/podcast/quicktime/UBQuickTimeFile.h       |  15 +-
 src/podcast/quicktime/UBQuickTimeFile.mm      | 153 +++++++++++-------
 .../quicktime/UBQuickTimeVideoEncoder.cpp     |   7 +-
 4 files changed, 110 insertions(+), 68 deletions(-)

diff --git a/src/podcast/quicktime/UBAudioQueueRecorder.cpp b/src/podcast/quicktime/UBAudioQueueRecorder.cpp
index b1354149..02e23117 100644
--- a/src/podcast/quicktime/UBAudioQueueRecorder.cpp
+++ b/src/podcast/quicktime/UBAudioQueueRecorder.cpp
@@ -257,7 +257,8 @@ bool UBAudioQueueRecorder::init(const QString& waveInDeviceName)
     int nbBuffers = 6;
     mSampleBufferSize = sAudioFormat.mSampleRate * sAudioFormat.mChannelsPerFrame
-                        * sAudioFormat.mChannelsPerFrame * mBufferLengthInMs / 1000; // 44.1 Khz * stereo * 16bit * buffer length
+                        * sAudioFormat.mBitsPerChannel / 8 * mBufferLengthInMs / 1000;
+                        // BufferSize [bytes] = Length [s] * 44100 frames per second [Fr./s] * channels per frame [Ch./Fr.] * bytes per channel [bytes/Ch.]
 
     for (int i = 0; i < nbBuffers; i++) {

diff --git a/src/podcast/quicktime/UBQuickTimeFile.h b/src/podcast/quicktime/UBQuickTimeFile.h
index 358d2a2b..1ad5959f 100644
--- a/src/podcast/quicktime/UBQuickTimeFile.h
+++ b/src/podcast/quicktime/UBQuickTimeFile.h
@@ -89,12 +89,13 @@ class UBQuickTimeFile : public QThread
     signals:
         void audioLevelChanged(quint8 level);
         void compressionSessionStarted();
+        void compressionFinished();
 
     protected:
         void run();
 
     private slots:
-        void appendAudioBuffer(void* pBuffer, long pLength);
+        void enqueueAudioBuffer(void* pBuffer, long pLength);
 
     private:
 
@@ -102,7 +103,7 @@ class UBQuickTimeFile : public QThread
         void setLastErrorMessage(const QString& error);
 
         void appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp);
-
+        bool appendSampleBuffer(CMSampleBufferRef sampleBuffer);
 
         QSize mFrameSize;
         QString mVideoFileName;
@@ -125,12 +126,16 @@ class UBQuickTimeFile : public QThread
         volatile bool mShouldStopCompression;
         volatile bool mCompressionSessionRunning;
-
-
         QString mLastErrorMessage;
-
         QString mAudioRecordingDeviceName;
 
+        dispatch_queue_t mVideoDispatchQueue;
+        dispatch_queue_t mAudioDispatchQueue;
+
+        static QQueue<CMSampleBufferRef> audioQueue;
+        static QMutex audioQueueMutex;
+
+        static QMutex audioWriterMutex;
 };
 
 #endif /* UBQUICKTIMEFILE_H_ */

diff --git a/src/podcast/quicktime/UBQuickTimeFile.mm b/src/podcast/quicktime/UBQuickTimeFile.mm
index 39aa6ac1..572c0c0a 100644
--- a/src/podcast/quicktime/UBQuickTimeFile.mm
+++ b/src/podcast/quicktime/UBQuickTimeFile.mm
@@ -41,16 +41,25 @@
 QQueue<VideoFrame> UBQuickTimeFile::frameQueue;
 QMutex UBQuickTimeFile::frameQueueMutex;
 QWaitCondition UBQuickTimeFile::frameBufferNotEmpty;
+
+QQueue<CMSampleBufferRef> UBQuickTimeFile::audioQueue;
+QMutex UBQuickTimeFile::audioQueueMutex;
+QMutex UBQuickTimeFile::audioWriterMutex;
+
 UBQuickTimeFile::UBQuickTimeFile(QObject * pParent)
     : QThread(pParent)
     , mVideoWriter(0)
     , mVideoWriterInput(0)
     , mAdaptor(0)
+    , mAudioWriterInput(0)
+    , mWaveRecorder(0)
     , mTimeScale(1000)
     , mRecordAudio(true)
     , mShouldStopCompression(false)
     , mCompressionSessionRunning(false)
 {
+    mVideoDispatchQueue = dispatch_queue_create("org.oef.VideoDispatchQueue", NULL);
+    mAudioDispatchQueue = dispatch_queue_create("org.oef.AudioDispatchQueue", NULL);
 }
 
@@ -66,6 +75,7 @@ bool UBQuickTimeFile::init(const QString& pVideoFileName, const QString& pProfil
     mFrameSize = pFrameSize;
     mVideoFileName = pVideoFileName;
     mRecordAudio = pRecordAudio;
+    //mRecordAudio = false;
 
     if (mRecordAudio)
         mAudioRecordingDeviceName = audioRecordingDevice;
@@ -90,33 +100,36 @@ void UBQuickTimeFile::run()
     mCompressionSessionRunning = true;
     emit compressionSessionStarted();
 
-    do {
-        // Video
-        frameQueueMutex.lock();
-
-        frameBufferNotEmpty.wait(&UBQuickTimeFile::frameQueueMutex);
+    [mVideoWriterInput requestMediaDataWhenReadyOnQueue:mVideoDispatchQueue
+                       usingBlock:^{
+        frameQueueMutex.lock();
+        //frameBufferNotEmpty.wait(&UBQuickTimeFile::frameQueueMutex); // TODO: monitor performance with and without this
 
-        if (!frameQueue.isEmpty()) {
-            QQueue<VideoFrame> localQueue = frameQueue;
-            frameQueue.clear();
-
-            frameQueueMutex.unlock();
-
-            while (!localQueue.isEmpty()) {
-                if ([mVideoWriterInput isReadyForMoreMediaData]) {
-                    VideoFrame frame = localQueue.dequeue();
+        if (!mShouldStopCompression &&
+            !frameQueue.isEmpty() &&
+            [mVideoWriterInput isReadyForMoreMediaData])
+        {
+            VideoFrame frame = frameQueue.dequeue();
             appendVideoFrame(frame.buffer, frame.timestamp);
-                }
-                else
-                    usleep(10000);
             }
-        }
-        else
-            frameQueueMutex.unlock();
-    } while(!mShouldStopCompression);
+
+        frameQueueMutex.unlock();
+
+    }];
+
+    if (mRecordAudio) {
+        [mAudioWriterInput requestMediaDataWhenReadyOnQueue:mAudioDispatchQueue
+                           usingBlock:^{
+            audioQueueMutex.lock();
+            if (!audioQueue.isEmpty() &&
+                [mAudioWriterInput isReadyForMoreMediaData])
+            {
+                appendSampleBuffer(audioQueue.dequeue());
+            }
+            audioQueueMutex.unlock();
 
-    endSession();
+        }];
+    }
 }
 
@@ -197,7 +210,7 @@ bool UBQuickTimeFile::beginSession()
     if(mWaveRecorder->init(mAudioRecordingDeviceName)) {
         connect(mWaveRecorder, &UBAudioQueueRecorder::newWaveBuffer,
-                this, &UBQuickTimeFile::appendAudioBuffer);
+                this, &UBQuickTimeFile::enqueueAudioBuffer);
 
         connect(mWaveRecorder, SIGNAL(audioLevelChanged(quint8)),
                 this, SIGNAL(audioLevelChanged(quint8)));
@@ -223,7 +236,7 @@ bool UBQuickTimeFile::beginSession()
         AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC],
         AVEncoderBitRateKey : [NSNumber numberWithInteger:128000],
         AVSampleRateKey : [NSNumber numberWithInteger:44100],
-        AVChannelLayoutKey : channelLayoutAsData,
+        //AVChannelLayoutKey : channelLayoutAsData,
         AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:1]
     };
 
@@ -232,8 +245,6 @@ bool UBQuickTimeFile::beginSession()
 
         NSCParameterAssert([mVideoWriter canAddInput:mAudioWriterInput]);
         [mVideoWriter addInput:mAudioWriterInput];
-
-        qDebug() << "audio writer input created and added";
     }
 
@@ -252,23 +263,33 @@
  */
 void UBQuickTimeFile::endSession()
 {
+    //qDebug() << "Ending session";
+
+    [mVideoWriterInput markAsFinished];
 
-    [mVideoWriter finishWritingWithCompletionHandler:^{}];
-    [mAdaptor release];
-    [mVideoWriterInput release];
-    [mVideoWriter release];
-    [mAudioWriterInput release];
+    [mVideoWriter finishWritingWithCompletionHandler:^{
+        [mAdaptor release];
+        [mVideoWriterInput release];
 
-    mAdaptor = nil;
-    mVideoWriterInput = nil;
-    mVideoWriter = nil;
-    mAudioWriterInput = nil;
+        if (mAudioWriterInput != 0)
+            [mAudioWriterInput release];
+
+        [mVideoWriter release];
 
-    if (mWaveRecorder) {
-        mWaveRecorder->close();
-        mWaveRecorder->deleteLater();
-    }
+        mAdaptor = nil;
+        mVideoWriterInput = nil;
+        mVideoWriter = nil;
+        mAudioWriterInput = nil;
+
+        if (mWaveRecorder) {
+            mWaveRecorder->close();
+            mWaveRecorder->deleteLater();
+        }
+
+
+        emit compressionFinished();
+    }];
 }
 
@@ -277,12 +298,22 @@ void UBQuickTimeFile::endSession()
  */
 void UBQuickTimeFile::stop()
 {
+    //qDebug() << "requested end of recording";
     mShouldStopCompression = true;
+
+
+    frameQueueMutex.lock();
+    audioQueueMutex.lock();
+    endSession();
+    frameQueueMutex.unlock();
+    audioQueueMutex.unlock();
 }
 
 /**
- * \brief Create a CVPixelBufferRef from the input adaptor's CVPixelBufferPool
+ * \brief Create and return a CVPixelBufferRef
+ *
+ * The CVPixelBuffer is created from the input adaptor's CVPixelBufferPool
  */
 CVPixelBufferRef UBQuickTimeFile::newPixelBuffer()
 {
@@ -305,6 +336,7 @@ CVPixelBufferRef UBQuickTimeFile::newPixelBuffer()
  */
 void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp)
 {
+    //qDebug() << "appending video frame";
     CMTime t = CMTimeMake((msTimeStamp * mTimeScale / 1000.0), mTimeScale);
 
     bool added = [mAdaptor appendPixelBuffer: pixelBuffer
@@ -329,10 +361,11 @@ void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTime
  * (implemented in the UBAudioQueueRecorder class) and the recording, handled
 * by the AVAssetWriterInput instance mAudioWriterInput.
  */
-void UBQuickTimeFile::appendAudioBuffer(void* pBuffer,
+void UBQuickTimeFile::enqueueAudioBuffer(void* pBuffer,
                                         long pLength)
 {
-    if(!mRecordAudio)
+
+    if(!mRecordAudio || mShouldStopCompression)
         return;
 
@@ -370,31 +403,37 @@ void UBQuickTimeFile::appendAudioBuffer(void* pBuffer,
                                            NULL,
                                            &sampleBuffer);
 
+    //qDebug() << "enqueueAudioBuffer, timeStamp = " << timeStamp.value << " / " << timeStamp.timescale;
 
-    // Wait until the AssetWriterInput is ready, but no more than 100ms
-    // (bit of a duct tape solution; cleaner solution would be to use a QQueue,
-    // similar to the VideoWriter)
-    int waitTime = 0;
-    while(![mAudioWriterInput isReadyForMoreMediaData] && waitTime < 100) {
-        waitTime += 10;
-        usleep(10000);
-    }
+
+    audioQueueMutex.lock();
+    audioQueue.enqueue(sampleBuffer);
+    audioQueueMutex.unlock();
+
+    //qDebug() << "buffer enqueued";
+
+
+
+}
 
-    if ([mAudioWriterInput isReadyForMoreMediaData]) {
-        if(![mAudioWriterInput appendSampleBuffer:sampleBuffer])
-            setLastErrorMessage(QString("Failed to append sample buffer to audio input"));
-    }
-    else
-        setLastErrorMessage(QString("AudioWriterInput not ready. Buffer dropped."));
+bool UBQuickTimeFile::appendSampleBuffer(CMSampleBufferRef sampleBuffer)
+{
+    bool success = [mAudioWriterInput appendSampleBuffer:sampleBuffer];
+
+    if (!success)
+        setLastErrorMessage(QString("Failed to append sample buffer to audio input"));
+
+    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
 
     CFRelease(sampleBuffer);
     CFRelease(blockBuffer);
 
-    // The audioQueueBuffers are all freed when UBAudioQueueRecorder::close() is called
+    return success;
 }
 
+
 /**
  * \brief Print an error message to the terminal, and store it
  */

diff --git a/src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp b/src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp
index e3497640..cd03d8ed 100644
--- a/src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp
+++ b/src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp
@@ -68,7 +68,7 @@ bool UBQuickTimeVideoEncoder::start()
         return false;
     }
 
-    connect(&mQuickTimeCompressionSession, SIGNAL(finished()), this, SLOT(compressionFinished()));
+    connect(&mQuickTimeCompressionSession, SIGNAL(compressionFinished()), this, SLOT(compressionFinished()));
     connect(&mQuickTimeCompressionSession, SIGNAL(audioLevelChanged(quint8)), this, SIGNAL(audioLevelChanged(quint8)));
 
     mQuickTimeCompressionSession.start();
@@ -79,10 +79,7 @@ bool UBQuickTimeVideoEncoder::start()
 
 bool UBQuickTimeVideoEncoder::stop()
 {
-    if (mQuickTimeCompressionSession.isRunning())
-    {
-        mQuickTimeCompressionSession.stop();
-    }
+    mQuickTimeCompressionSession.stop();
 
     UBQuickTimeFile::frameBufferNotEmpty.wakeAll();