|
|
|
@ -47,21 +47,20 @@ UBQuickTimeFile::UBQuickTimeFile(QObject * pParent) |
|
|
|
|
, mVideoWriter(0) |
|
|
|
|
, mVideoWriterInput(0) |
|
|
|
|
, mAdaptor(0) |
|
|
|
|
, mCVPixelBufferPool(0) |
|
|
|
|
, mFramesPerSecond(-1) |
|
|
|
|
, mTimeScale(100) |
|
|
|
|
, mTimeScale(1000) |
|
|
|
|
, mRecordAudio(true) |
|
|
|
|
, mShouldStopCompression(false) |
|
|
|
|
, mCompressionSessionRunning(false) |
|
|
|
|
, mPendingFrames(0) |
|
|
|
|
{ |
|
|
|
|
// NOOP |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
UBQuickTimeFile::~UBQuickTimeFile()
{
    // Intentionally empty: mWaveRecorder (and the writer objects) are torn
    // down by endSession(), not here.
}
|
|
|
|
|
|
|
|
|
bool UBQuickTimeFile::init(const QString& pVideoFileName, const QString& pProfileData, int pFramesPerSecond |
|
|
|
@ -126,7 +125,11 @@ void UBQuickTimeFile::run() |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
/** |
|
|
|
|
* \brief Begin the recording session: initialize the AVAssetWriter, which handles writing the audio/video media to file
|
|
|
|
* \return true if the session was initialized successfully |
|
|
|
|
* |
|
|
|
|
* This function initializes the AVAssetWriter and associated video and audio inputs. |
|
|
|
|
* Video is encoded as H264; audio is encoded as AAC. |
|
|
|
|
*/ |
|
|
|
|
bool UBQuickTimeFile::beginSession() |
|
|
|
|
{ |
|
|
|
@ -147,6 +150,11 @@ bool UBQuickTimeFile::beginSession() |
|
|
|
|
|
|
|
|
|
mVideoWriter.movieTimeScale = mTimeScale; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Video |
|
|
|
|
// |
|
|
|
|
|
|
|
|
|
int frameWidth = mFrameSize.width(); |
|
|
|
|
int frameHeight = mFrameSize.height(); |
|
|
|
|
|
|
|
|
@ -175,18 +183,72 @@ bool UBQuickTimeFile::beginSession() |
|
|
|
|
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:mVideoWriterInput |
|
|
|
|
sourcePixelBufferAttributes:pixelBufSettings] retain]; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Add the input(s) to the assetWriter |
|
|
|
|
NSCParameterAssert([mVideoWriter canAddInput:mVideoWriterInput]); |
|
|
|
|
[mVideoWriter addInput:mVideoWriterInput]; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// begin the writing session |
|
|
|
|
|
|
|
|
|
// Audio |
|
|
|
|
// |
|
|
|
|
|
|
|
|
|
if(mRecordAudio) { |
|
|
|
|
mWaveRecorder = new UBAudioQueueRecorder(); |
|
|
|
|
|
|
|
|
|
// Get the audio format description from mWaveRecorder |
|
|
|
|
CMAudioFormatDescriptionCreate(kCFAllocatorDefault, mWaveRecorder->audioFormat(), |
|
|
|
|
0, NULL, 0, NULL, NULL, |
|
|
|
|
&mAudioFormatDescription); |
|
|
|
|
|
|
|
|
|
if(mWaveRecorder->init(mAudioRecordingDeviceName)) { |
|
|
|
|
connect(mWaveRecorder, &UBAudioQueueRecorder::newWaveBuffer, |
|
|
|
|
this, &UBQuickTimeFile::appendAudioBuffer); |
|
|
|
|
|
|
|
|
|
connect(mWaveRecorder, SIGNAL(audioLevelChanged(quint8)), |
|
|
|
|
this, SIGNAL(audioLevelChanged(quint8))); |
|
|
|
|
} |
|
|
|
|
else { |
|
|
|
|
setLastErrorMessage(mWaveRecorder->lastErrorMessage()); |
|
|
|
|
mWaveRecorder->deleteLater(); |
|
|
|
|
mRecordAudio = false; |
|
|
|
|
break; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// Audio is mono, and compressed to AAC at 128kbps |
|
|
|
|
|
|
|
|
|
AudioChannelLayout audioChannelLayout = { |
|
|
|
|
.mChannelLayoutTag = kAudioChannelLayoutTag_Mono, |
|
|
|
|
.mChannelBitmap = 0, |
|
|
|
|
.mNumberChannelDescriptions = 0 |
|
|
|
|
}; |
|
|
|
|
|
|
|
|
|
NSData *channelLayoutAsData = [NSData dataWithBytes:&audioChannelLayout |
|
|
|
|
length:offsetof(AudioChannelLayout, mChannelDescriptions)]; |
|
|
|
|
|
|
|
|
|
NSDictionary * compressionAudioSettings = @{ |
|
|
|
|
AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], |
|
|
|
|
AVEncoderBitRateKey : [NSNumber numberWithInteger:128000], |
|
|
|
|
AVSampleRateKey : [NSNumber numberWithInteger:44100], |
|
|
|
|
AVChannelLayoutKey : channelLayoutAsData, |
|
|
|
|
AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:1] |
|
|
|
|
}; |
|
|
|
|
|
|
|
|
|
mAudioWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio |
|
|
|
|
outputSettings:compressionAudioSettings] retain]; |
|
|
|
|
|
|
|
|
|
NSCParameterAssert([mVideoWriter canAddInput:mAudioWriterInput]); |
|
|
|
|
[mVideoWriter addInput:mAudioWriterInput]; |
|
|
|
|
|
|
|
|
|
qDebug() << "audio writer input created and added"; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Begin the writing session |
|
|
|
|
bool canStartWriting = [mVideoWriter startWriting]; |
|
|
|
|
[mVideoWriter startSessionAtSourceTime:CMTimeMake(0, mTimeScale)]; |
|
|
|
|
|
|
|
|
|
// return true if everything was created and started successfully |
|
|
|
|
mStartTime = CFAbsoluteTimeGetCurrent(); // used for audio timestamp calculation |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
return (mVideoWriter != nil) && (mVideoWriterInput != nil) && canStartWriting; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
@ -201,10 +263,18 @@ void UBQuickTimeFile::endSession() |
|
|
|
|
[mAdaptor release]; |
|
|
|
|
[mVideoWriterInput release]; |
|
|
|
|
[mVideoWriter release]; |
|
|
|
|
[mAudioWriterInput release]; |
|
|
|
|
|
|
|
|
|
mAdaptor = nil; |
|
|
|
|
mVideoWriterInput = nil; |
|
|
|
|
mVideoWriter = nil; |
|
|
|
|
mAudioWriterInput = nil; |
|
|
|
|
|
|
|
|
|
if (mWaveRecorder) { |
|
|
|
|
mWaveRecorder->close(); |
|
|
|
|
mWaveRecorder->deleteLater(); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
/** |
|
|
|
@ -235,11 +305,11 @@ CVPixelBufferRef UBQuickTimeFile::newPixelBuffer() |
|
|
|
|
|
|
|
|
|
/** |
|
|
|
|
* \brief Add a frame to the pixel buffer adaptor |
|
|
|
|
* \param pixelBuffer The CVPixelBufferRef (video frame) to add to the movie |
|
|
|
|
* \param msTimeStamp Timestamp, in milliseconds, of the frame |
|
|
|
|
*/ |
|
|
|
|
void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp) |
|
|
|
|
{ |
|
|
|
|
//qDebug() << "adding video frame at time: " << msTimeStamp; |
|
|
|
|
|
|
|
|
|
CMTime t = CMTimeMake((msTimeStamp * mTimeScale / 1000.0), mTimeScale); |
|
|
|
|
|
|
|
|
|
bool added = [mAdaptor appendPixelBuffer: pixelBuffer |
|
|
|
@ -252,11 +322,79 @@ void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTime |
|
|
|
|
CVPixelBufferRelease(pixelBuffer); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/** |
|
|
|
|
* \brief Append an AudioQueue Buffer to the audio AVAssetWriterInput |
|
|
|
|
* \param pBuffer The AudioQueueBufferRef to add. Must be uncompressed (LPCM). |
|
|
|
|
* \param pLength The length of the buffer, in Bytes |
|
|
|
|
* |
|
|
|
|
* This function serves as an interface between the low-level audio stream |
|
|
|
|
* (implemented in the UBAudioQueueRecorder class) and the recording, handled |
|
|
|
|
* by the AVAssetWriterInput instance mAudioWriterInput. |
|
|
|
|
*/ |
|
|
|
|
void UBQuickTimeFile::appendAudioBuffer(void* pBuffer,
                                        long pLength)
{
    if (!mRecordAudio)
        return;

    // CMSampleBuffers require a CMBlockBuffer to hold the media data; create
    // a blockBuffer here that references the AudioQueueBuffer's data.
    // kCFAllocatorNull as the block allocator: the blockBuffer must NOT free
    // pBuffer — the audioQueueBuffers are freed by UBAudioQueueRecorder::close().
    CMBlockBufferRef blockBuffer = NULL;
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
                                                         pBuffer,
                                                         pLength,
                                                         kCFAllocatorNull,
                                                         NULL,
                                                         0,
                                                         pLength,
                                                         kCMBlockBufferAssureMemoryNowFlag,
                                                         &blockBuffer);
    if (status != kCMBlockBufferNoErr) {
        // Bail out early: appending a NULL blockBuffer would be undefined.
        setLastErrorMessage(QString("Failed to create audio block buffer (OSStatus %1)").arg(status));
        return;
    }

    // Timestamp of the current sample, measured from the start of the session
    // (mStartTime is set when the writing session begins).
    CFAbsoluteTime currentTime = CFAbsoluteTimeGetCurrent();
    CFTimeInterval elapsedTime = currentTime - mStartTime;
    CMTime timeStamp = CMTimeMake(elapsedTime * mTimeScale, mTimeScale);

    // Number of samples in the buffer (input is uncompressed LPCM, so
    // bytes-per-frame comes straight from the recorder's format description).
    long nSamples = pLength / mWaveRecorder->audioFormat()->mBytesPerFrame;

    CMSampleBufferRef sampleBuffer = NULL;
    status = CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault,
                                                             blockBuffer,
                                                             true,
                                                             NULL,
                                                             NULL,
                                                             mAudioFormatDescription,
                                                             nSamples,
                                                             timeStamp,
                                                             NULL,
                                                             &sampleBuffer);
    if (status != noErr) {
        setLastErrorMessage(QString("Failed to create audio sample buffer (OSStatus %1)").arg(status));
        CFRelease(blockBuffer);
        return;
    }

    // Add the audio sample to the asset writer input.
    // N.B. the braces matter: in the original brace-less nesting the `else`
    // bound to the inner `if`, so "sample dropped" was reported on every
    // *successful* append (dangling-else bug). The original message also
    // referenced an undefined variable `err`.
    if ([mAudioWriterInput isReadyForMoreMediaData]) {
        if (![mAudioWriterInput appendSampleBuffer:sampleBuffer])
            setLastErrorMessage(QString("Failed to append sample buffer to audio input"));
    }
    else {
        setLastErrorMessage(QString("Audio Writer not ready; sample dropped"));
    }

    CFRelease(sampleBuffer);
    CFRelease(blockBuffer);
    // The audioQueueBuffers are all freed when UBAudioQueueRecorder::close() is called
}
|
|
|
|
|
|
|
|
|
/** |
|
|
|
|
* \brief Print an error message to the terminal, and store it |
|
|
|
|
*/ |
|
|
|
|
void UBQuickTimeFile::setLastErrorMessage(const QString& error)
{
    // Remember the most recent error so callers can query it later, and
    // surface it on the console for debugging.
    mLastErrorMessage = error;

    qWarning() << "UBQuickTimeFile error" << error;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|