agriche 9 years ago
commit ab4ae9435a
Changed files (8):
  1. OpenBoard.pro (4 changed lines)
  2. src/podcast/UBPodcastController.cpp (14 changed lines)
  3. src/podcast/podcast.pri (17 changed lines)
  4. src/podcast/quicktime/UBAudioQueueRecorder.cpp (4 changed lines)
  5. src/podcast/quicktime/UBQuickTimeFile.cpp (656 changed lines, file deleted)
  6. src/podcast/quicktime/UBQuickTimeFile.h (77 changed lines)
  7. src/podcast/quicktime/UBQuickTimeFile.mm (262 changed lines, new file)
  8. src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp (10 changed lines)

--- a/OpenBoard.pro
+++ b/OpenBoard.pro
@@ -130,13 +130,13 @@ macx {
     LIBS += -framework Foundation
     LIBS += -framework Cocoa
     LIBS += -framework Carbon
+    LIBS += -framework AVFoundation
+    LIBS += -framework CoreMedia
     LIBS += -lcrypto
     CONFIG(release, debug|release):CONFIG += x86_64
     CONFIG(debug, debug|release):CONFIG += x86_64
-    # TODO Craig: switch to 64bit
     QMAKE_MAC_SDK = macosx
     QMAKE_MACOSX_DEPLOYMENT_TARGET = "10.10"

--- a/src/podcast/UBPodcastController.cpp
+++ b/src/podcast/UBPodcastController.cpp
@@ -59,9 +59,9 @@
 #ifdef Q_OS_WIN
     #include "windowsmedia/UBWindowsMediaVideoEncoder.h"
     #include "windowsmedia/UBWaveRecorder.h"
-//#elif defined(Q_OS_OSX)
-//    #include "quicktime/UBQuickTimeVideoEncoder.h"
-//    #include "quicktime/UBAudioQueueRecorder.h"
+#elif defined(Q_OS_OSX)
+    #include "quicktime/UBQuickTimeVideoEncoder.h"
+    #include "quicktime/UBAudioQueueRecorder.h"
 #endif
 #include "core/memcheck.h"
@@ -305,8 +305,8 @@ void UBPodcastController::start()
 #ifdef Q_OS_WIN
     mVideoEncoder = new UBWindowsMediaVideoEncoder(this); //deleted on stop
-//#elif defined(Q_OS_OSX)
-//    mVideoEncoder = new UBQuickTimeVideoEncoder(this); //deleted on stop
+#elif defined(Q_OS_OSX)
+    mVideoEncoder = new UBQuickTimeVideoEncoder(this); //deleted on stop
 #endif
     if (mVideoEncoder)
@@ -795,8 +795,8 @@ QStringList UBPodcastController::audioRecordingDevices()
 #ifdef Q_OS_WIN
     devices = UBWaveRecorder::waveInDevices();
-//#elif defined(Q_OS_OSX)
-//    devices = UBAudioQueueRecorder::waveInDevices();
+#elif defined(Q_OS_OSX)
+    devices = UBAudioQueueRecorder::waveInDevices();
 #endif
     return devices;

--- a/src/podcast/podcast.pri
+++ b/src/podcast/podcast.pri
@@ -22,13 +22,14 @@ win32 {
         src/podcast/windowsmedia/UBWaveRecorder.h
 }
-#macx {
-#    SOURCES += src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp \
-#        src/podcast/quicktime/UBQuickTimeFile.cpp \
-#        src/podcast/quicktime/UBAudioQueueRecorder.cpp
-#    HEADERS += src/podcast/quicktime/UBQuickTimeVideoEncoder.h \
-#        src/podcast/quicktime/UBQuickTimeFile.h \
-#        src/podcast/quicktime/UBAudioQueueRecorder.h
-#}
+macx {
+    SOURCES += src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp \
+        src/podcast/quicktime/UBAudioQueueRecorder.cpp
+    HEADERS += src/podcast/quicktime/UBQuickTimeVideoEncoder.h \
+        src/podcast/quicktime/UBQuickTimeFile.h \
+        src/podcast/quicktime/UBAudioQueueRecorder.h
+    OBJECTIVE_SOURCES += src/podcast/quicktime/UBQuickTimeFile.mm
+}

--- a/src/podcast/quicktime/UBAudioQueueRecorder.cpp
+++ b/src/podcast/quicktime/UBAudioQueueRecorder.cpp
@@ -151,10 +151,10 @@ QString UBAudioQueueRecorder::deviceUIDFromDeviceID(AudioDeviceID id)
     {
         char *cname = new char[1024];
-        CFStringGetCString (name, cname, 1024, kCFStringEncodingASCII);
+        CFStringGetCString (name, cname, 1024, kCFStringEncodingISOLatin1);
         int length = CFStringGetLength (name);
-        uid = QString::fromAscii(cname, length);
+        uid = QString::fromLatin1(cname, length);
         delete cname;
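
A note on the two changed calls above: QString::fromAscii() is deprecated in Qt 5 (where it behaves as fromLatin1()), so the commit pairs kCFStringEncodingISOLatin1 with QString::fromLatin1() to keep the same byte semantics. Not part of this commit, but for comparison, Qt 5.2 and later also provide a direct CoreFoundation bridge; a minimal sketch, with a hypothetical helper name:

    // Illustration only (not in the commit): QString::fromCFString(), available
    // since Qt 5.2 on Apple platforms, converts the CFStringRef directly and
    // preserves full Unicode instead of forcing an 8-bit encoding.
    #include <QString>
    #include <CoreFoundation/CoreFoundation.h>

    static QString deviceNameToQString(CFStringRef name)   // hypothetical helper
    {
        return name ? QString::fromCFString(name) : QString();
    }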

--- a/src/podcast/quicktime/UBQuickTimeFile.cpp
+++ /dev/null
@@ -1,656 +0,0 @@
/*
* Copyright (C) 2013 Open Education Foundation
*
* Copyright (C) 2010-2013 Groupement d'Intérêt Public pour
* l'Education Numérique en Afrique (GIP ENA)
*
* This file is part of OpenBoard.
*
* OpenBoard is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3 of the License,
* with a specific linking exception for the OpenSSL project's
* "OpenSSL" library (or with modified versions of it that use the
* same license as the "OpenSSL" library).
*
* OpenBoard is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenBoard. If not, see <http://www.gnu.org/licenses/>.
*/
#include "UBQuickTimeFile.h"
#include <AudioToolbox/AudioToolbox.h>
#include "UBAudioQueueRecorder.h"
#include <QtGui>
#include "core/memcheck.h"
QQueue<UBQuickTimeFile::VideoFrame> UBQuickTimeFile::frameQueue;
QMutex UBQuickTimeFile::frameQueueMutex;
QWaitCondition UBQuickTimeFile::frameBufferNotEmpty;
UBQuickTimeFile::UBQuickTimeFile(QObject * pParent)
: QThread(pParent)
, mVideoCompressionSession(0)
, mVideoMedia(0)
, mSoundMedia(0)
, mVideoOutputTrack(0)
, mSoundOutputTrack(0)
, mCVPixelBufferPool(0)
, mOutputMovie(0)
, mFramesPerSecond(-1)
, mTimeScale(100)
, mRecordAudio(true)
, mWaveRecorder(0)
, mSouldStopCompression(false)
, mCompressionSessionRunning(false)
, mPendingFrames(0)
{
// NOOP
}
bool UBQuickTimeFile::init(const QString& pVideoFileName, const QString& pProfileData, int pFramesPerSecond
, const QSize& pFrameSize, bool pRecordAudio, const QString& audioRecordingDevice)
{
mFrameSize = pFrameSize;
mFramesPerSecond = pFramesPerSecond;
mVideoFileName = pVideoFileName;
mRecordAudio = pRecordAudio && QSysInfo::MacintoshVersion >= QSysInfo::MV_10_5; // Audio Queue are available in 10.5 +;
if (mRecordAudio)
mAudioRecordingDeviceName = audioRecordingDevice;
else
mAudioRecordingDeviceName = "";
if (pProfileData.toLower() == "lossless")
mSpatialQuality = codecLosslessQuality;
if (pProfileData.toLower() == "high")
mSpatialQuality = codecHighQuality;
else if (pProfileData.toLower() == "normal")
mSpatialQuality = codecNormalQuality;
else if (pProfileData.toLower() == "low")
mSpatialQuality = codecLowQuality;
else
mSpatialQuality = codecHighQuality;
qDebug() << "Quality " << pProfileData << mSpatialQuality;
return true;
}
void UBQuickTimeFile::run()
{
EnterMoviesOnThread(kCSAcceptThreadSafeComponentsOnlyMode);
mSouldStopCompression = false;
mPendingFrames = 0;
createCompressionSession();
mCompressionSessionRunning = true;
emit compressionSessionStarted();
while(!mSouldStopCompression)
{
frameQueueMutex.lock();
//qDebug() << "run .... wait" << QTime::currentTime();
frameBufferNotEmpty.wait(&UBQuickTimeFile::frameQueueMutex);
//qDebug() << "awakend ..." << QTime::currentTime();
if (!frameQueue.isEmpty())
{
QQueue<VideoFrame> localQueue = frameQueue;
frameQueue.clear();
frameQueueMutex.unlock();
while (!localQueue.isEmpty())
{
VideoFrame frame = localQueue.dequeue();
appendVideoFrame(frame.buffer, frame.timestamp);
}
}
else
{
frameQueueMutex.unlock();
}
}
flushPendingFrames();
}
bool UBQuickTimeFile::createCompressionSession()
{
CodecType codecType = kH264CodecType;
CFStringRef keys[] = {kCVPixelBufferPixelFormatTypeKey, kCVPixelBufferWidthKey, kCVPixelBufferHeightKey};
int width = mFrameSize.width();
int height = mFrameSize.height();
int pixelFormat = k32BGRAPixelFormat;
CFTypeRef values[] =
{
(CFTypeRef)CFNumberCreate(0, kCFNumberIntType, (void*)&pixelFormat),
(CFTypeRef)CFNumberCreate(0, kCFNumberIntType, (void*)&width),
(CFTypeRef)CFNumberCreate(0, kCFNumberIntType, (void*)&height)
};
CFDictionaryRef pixelBufferAttributes = CFDictionaryCreate(kCFAllocatorDefault
, (const void **)keys, (const void **)values, 3, 0, 0);
if(!pixelBufferAttributes)
{
setLastErrorMessage("Could not create CV buffer pool pixel buffer attributes");
return false;
}
OSStatus err = noErr;
ICMEncodedFrameOutputRecord encodedFrameOutputRecord = {NULL, NULL, NULL};
ICMCompressionSessionOptionsRef sessionOptions = 0;
err = ICMCompressionSessionOptionsCreate(0, &sessionOptions);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionOptionsCreate() failed %1").arg(err));
goto bail;
}
// We must set this flag to enable P or B frames.
err = ICMCompressionSessionOptionsSetAllowTemporalCompression(sessionOptions, true);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionOptionsSetAllowTemporalCompression() failed %1").arg(err));
goto bail;
}
// We must set this flag to enable B frames.
err = ICMCompressionSessionOptionsSetAllowFrameReordering(sessionOptions, true);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionOptionsSetAllowFrameReordering() failed %1").arg(err));
goto bail;
}
// Set the maximum key frame interval, also known as the key frame rate.
err = ICMCompressionSessionOptionsSetMaxKeyFrameInterval(sessionOptions, mFramesPerSecond);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionOptionsSetMaxKeyFrameInterval() failed %1").arg(err));
goto bail;
}
// This allows the compressor more flexibility (ie, dropping and coalescing frames).
err = ICMCompressionSessionOptionsSetAllowFrameTimeChanges(sessionOptions, true);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionOptionsSetAllowFrameTimeChanges() failed %1").arg(err));
goto bail;
}
// Set the average quality.
err = ICMCompressionSessionOptionsSetProperty(sessionOptions,
kQTPropertyClass_ICMCompressionSessionOptions,
kICMCompressionSessionOptionsPropertyID_Quality,
sizeof(mSpatialQuality),
&mSpatialQuality);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionOptionsSetProperty(Quality) failed %1").arg(err));
goto bail;
}
//qDebug() << "available quality" << mSpatialQuality;
encodedFrameOutputRecord.encodedFrameOutputCallback = addEncodedFrameToMovie;
encodedFrameOutputRecord.encodedFrameOutputRefCon = this;
encodedFrameOutputRecord.frameDataAllocator = 0;
err = ICMCompressionSessionCreate(0, mFrameSize.width(), mFrameSize.height(), codecType, mTimeScale,
sessionOptions, pixelBufferAttributes, &encodedFrameOutputRecord, &mVideoCompressionSession);
if(err)
{
setLastErrorMessage(QString("ICMCompressionSessionCreate() failed %1").arg(err));
goto bail;
}
mCVPixelBufferPool = ICMCompressionSessionGetPixelBufferPool(mVideoCompressionSession);
if(!mCVPixelBufferPool)
{
setLastErrorMessage("ICMCompressionSessionGetPixelBufferPool() failed.");
err = !noErr;
goto bail;
}
if(mRecordAudio)
{
mWaveRecorder = new UBAudioQueueRecorder();
if(mWaveRecorder->init(mAudioRecordingDeviceName))
{
connect(mWaveRecorder, SIGNAL(newWaveBuffer(void*, long, int , const AudioStreamPacketDescription*))
, this, SLOT(appendAudioBuffer(void*, long, int, const AudioStreamPacketDescription*)));
connect(mWaveRecorder, SIGNAL(audioLevelChanged(quint8)), this, SIGNAL(audioLevelChanged(quint8)));
}
else
{
setLastErrorMessage(mWaveRecorder->lastErrorMessage());
mWaveRecorder->deleteLater();
}
}
createMovie();
bail:
ICMCompressionSessionOptionsRelease(sessionOptions);
sessionOptions = 0;
CFRelease(pixelBufferAttributes);
return err == noErr;
}
void UBQuickTimeFile::stop()
{
mSouldStopCompression = true;
}
bool UBQuickTimeFile::flushPendingFrames()
{
mCompressionSessionRunning = false;
if (mWaveRecorder)
{
mWaveRecorder->close();
mWaveRecorder->deleteLater();
}
//Flush pending frames in compression session
OSStatus err = ICMCompressionSessionCompleteFrames(mVideoCompressionSession, true, 0, 0);
if (err)
{
setLastErrorMessage(QString("ICMCompressionSessionCompleteFrames() failed %1").arg(err));
return false;
}
return true;
}
bool UBQuickTimeFile::closeCompressionSession()
{
OSStatus err = noErr;
if (mVideoMedia)
{
// End the media sample-adding session.
err = EndMediaEdits(mVideoMedia);
if (err)
{
setLastErrorMessage(QString("EndMediaEdits(mVideoMedia) failed %1").arg(err));
return false;
}
// Make sure things are extra neat.
ExtendMediaDecodeDurationToDisplayEndTime(mVideoMedia, 0);
// Insert the stuff we added into the track, at the end.
Track videoTrack = GetMediaTrack(mVideoMedia);
err = InsertMediaIntoTrack(videoTrack,
GetTrackDuration(videoTrack),
0, GetMediaDisplayDuration(mVideoMedia),
fixed1);
mVideoMedia = 0;
if (err)
{
setLastErrorMessage(QString("InsertMediaIntoTrack() failed %1").arg(err));
return false;
}
if (mSoundMedia)
{
err = EndMediaEdits(mSoundMedia);
if(err)
{
setLastErrorMessage(QString("EndMediaEdits(mAudioMedia) failed %1").arg(err));
return false;
}
Track soundTrack = GetMediaTrack(mSoundMedia);
err = InsertMediaIntoTrack(soundTrack,
GetTrackDuration(soundTrack),
0, GetMediaDisplayDuration(mSoundMedia),
fixed1);
mSoundMedia = 0;
if (err)
{
setLastErrorMessage(QString("InsertMediaIntoTrack(mAudioMedia) failed %1").arg(err));
}
TimeValue soundTrackDuration = GetTrackDuration(soundTrack);
TimeValue videoTrackDuration = GetTrackDuration(videoTrack);
if (soundTrackDuration > videoTrackDuration)
{
qDebug() << "Sound track is longer then video track" << soundTrackDuration << ">" << videoTrackDuration;
DeleteTrackSegment(soundTrack, videoTrackDuration, soundTrackDuration - videoTrackDuration);
}
DisposeHandle((Handle)mSoundDescription);
}
}
// Write the movie header to the file.
err = AddMovieToStorage(mOutputMovie, mOutputMovieDataHandler);
if (err)
{
setLastErrorMessage(QString("AddMovieToStorage() failed %1").arg(err));
return false;
}
err = UpdateMovieInStorage(mOutputMovie, mOutputMovieDataHandler);
if (err)
{
setLastErrorMessage(QString("UpdateMovieInStorage() failed %1").arg(err));
return false;
}
err = CloseMovieStorage(mOutputMovieDataHandler);
if (err)
{
setLastErrorMessage(QString("CloseMovieStorage() failed %1").arg(err));
return false;
}
CVPixelBufferPoolRelease(mCVPixelBufferPool);
mCVPixelBufferPool = 0;
mOutputMovie = 0;
mOutputMovieDataHandler = 0;
mVideoCompressionSession = 0;
ExitMoviesOnThread();
return true;
}
OSStatus UBQuickTimeFile::addEncodedFrameToMovie(void *encodedFrameOutputRefCon,
ICMCompressionSessionRef session,
OSStatus err,
ICMEncodedFrameRef encodedFrame,
void *reserved)
{
Q_UNUSED(session);
Q_UNUSED(reserved);
UBQuickTimeFile *quickTimeFile = (UBQuickTimeFile *)encodedFrameOutputRefCon;
if(quickTimeFile)
quickTimeFile->addEncodedFrame(encodedFrame, err);
return noErr;
}
void UBQuickTimeFile::addEncodedFrame(ICMEncodedFrameRef encodedFrame, OSStatus frameErr)
{
mPendingFrames--;
//qDebug() << "addEncodedFrame" << mSouldStopCompression << mPendingFrames;
if(frameErr == noErr)
{
if (mVideoMedia)
{
OSStatus err = AddMediaSampleFromEncodedFrame(mVideoMedia, encodedFrame, 0);
if(err)
{
setLastErrorMessage(QString("AddMediaSampleFromEncodedFrame() failed %1").arg(err));
}
}
}
else
{
setLastErrorMessage(QString("addEncodedFrame received an error %1").arg(frameErr));
}
if (mSouldStopCompression && mPendingFrames == 0)
{
closeCompressionSession();
}
}
bool UBQuickTimeFile::createMovie()
{
if(!mOutputMovie)
{
OSStatus err = noErr;
Handle dataRef;
OSType dataRefType;
CFStringRef filePath = CFStringCreateWithCString(0, mVideoFileName.toUtf8().constData(), kCFStringEncodingUTF8);
QTNewDataReferenceFromFullPathCFString(filePath, kQTPOSIXPathStyle, 0, &dataRef, &dataRefType);
err = CreateMovieStorage(dataRef, dataRefType, 'TVOD', 0, createMovieFileDeleteCurFile, &mOutputMovieDataHandler, &mOutputMovie);
if(err)
{
setLastErrorMessage(QString("CreateMovieStorage() failed %1").arg(err));
return false;
}
mVideoOutputTrack = NewMovieTrack(mOutputMovie, X2Fix(mFrameSize.width()), X2Fix(mFrameSize.height()), 0);
err = GetMoviesError();
if( err )
{
setLastErrorMessage(QString("NewMovieTrack(Video) failed %1").arg(err));
return false;
}
if(!createVideoMedia())
return false;
if(mRecordAudio)
{
mSoundOutputTrack = NewMovieTrack(mOutputMovie, 0, 0, kFullVolume);
err = GetMoviesError();
if(err)
{
setLastErrorMessage(QString("NewMovieTrack(Sound) failed %1").arg(err));
return false;
}
if(!createAudioMedia())
return false;
}
}
return true;
}
bool UBQuickTimeFile::createVideoMedia()
{
mVideoMedia = NewTrackMedia(mVideoOutputTrack, VideoMediaType, mTimeScale, 0, 0);
OSStatus err = GetMoviesError();
if (err)
{
setLastErrorMessage(QString("NewTrackMedia(VideoMediaType) failed %1").arg(err));
return false;
}
err = BeginMediaEdits(mVideoMedia);
if (err)
{
setLastErrorMessage(QString("BeginMediaEdits(VideoMediaType) failed %1").arg(err));
return false;
}
return true;
}
bool UBQuickTimeFile::createAudioMedia()
{
if(mRecordAudio)
{
mAudioDataFormat = UBAudioQueueRecorder::audioFormat();
mSoundMedia = NewTrackMedia(mSoundOutputTrack, SoundMediaType, mAudioDataFormat.mSampleRate, 0, 0);
OSStatus err = GetMoviesError();
if(err)
{
setLastErrorMessage(QString("NewTrackMedia(AudioMediaType) failed %1").arg(err));
return false;
}
err = BeginMediaEdits(mSoundMedia);
if(err)
{
setLastErrorMessage(QString("BeginMediaEdits(AudioMediaType) failed %1").arg(err));
return false;
}
err = QTSoundDescriptionCreate(&mAudioDataFormat, 0, 0, 0, 0,
kQTSoundDescriptionKind_Movie_LowestPossibleVersion,
&mSoundDescription);
if (err)
{
setLastErrorMessage(QString("QTSoundDescriptionCreate() failed %1").arg(err));
return false;
}
err = QTSoundDescriptionGetProperty(mSoundDescription, kQTPropertyClass_SoundDescription,
kQTSoundDescriptionPropertyID_AudioStreamBasicDescription,
sizeof(mAudioDataFormat), &mAudioDataFormat, 0);
if (err)
{
setLastErrorMessage(QString("QTSoundDescriptionGetProperty() failed %1").arg(err));
return false;
}
}
return true;
}
UBQuickTimeFile::~UBQuickTimeFile()
{
// NOOP
}
CVPixelBufferRef UBQuickTimeFile::newPixelBuffer()
{
CVPixelBufferRef pixelBuffer = 0;
if(CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, mCVPixelBufferPool, &pixelBuffer) != kCVReturnSuccess)
{
setLastErrorMessage("Could not retreive CV buffer from pool");
return 0;
}
return pixelBuffer;
}
void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp)
{
TimeValue64 msTimeStampScaled = msTimeStamp * mTimeScale / 1000;
/*
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0) ;
void *pixelBufferAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
qDebug() << "will comp newVideoFrame - PixelBuffer @" << pixelBufferAddress
<< QTime::currentTime().toString("ss:zzz") << QThread::currentThread();
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
*/
OSStatus err = ICMCompressionSessionEncodeFrame(mVideoCompressionSession, pixelBuffer,
msTimeStampScaled, 0, kICMValidTime_DisplayTimeStampIsValid,
0, 0, 0);
if (err == noErr)
{
mPendingFrames++;
}
else
{
setLastErrorMessage(QString("Could not encode frame %1").arg(err));
}
CVPixelBufferRelease(pixelBuffer);
}
void UBQuickTimeFile::appendAudioBuffer(void* pBuffer, long pLength, int inNumberPacketDescriptions, const AudioStreamPacketDescription* inPacketDescs)
{
Q_UNUSED(pLength);
//qDebug() << "appendAudioBuffer" << QThread::currentThread();
if(mRecordAudio)
{
for (int i = 0; i < inNumberPacketDescriptions; i++)
{
OSStatus err = AddMediaSample2(mSoundMedia,
(UInt8*)pBuffer + inPacketDescs[i].mStartOffset,
inPacketDescs[i].mDataByteSize,
mAudioDataFormat.mFramesPerPacket,
0,
(SampleDescriptionHandle)mSoundDescription,
1,
0,
0);
if (err)
{
setLastErrorMessage(QString("AddMediaSample2(soundMedia) failed %1").arg(err));
}
}
}
#ifdef Q_OS_OSX
free((void*)inPacketDescs);
#endif
}
void UBQuickTimeFile::setLastErrorMessage(const QString& error)
{
mLastErrorMessage = error;
qWarning() << "UBQuickTimeFile error" << error;
}

--- a/src/podcast/quicktime/UBQuickTimeFile.h
+++ b/src/podcast/quicktime/UBQuickTimeFile.h
@@ -30,12 +30,29 @@
 #include <QtCore>
-#include <ApplicationServices/ApplicationServices.h>
-#include <QuickTime/QuickTime.h>
-#include <AudioToolbox/AudioToolbox.h>
+#include <CoreVideo/CoreVideo.h>
 #include "UBAudioQueueRecorder.h"
+// Trick to get around the fact that the C++ compiler doesn't
+// like Objective C code.
+#ifdef __OBJC__ // defined by the Objective C compiler
+    @class AVAssetWriter;
+    @class AVAssetWriterInput;
+    @class AVAssetWriterInputPixelBufferAdaptor;
+    typedef AVAssetWriter* AssetWriterPTR;
+    typedef AVAssetWriterInput* AssetWriterInputPTR;
+    typedef AVAssetWriterInputPixelBufferAdaptor* AssetWriterInputAdaptorPTR;
+#else
+    typedef void* AssetWriterPTR;
+    typedef void* AssetWriterInputPTR;
+    typedef void* AssetWriterInputAdaptorPTR;
+#endif
 class UBQuickTimeFile : public QThread
 {
     Q_OBJECT;
@@ -52,15 +69,11 @@ class UBQuickTimeFile : public QThread
         CVPixelBufferRef newPixelBuffer();
-        bool isCompressionSessionRunning()
-        {
-            return mCompressionSessionRunning;
-        }
+        bool isCompressionSessionRunning() { return mCompressionSessionRunning; }
-        QString lastErrorMessage() const
-        {
-            return mLastErrorMessage;
-        }
+        QString lastErrorMessage() const { return mLastErrorMessage; }
+        void endSession();
         struct VideoFrame
         {
@@ -79,47 +92,19 @@ class UBQuickTimeFile : public QThread
     protected:
         void run();
-    private slots:
-        void appendAudioBuffer(void* pBuffer, long pLength, int inNumberPacketDescriptions
-                        , const AudioStreamPacketDescription* inPacketDescs);
     private:
-        static OSStatus addEncodedFrameToMovie(void *encodedFrameOutputRefCon,
-                                               ICMCompressionSessionRef session,
-                                               OSStatus err,
-                                               ICMEncodedFrameRef encodedFrame,
-                                               void *reserved);
+        bool beginSession();
         void appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp);
-        void addEncodedFrame(ICMEncodedFrameRef encodedFrame, OSStatus err);
-        bool createCompressionSession();
-        bool closeCompressionSession();
-        bool createMovie();
-        bool createVideoMedia();
-        bool createAudioMedia();
         void setLastErrorMessage(const QString& error);
         bool flushPendingFrames();
-        ICMCompressionSessionRef mVideoCompressionSession;
-        Media mVideoMedia;
-        Media mSoundMedia;
-        Track mVideoOutputTrack;
-        Track mSoundOutputTrack;
         volatile CVPixelBufferPoolRef mCVPixelBufferPool;
-        SoundDescriptionHandle mSoundDescription;
-        Movie mOutputMovie;
-        DataHandler mOutputMovieDataHandler;
         int mFramesPerSecond;
         QSize mFrameSize;
@@ -130,17 +115,17 @@ class UBQuickTimeFile : public QThread
         QString mLastErrorMessage;
-        AudioStreamBasicDescription mAudioDataFormat;
-        QPointer<UBAudioQueueRecorder> mWaveRecorder;
-        CodecQ mSpatialQuality;
-        volatile bool mSouldStopCompression;
+        QString mSpatialQuality;
+        volatile bool mShouldStopCompression;
         volatile bool mCompressionSessionRunning;
         QString mAudioRecordingDeviceName;
         volatile int mPendingFrames;
+        AssetWriterPTR mVideoWriter;
+        AssetWriterInputPTR mVideoWriterInput;
+        AssetWriterInputAdaptorPTR mAdaptor;
 };
 #endif /* UBQUICKTIMEFILE_H_ */
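
The typedef block added at the top of this header is what lets the same class declaration be included both from plain C++ files (UBQuickTimeVideoEncoder.cpp, UBPodcastController.cpp) and from the Objective-C++ implementation in UBQuickTimeFile.mm: C++ translation units see opaque void* handles, while the .mm file sees typed AVFoundation pointers of identical size. A minimal stand-alone sketch of the same pattern, using hypothetical names:

    // MovieSink.h (hypothetical): shared between .cpp and .mm translation units.
    #ifdef __OBJC__                        // defined only by the Objective-C++ compiler
        @class AVAssetWriter;              // forward declaration, no framework header needed
        typedef AVAssetWriter* WriterPTR;  // typed pointer inside .mm files
    #else
        typedef void* WriterPTR;           // opaque handle inside plain C++ files
    #endif

    class MovieSink
    {
    public:
        void finish();                     // implemented in MovieSink.mm, where mWriter
                                           // can be sent Objective-C messages
    private:
        WriterPTR mWriter;                 // one pointer of storage either way
    };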

--- /dev/null
+++ b/src/podcast/quicktime/UBQuickTimeFile.mm
@@ -0,0 +1,262 @@
/*
* Copyright (C) 2013 Open Education Foundation
*
* Copyright (C) 2010-2013 Groupement d'Intérêt Public pour
* l'Education Numérique en Afrique (GIP ENA)
*
* This file is part of OpenBoard.
*
* OpenBoard is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3 of the License,
* with a specific linking exception for the OpenSSL project's
* "OpenSSL" library (or with modified versions of it that use the
* same license as the "OpenSSL" library).
*
* OpenBoard is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenBoard. If not, see <http://www.gnu.org/licenses/>.
*/
#include "UBQuickTimeFile.h"
#include <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#include "UBAudioQueueRecorder.h"
#include <QtGui>
#include "core/memcheck.h"
QQueue<UBQuickTimeFile::VideoFrame> UBQuickTimeFile::frameQueue;
QMutex UBQuickTimeFile::frameQueueMutex;
QWaitCondition UBQuickTimeFile::frameBufferNotEmpty;
UBQuickTimeFile::UBQuickTimeFile(QObject * pParent)
: QThread(pParent)
, mVideoWriter(0)
, mVideoWriterInput(0)
, mAdaptor(0)
, mCVPixelBufferPool(0)
, mFramesPerSecond(-1)
, mTimeScale(100)
, mRecordAudio(true)
, mShouldStopCompression(false)
, mCompressionSessionRunning(false)
, mPendingFrames(0)
{
// NOOP
}
UBQuickTimeFile::~UBQuickTimeFile()
{
// NOOP
}
bool UBQuickTimeFile::init(const QString& pVideoFileName, const QString& pProfileData, int pFramesPerSecond
, const QSize& pFrameSize, bool pRecordAudio, const QString& audioRecordingDevice)
{
mFrameSize = pFrameSize;
mFramesPerSecond = pFramesPerSecond;
mVideoFileName = pVideoFileName;
mRecordAudio = pRecordAudio;
mSpatialQuality = pProfileData;
if (mRecordAudio)
mAudioRecordingDeviceName = audioRecordingDevice;
else
mAudioRecordingDeviceName = "";
qDebug() << "UBQuickTimeFile created; video size: " << pFrameSize.width() << " x " << pFrameSize.height();
return true;
}
void UBQuickTimeFile::run()
{
mShouldStopCompression = false;
mPendingFrames = 0;
if (!beginSession())
return;
mCompressionSessionRunning = true;
emit compressionSessionStarted();
do {
frameQueueMutex.lock();
frameBufferNotEmpty.wait(&UBQuickTimeFile::frameQueueMutex);
if (!frameQueue.isEmpty()) {
QQueue<VideoFrame> localQueue = frameQueue;
frameQueue.clear();
frameQueueMutex.unlock();
while (!localQueue.isEmpty()) {
if ([mVideoWriterInput isReadyForMoreMediaData]) {
VideoFrame frame = localQueue.dequeue();
appendVideoFrame(frame.buffer, frame.timestamp);
}
else
usleep(10000);
}
}
else
frameQueueMutex.unlock();
} while(!mShouldStopCompression);
endSession();
}
/**
* \brief Initialize the AVAssetWriter, which handles writing the media to file
*/
bool UBQuickTimeFile::beginSession()
{
NSError *outError;
NSString * outputPath = [[NSString alloc] initWithUTF8String: mVideoFileName.toUtf8().data()];
NSURL * outputUrl = [[NSURL alloc] initFileURLWithPath: outputPath];
if (!outputUrl) {
qDebug() << "Podcast video URL invalid; not recording";
return false;
}
// Create and check the assetWriter
mVideoWriter = [[AVAssetWriter assetWriterWithURL:outputUrl
fileType:AVFileTypeQuickTimeMovie
error:&outError] retain];
NSCParameterAssert(mVideoWriter);
mVideoWriter.movieTimeScale = mTimeScale;
int frameWidth = mFrameSize.width();
int frameHeight = mFrameSize.height();
// Create the input and check it
NSDictionary * videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameWidth], AVVideoWidthKey,
[NSNumber numberWithInt:frameHeight], AVVideoHeightKey,
nil];
mVideoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
NSCParameterAssert(mVideoWriterInput);
// Pixel Buffer Adaptor. This makes it possible to pass CVPixelBuffers to the WriterInput
NSDictionary* pixelBufSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt: frameWidth], kCVPixelBufferWidthKey,
[NSNumber numberWithInt: frameHeight], kCVPixelBufferHeightKey,
nil];
mAdaptor = [[AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:mVideoWriterInput
sourcePixelBufferAttributes:pixelBufSettings] retain];
// Add the input(s) to the assetWriter
NSCParameterAssert([mVideoWriter canAddInput:mVideoWriterInput]);
[mVideoWriter addInput:mVideoWriterInput];
// begin the writing session
bool canStartWriting = [mVideoWriter startWriting];
[mVideoWriter startSessionAtSourceTime:CMTimeMake(0, mTimeScale)];
// return true if everything was created and started successfully
return (mVideoWriter != nil) && (mVideoWriterInput != nil) && canStartWriting;
}
/**
* \brief Close the recording sesion and finish writing the video file
*/
void UBQuickTimeFile::endSession()
{
[mVideoWriterInput markAsFinished];
bool success = [mVideoWriter finishWriting];
[mAdaptor release];
[mVideoWriterInput release];
[mVideoWriter release];
mAdaptor = nil;
mVideoWriterInput = nil;
mVideoWriter = nil;
}
/**
* \brief Request the recording to stop
*/
void UBQuickTimeFile::stop()
{
mShouldStopCompression = true;
}
/**
* \brief Create a CVPixelBufferRef from the input adaptor's CVPixelBufferPool
*/
CVPixelBufferRef UBQuickTimeFile::newPixelBuffer()
{
CVPixelBufferRef pixelBuffer = 0;
if(CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, mAdaptor.pixelBufferPool, &pixelBuffer) != kCVReturnSuccess)
{
setLastErrorMessage("Could not retrieve CV buffer from pool");
return 0;
}
return pixelBuffer;
}
/**
* \brief Add a frame to the pixel buffer adaptor
*/
void UBQuickTimeFile::appendVideoFrame(CVPixelBufferRef pixelBuffer, long msTimeStamp)
{
//qDebug() << "adding video frame at time: " << msTimeStamp;
CMTime t = CMTimeMake((msTimeStamp * mTimeScale / 1000.0), mTimeScale);
bool added = [mAdaptor appendPixelBuffer: pixelBuffer
withPresentationTime: t];
if (!added)
setLastErrorMessage(QString("Could not encode frame at time %1").arg(msTimeStamp));
CVPixelBufferRelease(pixelBuffer);
}
void UBQuickTimeFile::setLastErrorMessage(const QString& error)
{
mLastErrorMessage = error;
qWarning() << "UBQuickTimeFile error" << error;
}
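
The run() loop above only consumes frames; the producer side lives in UBQuickTimeVideoEncoder (the last file in this commit), which fills a pixel buffer from newPixelBuffer(), pushes it onto the static frameQueue, and signals frameBufferNotEmpty. A minimal sketch of that hand-off, built only from members visible in this diff (the VideoFrame field names come from the dequeue code in run()), and assuming those static members are accessible to the encoder, as the existing code implies:

    // Producer side (sketch, not part of the commit): hand a finished pixel
    // buffer to the UBQuickTimeFile compression thread.
    #include "UBQuickTimeFile.h"

    void enqueueFrame(CVPixelBufferRef pixels, long msTimestamp)
    {
        UBQuickTimeFile::VideoFrame frame;
        frame.buffer = pixels;          // obtained earlier from newPixelBuffer()
        frame.timestamp = msTimestamp;  // milliseconds since recording started

        UBQuickTimeFile::frameQueueMutex.lock();
        UBQuickTimeFile::frameQueue.enqueue(frame);
        UBQuickTimeFile::frameQueueMutex.unlock();

        // run() is blocked in frameBufferNotEmpty.wait(); wake it so it drains the queue.
        UBQuickTimeFile::frameBufferNotEmpty.wakeAll();
    }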

--- a/src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp
+++ b/src/podcast/quicktime/UBQuickTimeVideoEncoder.cpp
@@ -130,6 +130,14 @@ void UBQuickTimeVideoEncoder::newPixmap(const QImage& pImage, long timestamp)
     }
 }
+/**
+ * \brief Encode QImage into a video frame and add it to the UBQuickTimeFile's queue.
+ *
+ * This method retrieves the raw image from the supplied QImage, and uses memcpy to
+ * dump it into a CVPixelBuffer, obtained through the UBQuickTimeFile member. The
+ * pixel buffer, along with the timestamp, constitute a video frame which is added
+ * to the member UBQuickTimeFile's queue.
+ */
 void UBQuickTimeVideoEncoder::encodeFrame(const QImage& pImage, long timestamp)
 {
     Q_ASSERT(pImage.format() == QImage::QImage::Format_RGB32); // <=> CVPixelBuffers / k32BGRAPixelFormat
@@ -157,7 +165,7 @@ void UBQuickTimeVideoEncoder::encodeFrame(const QImage& pImage, long timestamp)
     const uchar* imageBuffer = pImage.bits();
-    memcpy((void*) pixelBufferAddress, imageBuffer, pImage.numBytes());
+    memcpy((void*) pixelBufferAddress, imageBuffer, pImage.byteCount());
     CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
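
The two hunks above show only fragments of encodeFrame(). Reconstructed from the lines visible here and from the copy logic in the deleted UBQuickTimeFile.cpp, the documented sequence is: take a pixel buffer from the pool, lock it, copy the QImage bytes into it, unlock, then queue it with the timestamp. A sketch of that copy step written as a free function; the UBQuickTimeFile parameter stands in for the encoder's member, whose exact name is not shown in this diff:

    // Sketch (not a verbatim excerpt of the commit): QImage bytes -> CVPixelBuffer.
    #include "UBQuickTimeFile.h"
    #include <QImage>
    #include <cstring>

    static CVPixelBufferRef imageToPixelBuffer(UBQuickTimeFile& file, const QImage& image)
    {
        CVPixelBufferRef pixelBuffer = file.newPixelBuffer();    // from the adaptor's pool
        if (!pixelBuffer)
            return 0;

        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        void* pixelBufferAddress = CVPixelBufferGetBaseAddress(pixelBuffer);

        // QImage::Format_RGB32 matches the kCVPixelFormatType_32BGRA buffers in the
        // pool on little-endian Macs, so a straight byte copy is sufficient.
        std::memcpy(pixelBufferAddress, image.bits(), image.byteCount());

        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return pixelBuffer;    // the caller queues it together with a millisecond timestamp
    }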
