I'm new to both iOS development and Stack Overflow, so please bear with me if my code doesn't look as nice as it should.
I've set up a test application using ARC and AVAssetWriter to create a video from images that live in my application bundle. Everything works as expected and the video gets created properly, but when I profile the application with Instruments I get memory leaks that I don't know how to fix, because nothing in the Detail view relates to my code: all the leaked objects are Mallocs and the Responsible Library is VideoToolbox.
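For reference, here are the ivars, properties, and helpers the two methods below rely on. This is only a sketch of my class interface: the class name is made up for this post and the exact types are inferred from how the members are used.

// Sketch of the relevant parts of my view controller (class name hypothetical, types inferred from usage)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface MyViewController : UIViewController
{
    CMTime frameDuration;              // Duration of a single frame
    CMTime nextPresentationTimeStamp;  // Presentation time of the next frame to append
}
@property (nonatomic, copy) NSString *videoPath;              // Destination path of the temp video file
@property (nonatomic, strong) NSArray *imageNames;            // Full list of bundle image names
@property (nonatomic, strong) NSMutableArray *imageNamesCopy; // Working copy, consumed frame by frame
@property (nonatomic, strong) UIProgressView *progressBar;

- (void)deleteTempVideo; // Removes any previous file at videoPath
- (void)loadVideo;       // Loads the finished movie for playback
@end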
Here is the method in my view controller class that I call to start writing the video:
- (void)writeVideo
{
    // Set the frameDuration ivar (50/600 s = 1/12 s per frame, i.e. 12 fps)
    frameDuration = CMTimeMake(50, 600);
    nextPresentationTimeStamp = kCMTimeZero;
    [self deleteTempVideo];
    NSError *error = nil;
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.videoPath] fileType:AVFileTypeQuickTimeMovie error:&error];
    if (!error) {
        // Define video settings to be passed to the AVAssetWriterInput instance
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:640], AVVideoWidthKey,
                                       [NSNumber numberWithInt:480], AVVideoHeightKey, nil];
        // Instantiate the AVAssetWriterInput
        AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        // Instantiate the AVAssetWriterInputPixelBufferAdaptor connected to the writer input
        AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
        // Add the writer input to the writer and begin writing
        [writer addInput:writerInput];
        [writer startWriting];
        [writer startSessionAtSourceTime:nextPresentationTimeStamp];
        // Request media data on a dedicated serial queue and append frames until the source images run out
        dispatch_queue_t mediaDataRequestQueue = dispatch_queue_create("Media data request queue", NULL);
        [writerInput requestMediaDataWhenReadyOnQueue:mediaDataRequestQueue usingBlock:^{
            while (writerInput.isReadyForMoreMediaData) {
                CVPixelBufferRef nextBuffer = [self fetchNextPixelBuffer];
                if (nextBuffer) {
                    [pixelBufferAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextPresentationTimeStamp];
                    nextPresentationTimeStamp = CMTimeAdd(nextPresentationTimeStamp, frameDuration);
                    CVPixelBufferRelease(nextBuffer);
                    // Update the progress bar on the main queue
                    dispatch_async(dispatch_get_main_queue(), ^{
                        NSUInteger totalFrames = [self.imageNames count];
                        float progress = 1.0 * (totalFrames - [self.imageNamesCopy count]) / totalFrames;
                        [self.progressBar setProgress:progress animated:YES];
                    });
                } else {
                    // No more frames: close the input, finish the file, and clean up
                    [writerInput markAsFinished];
                    [writer finishWriting];
                    [self loadVideo];
                    dispatch_release(mediaDataRequestQueue);
                    break;
                }
            }
        }];
    }
}
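In case it matters, this is roughly how I kick the export off (the action name is made up for this post; imageNamesCopy is refilled from imageNames each time):

// Hypothetical trigger, e.g. wired to a button; the names match the sketch above
- (IBAction)exportTapped:(id)sender
{
    self.imageNamesCopy = [self.imageNames mutableCopy]; // Reset the working list
    [self.progressBar setProgress:0.0 animated:NO];
    [self writeVideo];
}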
And here is the method I use to fetch the pixel buffers that get appended through the pixel buffer adaptor created in the previous method:
// Consume the imageNamesCopy mutable array and return a CVPixelBufferRef for its last object
- (CVPixelBufferRef)fetchNextPixelBuffer
{
    NSString *imageName = [self.imageNamesCopy lastObject];
    // No image left: return NULL so the caller knows it can finish writing
    if (!imageName) return NULL;
    [self.imageNamesCopy removeLastObject];
    // Create a UIImage instance and grab its backing CGImage
    UIImage *image = [UIImage imageNamed:imageName];
    CGImageRef imageRef = image.CGImage;
    CVPixelBufferRef buffer = NULL;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    // Pixel buffer options
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    // Create the pixel buffer
    CVReturn result = CVPixelBufferCreate(NULL, width, height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &buffer);
    if (result == kCVReturnSuccess && buffer) {
        CVPixelBufferLockBaseAddress(buffer, 0);
        void *bufferPointer = CVPixelBufferGetBaseAddress(buffer);
        // Define the color space
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        // Create the bitmap context to draw the image straight into the pixel buffer's memory
        CGContextRef context = CGBitmapContextCreate(bufferPointer, width, height, 8, 4 * width, colorSpace, kCGImageAlphaNoneSkipFirst);
        CGColorSpaceRelease(colorSpace);
        if (context) {
            CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
            CGContextRelease(context);
        }
        CVPixelBufferUnlockBaseAddress(buffer, 0);
    }
    return buffer;
}
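To try to narrow the leak down, one thing I can think of is exercising fetchNextPixelBuffer on its own, without the writer involved. Something like this throwaway test (not part of the app, method name made up):

// Throwaway check: create and release every buffer without touching AVAssetWriter.
// If Instruments still reports leaks here, the problem is in the buffer/drawing path;
// if not, that points at the writer/VideoToolbox side.
- (void)testPixelBufferPathOnly
{
    self.imageNamesCopy = [self.imageNames mutableCopy];
    CVPixelBufferRef buffer = NULL;
    while ((buffer = [self fetchNextPixelBuffer])) {
        CVPixelBufferRelease(buffer); // Balance the CVPixelBufferCreate in fetchNextPixelBuffer
    }
}

Is there anything obviously wrong in either method that would explain the VideoToolbox leaks Instruments is showing?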