//
// ViewController.m
// ScreenRecordingSelfTut
//
// Created by Taranjit Singh on 16/08/13.
// Copyright (c) 2013 Taranjit Singh. All rights reserved.
//
#import "ViewController.h"
#import <QuartzCore/QuartzCore.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreGraphics/CoreGraphics.h>
#import <MediaPlayer/MediaPlayer.h>
@interface ViewController ()
{
NSFileManager *fileManager;
NSString *documentsDirectory;
NSMutableArray* arrayOfImages;
NSMutableArray* arrayOfSounds;
NSMutableArray* arrayOfTime;
AVAssetWriter *assetWriter;
AVAssetWriterInput* assetWriterInput;
AVAssetWriterInputPixelBufferAdaptor* assetWriterPixelBufferAdaptor;
NSTimer* assetWriterTimer;
CFAbsoluteTime firstFrameWallClockTime;
}
@end
@implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self addGesturesToImgView];
fileManager = [NSFileManager defaultManager];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); //create an array and store result of our search for the documents directory in it
documentsDirectory = [paths objectAtIndex:0]; //create NSString object, that holds our exact path to the documents directory
arrayOfImages = [[NSMutableArray alloc] init];
arrayOfSounds = [[NSMutableArray alloc] initWithObjects:@"Dog",@"Fart",@"fly",nil];
arrayOfTime = [[NSMutableArray alloc] initWithObjects:@"1",@"5",@"10", nil];
}
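// Attach tap and pan recognizers so the user can move the image view around
// while the screen is being recorded.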
-(void)addGesturesToImgView
{
// A single tap or a one-finger pan both reposition the image view.
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleGesture:)];
tapGesture.numberOfTapsRequired = 1;
[self.imgView addGestureRecognizer:tapGesture];
UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handleGesture:)];
panGesture.maximumNumberOfTouches = 1;
[self.imgView addGestureRecognizer:panGesture];
}
-(void)handleGesture:(UIGestureRecognizer *)gesture
{
// Move the image view to the touch location reported by the tap or pan.
CGPoint touchPoint = [gesture locationInView:self.view];
self.imgView.center = touchPoint;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
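// Toggles recording: "Start" kicks off a repeating timer that captures screenshots,
// "Stop" invalidates the timer and assembles the captured frames into a movie.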
- (IBAction)startVideoRecording:(UIButton *)sender {
if ([sender.titleLabel.text isEqualToString:@"Start"]) {
[sender setTitle:@"Stop" forState:UIControlStateNormal];
[arrayOfImages removeAllObjects]; // discard frames left over from a previous recording
// Capture a screenshot roughly every 0.03 s (about 33 fps) while recording.
self.timer = [NSTimer scheduledTimerWithTimeInterval:0.03 target:self selector:@selector(captureScreenImages) userInfo:nil repeats:YES];
}
else if ([sender.titleLabel.text isEqualToString:@"Stop"]) {
[sender setTitle:@"Start" forState:UIControlStateNormal];
[self.timer invalidate];
[self writeImagesAsMovie];
}
}
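// Called by the timer: renders the current view hierarchy into a PNG in the
// Documents directory and remembers its filename for the movie pass.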
-(void)captureScreenImages{
static int counter = 0;
// Render the view hierarchy into an image context to get a screenshot.
UIGraphicsBeginImageContext(self.view.bounds.size);
[self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *screenshotImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// UIImageWriteToSavedPhotosAlbum(screenshotImage, nil, nil, nil);
NSData *imageData = UIImagePNGRepresentation(screenshotImage);
NSString *fullPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.png", counter]]; // full path for this frame's PNG
[fileManager createFileAtPath:fullPath contents:imageData attributes:nil]; // write the frame to disk
[arrayOfImages addObject:[NSString stringWithFormat:@"%d.png", counter]];
counter++;
}
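// Assembles the captured PNG frames into Documents/Export.mov with AVAssetWriter:
// create the writer and an H.264 video input, wrap the input in a pixel buffer
// adaptor, append one pixel buffer per frame at 1/fps intervals, then finish the session.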
- (void)writeImagesAsMovie{
if ([arrayOfImages count] == 0) {
NSLog(@"No captured frames to write");
return;
}
// Use the first captured frame to determine the video dimensions.
NSString *firstFramePath = [documentsDirectory stringByAppendingPathComponent:[arrayOfImages objectAtIndex:0]];
UIImage *first = [UIImage imageWithContentsOfFile:firstFramePath];
CGSize frameSize = first.size;
NSString *videoFilePath = [documentsDirectory stringByAppendingPathComponent:@"Export.mov"];
// AVAssetWriter cannot overwrite an existing file, so delete any previous export first.
if ([fileManager fileExistsAtPath:videoFilePath]) {
[fileManager removeItemAtPath:videoFilePath error:nil];
}
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:videoFilePath] fileType:AVFileTypeQuickTimeMovie error:&error];
if (error) {
NSLog(@"error creating AssetWriter: %@", [error description]);
}
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// Mark the input as expecting real-time data so it stays ready to accept frames.
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
BOOL start = [videoWriter startWriting];
NSLog(@"Session started? %d", start);
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[first CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (result == NO) // fails on the 3GS, but works on the iPhone 4
NSLog(@"failed to append buffer");
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
int fps = 30; // playback rate of the generated movie
int i = 0;
for (NSString *filename in arrayOfImages)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
i++;
NSLog(@"inside for loop %d %@ ",i, filename);
// Frame i is shown for 1/fps seconds and presented at (i + 1)/fps.
CMTime frameTime = CMTimeMake(1, fps);
CMTime lastTime = CMTimeMake(i, fps);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:filename];
UIImage *imgFrame = [UIImage imageWithContentsOfFile:filePath] ;
buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if (result == NO) // fails on the 3GS, but works on the iPhone 4
{
NSLog(@"failed to append buffer");
NSLog(@"The error is %@", [videoWriter error]);
}
if(buffer)
CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
}
else
{
NSLog(@"writer input not ready for more media data; frame skipped");
i--; // keep presentation times contiguous for the next appended frame
}
[NSThread sleepForTimeInterval:0.02];
}
//Finish the session:
[writerInput markAsFinished];
// End the session at the presentation time of the last appended frame.
[videoWriter endSessionAtSourceTime:CMTimeMake(i + 1, fps)];
float version = [[[UIDevice currentDevice] systemVersion] floatValue];
if (version < 6.0)
{
[videoWriter finishWriting];
} else {
// finishWriting is deprecated from iOS 6, so use the asynchronous variant there.
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(@"Finished writing %@", videoFilePath);
}];
}
// The adaptor manages its own pixel buffer pool; no manual release is needed.
}
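// Converts a CGImage into a 32ARGB CVPixelBuffer by drawing it into a bitmap
// context backed by the buffer's base address. The caller is responsible for
// releasing the returned buffer.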
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef) image
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
CGImageGetHeight(image), 8, 4*CGImageGetWidth(image), rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
#pragma mark - Audio Processing
- (IBAction)callAudioProcessing:(UIButton *)sender {
NSString *videoFilePath = [documentsDirectory stringByAppendingPathComponent:@"Export.mov"];
NSURL *newVideoUrlPath = [NSURL fileURLWithPath:videoFilePath];
[self processVideo:newVideoUrlPath];
}
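// Builds an AVMutableComposition that layers each bundled sound onto its own
// audio track at the time stored in arrayOfTime, adds the recorded video track,
// and exports the result to Documents/exportFinal.mov with a passthrough preset.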
-(void) processVideo: (NSURL*) path
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSError * error = nil;
// Lay each bundled sound on its own audio track, starting at its scheduled time.
for (int i = 0; i < [arrayOfSounds count]; i++)
{
NSString *pathString = [[NSBundle mainBundle] pathForResource:[arrayOfSounds objectAtIndex:i] ofType:@"mp3"];
NSLog(@"pathString = %@", pathString);
AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack *audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
NSInteger startDur = [[arrayOfTime objectAtIndex:i] integerValue];
NSLog(@"startDur = %ld", (long)startDur);
CMTime audioStartTime = CMTimeMake(startDur * 100, 100);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
}
NSString *movPath = [documentsDirectory stringByAppendingPathComponent:@"Export.mov"];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:movPath] options:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *vidError = nil;
NSArray *videoTracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0) {
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[videoTracks objectAtIndex:0] atTime:kCMTimeZero error:&vidError];
} else {
NSLog(@"No video track found in %@", movPath);
}
NSLog(@"Vid error = %@", vidError);
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
NSString* videoName = @"exportFinal.mov";
NSString *exportPath = [NSString stringWithFormat:@"%@/%@",documentsDirectory,videoName];
NSLog(@"exportPath = %@",exportPath);
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
NSLog(@"file type %@",_assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
//export complete
NSLog(@"Export Complete");
//[self uploadToYouTube];
[self cleanUpProcess];
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export cancelled
break;
}
}];
}
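// Removes the intermediate PNG frames from the Documents directory, keeping
// only the exported .mov files.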
- (void)cleanUpProcess{
NSError *error;
NSArray *files = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectory error:&error];
for (NSString *file in files) {
if ([[file pathExtension] isEqualToString:@"mov"]) {
continue; // keep the exported movies
}
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:file];
if (![[NSFileManager defaultManager] removeItemAtPath:filePath error:&error]) {
NSLog(@"Error deleting %@: %@", filePath, error);
}
}
}
@end