Transferred from: http://blog.iosxcode4.com/archives/160
The frameworks used are:
MediaPlayer.framework, QuartzCore.framework, CoreVideo.framework, CoreMedia.framework, AVFoundation.framework
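If you are adapting the snippet into your own class, the imports would look roughly like this (a minimal sketch; AVFoundation, CoreVideo and CoreMedia are referenced directly by the code below, while MediaPlayer and QuartzCore are listed by the original post but not used in the code shown here):

// Imports for the snippet below. AVFoundation, CoreVideo and CoreMedia are
// referenced directly; MediaPlayer and QuartzCore are part of the original
// post's framework list but are not touched by this code.
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <QuartzCore/QuartzCore.h>
#import <MediaPlayer/MediaPlayer.h>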
The code is as follows:
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    // Draw the CGImage into the pixel buffer's backing memory
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
- (IBAction)testCompressionSession
{
    NSString *moviePath = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"mov"];
    CGSize size = CGSizeMake(320, 400); // output video dimensions

    NSError *error = nil;
    unlink([betaCompressionDirectory UTF8String]); // remove any file already at the output path

    // Initialize the compression engine (betaCompressionDirectory is the output path, defined elsewhere)
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
                                                           kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                   sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"can add input");
    else
        NSLog(@"cannot add input");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // Synthesize multiple pictures into a single video file
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (++frame >= [imageArr count] * 10)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            CVPixelBufferRef buffer = NULL;
            int idx = frame / 10;
            // Each image is held for 10 frames at a timescale of 10, i.e. one second per image
            buffer = [self pixelBufferFromCGImage:[[imageArr objectAtIndex:idx] CGImage] size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)])
                    NSLog(@"FAIL");
                CFRelease(buffer); // release the buffer whether or not the append succeeded
            }
        }
    }];
}
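For context, here is a minimal sketch of how imageArr and betaCompressionDirectory (both referenced above but defined elsewhere in the original project) might be set up before the button is tapped. The viewDidLoad placement, output file name and image names are assumptions for illustration, not part of the original post:

// Assumed setup for the snippet above (illustrative only): imageArr and
// betaCompressionDirectory are ivars that must exist before the IBAction runs.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Output path for the generated movie; any writable path works.
    betaCompressionDirectory = [[NSTemporaryDirectory()
        stringByAppendingPathComponent:@"output.mov"] retain];

    // Source images; each one is held for 10 frames at the timescale of 10
    // used in CMTimeMake(frame, 10) above, i.e. one second per image.
    imageArr = [[NSArray alloc] initWithObjects:
                [UIImage imageNamed:@"frame1.png"],
                [UIImage imageNamed:@"frame2.png"],
                [UIImage imageNamed:@"frame3.png"], nil];
}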