Displaying Video with AVFoundation on iOS
This post shares working code for displaying live camera video with AVFoundation on iOS, for your reference. The full listing follows.
//
//  Capter2ViewController.m
//  iOSTest
//
//  Created by garin on 13-7-19.
//  Copyright (c) 2013 garin. All rights reserved.
//

#import "Capter2ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface Capter2ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *session;   // owns the capture pipeline
    UIView *videoPreviewView;    // hosts the preview layer
    UIImageView *imgView;        // optional: shows frames converted to UIImage
}
@end

@implementation Capter2ViewController

- (void)dealloc
{
    [session release];
    [super dealloc];
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    videoPreviewView = [[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
    [self.view addSubview:videoPreviewView];
    [videoPreviewView release];

    // Call this in viewDidLoad to start showing the camera feed.
    [self setupCaptureSession];

//    imgView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
//    imgView.backgroundColor = [UIColor grayColor];
//    [self.view addSubview:imgView];
//    [imgView release];

    UIButton *closeBtn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    closeBtn.frame = CGRectMake(10, 220, 300, 50);
    [closeBtn setTitle:@"Press" forState:UIControlStateNormal];
    [closeBtn addTarget:self action:@selector(closeBtnClick:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:closeBtn];
}

- (void)closeBtnClick:(id)sender
{
    [session stopRunning];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session.
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. Here we pick the low-quality preset.
    session.sessionPreset = AVCaptureSessionPresetLow;

    // Find a suitable AVCaptureDevice.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately; adding a nil input would throw.
        NSLog(@"Could not create video input: %@", error);
        return;
    }
    [session addInput:input];

    // Create a video data output and add it to the session.
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Deliver sample buffers on a dedicated serial queue.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format.
    output.videoSettings = [NSDictionary dictionaryWithObject:
                               [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // If you wish to cap the frame rate to a known value, such as 15 fps,
    // set the minimum frame duration (see the note after this listing).
    //output.minFrameDuration = CMTimeMake(1, 15);

    // Start the session running to start the flow of data.
    [session startRunning];

    // Attach a preview layer to the UIView that displays the video.
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = videoPreviewView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // Orientation handling via the deprecated previewLayer.orientation API
    // is omitted here; see the note after this listing.
    //[previewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight];
    [videoPreviewView.layer addSublayer:previewLayer];

    if (![session isRunning]) {
        [session startRunning];
    }
}

// Delegate callback, invoked on "myQueue" for every captured video frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data.
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];

    // UIKit may only be touched on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        imgView.image = image; // the frame decoded from the video stream
    });
}

// Create a UIImage from sample buffer data.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address and the number of bytes per row of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Get the pixel buffer width and height.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap graphics context.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space.
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create a UIImage from the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image.
    CGImageRelease(quartzImage);

    return image;
}

@end
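A note on the commented-out frame-rate cap: minFrameDuration on AVCaptureVideoDataOutput and videoMinFrameDuration on AVCaptureConnection were both later deprecated, and allocating an AVCaptureConnection yourself (as the commented lines attempt) never worked, since connections are created by the session. From iOS 7 the supported route is to lock the capture device and set its active frame durations. A minimal sketch, reusing the device variable from setupCaptureSession above:

// Cap capture at roughly 15 fps (iOS 7+). Frame durations outside the
// ranges supported by the device's active format will throw, so real
// code should validate against activeFormat.videoSupportedFrameRateRanges.
NSError *fpsError = nil;
if ([device lockForConfiguration:&fpsError]) {
    device.activeVideoMinFrameDuration = CMTimeMake(1, 15); // no faster than 15 fps
    device.activeVideoMaxFrameDuration = CMTimeMake(1, 15); // no slower than 15 fps
    [device unlockForConfiguration];
} else {
    NSLog(@"Could not lock device for configuration: %@", fpsError);
}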
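Similarly, the orientation lines commented out in the original use previewLayer.orientation, which has long been deprecated. Since iOS 6 the preview layer exposes its AVCaptureConnection, and rotation is set there. A sketch, assuming the previewLayer from the listing and a landscape-right interface:

// Rotate the preview to match a landscape-right UI (iOS 6+).
AVCaptureConnection *previewConnection = previewLayer.connection;
if (previewConnection.videoOrientationSupported) {
    previewConnection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}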
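One caveat when running this 2013-era code on current systems: camera access now requires user consent. From iOS 7 you can request it with requestAccessForMediaType:, and from iOS 10 the app must also declare NSCameraUsageDescription in its Info.plist or it is terminated on first camera access. A sketch of gating the setup on permission, under those assumptions:

// Ask for camera permission before building the session (iOS 7+).
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
    // The handler may run on an arbitrary queue; hop to the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            [self setupCaptureSession];
        } else {
            // Explain to the user why the camera is needed, or degrade gracefully.
        }
    });
}];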
That is all for this article. I hope it helps with your study, and thank you for your support.