How to Generate a GIF from an Online Video in iOS
Many video apps offer a feature that turns an online video into a GIF. This post walks through the idea and its implementation. We know that a local video can be converted into a GIF, so why not first trim the online video down to a local clip? Judging by its behavior, the Tencent Video app takes the same approach. Without further ado, here is the code:
Step 1: Trim the video
#pragma mark - Trim the video
- (void)interceptVideoAndVideoUrl:(NSURL *)videoUrl
                      withOutPath:(NSString *)outPath
                   outputFileType:(NSString *)outputFileType
                            range:(NSRange)videoRange
                        intercept:(InterceptBlock)interceptBlock {

    _interceptBlock = interceptBlock;

    // No background music is added
    NSURL *audioUrl = nil;
    // AVURLAsset provides access to the media's tracks (video, audio, etc.)
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];

    // AVMutableComposition holds the AVMutableCompositionTracks for the video and audio resources
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // CMTimeRangeMake(start, duration): start time and duration, both of type CMTime
    // CMTimeMake(int64_t value, int32_t timescale): value is a total frame count, timescale the frames per second;
    // value / timescale gives the actual duration in seconds. timescale normally stays fixed; change value to change the cut length.
    // CMTimeMakeWithSeconds(Float64 seconds, int32_t preferredTimescale): seconds is the cut length in seconds, preferredTimescale the frames per second

    // Start position
    CMTime startTime = CMTimeMakeWithSeconds(videoRange.location, videoAsset.duration.timescale);
    // Length to cut
    CMTime videoDuration = CMTimeMakeWithSeconds(videoRange.length, videoAsset.duration.timescale);
    CMTimeRange videoTimeRange = CMTimeRangeMake(startTime, videoDuration);

    // Video track
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Guard against out-of-bounds access: tracksWithMediaType: returns an empty array when no matching track exists
    // timeRange: range to cut; ofTrack: source track; atTime: insertion point in the composition
    [compositionVideoTrack insertTimeRange:videoTimeRange
                                   ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeVideo].count > 0) ? [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject : nil
                                    atTime:kCMTimeZero
                                     error:nil];

    // Original audio track (skip this block to drop the source video's own sound from the merged file)
    AVMutableCompositionTrack *compositionVoiceTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVoiceTrack insertTimeRange:videoTimeRange
                                   ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) ? [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject : nil
                                    atTime:kCMTimeZero
                                     error:nil];

    // The audio range matches the video length
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, videoDuration);

    // Background-music track (audioUrl is nil here, so nothing is inserted)
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack insertTimeRange:audioTimeRange
                                   ofTrack:([audioAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) ? [audioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject : nil
                                    atTime:kCMTimeZero
                                     error:nil];

    // AVAssetExportSession merges the composition and exports the result; presetName selects the output preset
    AVAssetExportSession *assetExportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];

    // Output path for the merged video
    NSURL *outputUrl = [NSURL fileURLWithPath:outPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];
    }

    // Output container format
    assetExportSession.outputFileType = outputFileType;
    assetExportSession.outputURL = outputUrl;
    // Optimize the file for network playback
    assetExportSession.shouldOptimizeForNetworkUse = YES;

    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (assetExportSession.status) {
                case AVAssetExportSessionStatusFailed:
                    if (_interceptBlock) {
                        _interceptBlock(assetExportSession.error, outputUrl);
                    }
                    break;
                case AVAssetExportSessionStatusCancelled: {
                    LogDebug(@"Export status: cancelled");
                    break;
                }
                case AVAssetExportSessionStatusCompleted: {
                    if (_interceptBlock) {
                        _interceptBlock(nil, outputUrl);
                    }
                    break;
                }
                case AVAssetExportSessionStatusUnknown: {
                    LogDebug(@"Export status: unknown");
                    break;
                }
                case AVAssetExportSessionStatusExporting: {
                    LogDebug(@"Export status: exporting");
                    break;
                }
                case AVAssetExportSessionStatusWaiting: {
                    LogDebug(@"Export status: waiting");
                    break;
                }
            }
        });
    }];
}
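For reference, a call site might look like the following minimal sketch. The InterceptBlock signature is inferred from how _interceptBlock is invoked in the export callback, and the remote URL and output path are placeholders, not values from the original post:

// Assumed block signature, matching _interceptBlock(error, outputUrl) above
typedef void (^InterceptBlock)(NSError *error, NSURL *videoUrl);

NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"clip.mp4"];
NSURL *onlineUrl = [NSURL URLWithString:@"https://example.com/video.mp4"]; // placeholder URL

// Cut 5 seconds starting at the 10-second mark
[self interceptVideoAndVideoUrl:onlineUrl
                    withOutPath:outPath
                 outputFileType:AVFileTypeMPEG4
                          range:NSMakeRange(10, 5)
                      intercept:^(NSError *error, NSURL *videoUrl) {
    if (error) {
        NSLog(@"Trim failed: %@", error);
    } else {
        NSLog(@"Trimmed clip saved to %@", videoUrl);
    }
}];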
Step 2: Generate a GIF from the local video
/**
 Generate a GIF

 @param videoUrl      file URL of the source video
 @param loopCount     number of times the GIF loops
 @param time          delay between frames; defaults to 0.25 s
 @param imagePath     file path where the GIF is written
 @param completeBlock completion callback
 */
#pragma mark - Build the GIF
- (void)createGifFromURL:(NSURL *)videoUrl
               loopCount:(int)loopCount
               delayTime:(CGFloat)time
            gifImagePath:(NSString *)imagePath
                complete:(CompleteBlock)completeBlock {

    _completeBlock = completeBlock;
    float delayTime = time ?: 0.25;

    // Create the properties dictionaries
    NSDictionary *fileProperties = [self filePropertiesWithLoopCount:loopCount];
    NSDictionary *frameProperties = [self framePropertiesWithDelayTime:delayTime];

    AVURLAsset *asset = [AVURLAsset assetWithURL:videoUrl];

    float videoWidth = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].width;
    float videoHeight = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].height;

    // Pick an output scale based on the source resolution
    GIFSize optimalSize = GIFSizeMedium;
    if (videoWidth >= 1200 || videoHeight >= 1200)
        optimalSize = GIFSizeVeryLow;
    else if (videoWidth >= 800 || videoHeight >= 800)
        optimalSize = GIFSizeLow;
    else if (videoWidth >= 400 || videoHeight >= 400)
        optimalSize = GIFSizeMedium;
    else if (videoWidth < 400 || videoHeight < 400)
        optimalSize = GIFSizeHigh;

    // Length of the video in seconds
    float videoLength = (float)asset.duration.value / asset.duration.timescale;
    int framesPerSecond = 4;
    int frameCount = videoLength * framesPerSecond;

    // How far along the video track to move per frame, in seconds
    float increment = (float)videoLength / frameCount;

    // Collect the capture time of every frame
    NSMutableArray *timePoints = [NSMutableArray array];
    for (int currentFrame = 0; currentFrame < frameCount; ++currentFrame) {
        float seconds = (float)increment * currentFrame;
        CMTime frameTime = CMTimeMakeWithSeconds(seconds, [timeInterval intValue]);
        [timePoints addObject:[NSValue valueWithCMTime:frameTime]];
    }

    // Build the GIF, then invoke the completion block
    // (falls back to GIFSizeMedium when _gifSize is unset)
    NSURL *gifURL = [self createGIFforTimePoints:timePoints
                                         fromURL:videoUrl
                                  fileProperties:fileProperties
                                 frameProperties:frameProperties
                                    gifImagePath:imagePath
                                      frameCount:frameCount
                                         gifSize:_gifSize ?: GIFSizeMedium];
    if (_completeBlock) {
        // Return the GIF's file URL
        _completeBlock(_error, gifURL);
    }
}
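Chaining the two steps together might look like this sketch. The CompleteBlock signature mirrors how _completeBlock is invoked above and is an assumption, as is the gifPath value:

// Assumed block signature, matching _completeBlock(_error, gifURL) above
typedef void (^CompleteBlock)(NSError *error, NSURL *gifURL);

NSString *gifPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"clip.gif"];

[self createGifFromURL:[NSURL fileURLWithPath:outPath] // local clip produced in step 1
             loopCount:0       // 0 = loop forever
             delayTime:0.25
          gifImagePath:gifPath
              complete:^(NSError *error, NSURL *gifURL) {
    if (!error) NSLog(@"GIF written to %@", gifURL);
}];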
With these two steps you get both the local video clip and the GIF; just store them in the app's sandbox. Here are the supporting methods used by both steps:
#pragma mark - Base methods
- (NSURL *)createGIFforTimePoints:(NSArray *)timePoints
                          fromURL:(NSURL *)url
                   fileProperties:(NSDictionary *)fileProperties
                  frameProperties:(NSDictionary *)frameProperties
                     gifImagePath:(NSString *)imagePath
                       frameCount:(int)frameCount
                          gifSize:(GIFSize)gifSize {

    NSURL *fileURL = [NSURL fileURLWithPath:imagePath];
    if (fileURL == nil)
        return nil;

    CGImageDestinationRef destination = CGImageDestinationCreateWithURL((__bridge CFURLRef)fileURL, kUTTypeGIF, frameCount, NULL);
    CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)fileProperties);

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;

    CMTime tol = CMTimeMakeWithSeconds([tolerance floatValue], [timeInterval intValue]);
    generator.requestedTimeToleranceBefore = tol;
    generator.requestedTimeToleranceAfter = tol;

    NSError *error = nil;
    CGImageRef previousImageRefCopy = nil;

    for (NSValue *time in timePoints) {
        CGImageRef imageRef;

#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
        // Scale the frame down unless the GIF is exported at original size
        imageRef = (float)gifSize/10 != 1 ? createImageWithScale([generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error], (float)gifSize/10) : [generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error];
#elif TARGET_OS_MAC
        imageRef = [generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error];
#endif

        if (error) {
            _error = error;
            LogDebug(@"Error copying image: %@", error);
            CFRelease(destination); // avoid leaking the destination on early return
            return nil;
        }
        if (imageRef) {
            CGImageRelease(previousImageRefCopy);
            previousImageRefCopy = CGImageCreateCopy(imageRef);
        } else if (previousImageRefCopy) {
            // Frame capture failed; duplicate the previous frame
            imageRef = CGImageCreateCopy(previousImageRefCopy);
        } else {
            _error = [NSError errorWithDomain:NSStringFromClass([self class]) code:0 userInfo:@{NSLocalizedDescriptionKey: @"Error copying image and no previous frames to duplicate"}];
            LogDebug(@"Error copying image and no previous frames to duplicate");
            CFRelease(destination);
            return nil;
        }
        CGImageDestinationAddImage(destination, imageRef, (__bridge CFDictionaryRef)frameProperties);
        CGImageRelease(imageRef);
    }
    CGImageRelease(previousImageRefCopy);

    // Finalize the GIF
    if (!CGImageDestinationFinalize(destination)) {
        _error = error;
        LogDebug(@"Failed to finalize GIF destination: %@", error);
        if (destination != nil) {
            CFRelease(destination);
        }
        return nil;
    }
    CFRelease(destination);

    return fileURL;
}

#pragma mark - Helpers
CGImageRef createImageWithScale(CGImageRef imageRef, float scale) {

#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
    CGSize newSize = CGSizeMake(CGImageGetWidth(imageRef)*scale, CGImageGetHeight(imageRef)*scale);
    CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));

    UIGraphicsBeginImageContextWithOptions(newSize, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (!context) {
        return nil;
    }

    // Set the quality level to use when rescaling
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, newSize.height);

    CGContextConcatCTM(context, flipVertical);
    // Draw into the context; this scales the image
    CGContextDrawImage(context, newRect, imageRef);

    // Release the old image
    CFRelease(imageRef);
    // Get the resized image from the context
    imageRef = CGBitmapContextCreateImage(context);

    UIGraphicsEndImageContext();
#endif

    return imageRef;
}

#pragma mark - Properties
- (NSDictionary *)filePropertiesWithLoopCount:(int)loopCount {
    return @{(NSString *)kCGImagePropertyGIFDictionary:
                 @{(NSString *)kCGImagePropertyGIFLoopCount: @(loopCount)}
             };
}

- (NSDictionary *)framePropertiesWithDelayTime:(float)delayTime {
    return @{(NSString *)kCGImagePropertyGIFDictionary:
                 @{(NSString *)kCGImagePropertyGIFDelayTime: @(delayTime)},
             (NSString *)kCGImagePropertyColorModel: (NSString *)kCGImagePropertyColorModelRGB
             };
}
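For the code above to compile, the implementation file needs at least these framework imports (kUTTypeGIF lives in MobileCoreServices, the CGImageDestination APIs in ImageIO). Note that timeInterval, tolerance, the GIFSize enum, and the LogDebug macro are assumed to be defined elsewhere in the original class:

#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h> // for kUTTypeGIF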
Finally, the trimmed local video can be played back with AVPlayer, and the generated GIF can be loaded with UIWebView, WKWebView, or YYImage (a minimal WKWebView example follows).
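As an illustration of that last point, here is a small sketch that loads the generated GIF into a WKWebView; gifPath is the placeholder path from the earlier example:

#import <WebKit/WebKit.h>

WKWebView *webView = [[WKWebView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:webView];

// Load the GIF data directly; WKWebView animates GIFs out of the box
NSData *gifData = [NSData dataWithContentsOfFile:gifPath];
[webView loadData:gifData
         MIMEType:@"image/gif"
characterEncodingName:@"UTF-8"
          baseURL:[NSURL fileURLWithPath:gifPath]];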
That's all for this article. I hope it helps with your learning, and thank you for your continued support.