用到的FrameWork有:
MediaPlayer.framework, QuartzCore.framework, CoreVideo.framework, CoreMedia.framework, AVFoundation.framework
代码如下:
/// Creates a 32ARGB CVPixelBuffer of the given size and draws `image` into it.
/// Returns a +1 (owned) pixel buffer — the caller is responsible for releasing
/// it with CFRelease/CVPixelBufferRelease. Returns NULL on failure.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          (size_t)size.width, (size_t)size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        NSLog(@"CVPixelBufferCreate failed: %d", (int)status);
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's own bytes-per-row rather than assuming 4*width:
    // CoreVideo may pad each row, and a mismatched stride skews the image.
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 (size_t)size.width, (size_t)size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer); // don't leak the buffer on failure
        return NULL;
    }

    // NOTE(review): the image is drawn at its native size, not scaled to `size`
    // — presumably the source images already match the video dimensions; confirm.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
/// Composites the images in `imageArr` into an H.264 QuickTime movie at
/// `betaCompressionDirectory`. Each image is held for 10 frames on a 10 Hz
/// timescale, i.e. one second of video per image.
- (IBAction)testCompressionSession
{
    CGSize size = CGSizeMake(320, 400); // output video dimensions

    // AVAssetWriter refuses to write over an existing file; remove any old output.
    unlink([betaCompressionDirectory UTF8String]);

    // ---- initialize compression engine
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    // Check the return value, not `error` — error may be stale on success.
    if (videoWriter == nil) {
        NSLog(@"error = %@", [error localizedDescription]);
        return;
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
                                                           kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                   sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Combine the images into a single video file on a background queue.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (++frame >= [imageArr count] * 10)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }
            int idx = frame / 10; // same image for 10 consecutive frames
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[imageArr objectAtIndex:idx] CGImage]
                                                              size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)])
                    NSLog(@"FAIL");
                // Release unconditionally: the adaptor retains the buffer on a
                // successful append, and the buffer must not leak on failure either.
                CFRelease(buffer);
            }
        }
    }];
}
用到的FrameWork有:
MediaPlayer.framework, QuartzCore.framework, CoreVideo.framework, CoreMedia.framework, AVFoundation.framework
代码如下:
/// Creates a 32ARGB CVPixelBuffer of the given size and draws `image` into it.
/// Returns a +1 (owned) pixel buffer — the caller is responsible for releasing
/// it with CFRelease/CVPixelBufferRelease. Returns NULL on failure.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          (size_t)size.width, (size_t)size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        NSLog(@"CVPixelBufferCreate failed: %d", (int)status);
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's own bytes-per-row rather than assuming 4*width:
    // CoreVideo may pad each row, and a mismatched stride skews the image.
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 (size_t)size.width, (size_t)size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer); // don't leak the buffer on failure
        return NULL;
    }

    // NOTE(review): the image is drawn at its native size, not scaled to `size`
    // — presumably the source images already match the video dimensions; confirm.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
/// Composites the images in `imageArr` into an H.264 QuickTime movie at
/// `betaCompressionDirectory`. Each image is held for 10 frames on a 10 Hz
/// timescale, i.e. one second of video per image.
- (IBAction)testCompressionSession
{
    CGSize size = CGSizeMake(320, 400); // output video dimensions

    // AVAssetWriter refuses to write over an existing file; remove any old output.
    unlink([betaCompressionDirectory UTF8String]);

    // ---- initialize compression engine
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    // Check the return value, not `error` — error may be stale on success.
    if (videoWriter == nil) {
        NSLog(@"error = %@", [error localizedDescription]);
        return;
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
                                                           kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                   sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Combine the images into a single video file on a background queue.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (++frame >= [imageArr count] * 10)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }
            int idx = frame / 10; // same image for 10 consecutive frames
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[imageArr objectAtIndex:idx] CGImage]
                                                              size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)])
                    NSLog(@"FAIL");
                // Release unconditionally: the adaptor retains the buffer on a
                // successful append, and the buffer must not leak on failure either.
                CFRelease(buffer);
            }
        }
    }];
}
分享到:
相关推荐
一个可以将多张图片合成成一个Gif图片的工具,可以通过iTunes下的Document对外导出
多个视频合成的demo
NULL 博文链接:https://zhangmingwei.iteye.com/blog/1990661
iOS视频图片混合轮播,支持视频播放和图片预览
iOS(OC)实现通过多张图片生成一个完成的PDF文件,pdf单页宽高克自己调整
使用ios的sdk,进行视频合成,把两个视频整合到一起(前后),并且可以选择添加音频 ----libn
ios 通过单帧图片合成gif图片
ios 从相册选择多张照片 可以自己限定最大选择照片数量
iOS 音视频合成,通过传入一个视频通道,一个音乐通道,然后输出一个合成之后的文件,每行都有注释
1.uniapp写视频上传时,小程序端有多视频上传,但是Android和ios没有,当前插件就是解决iOS端多视频上传的问题。 2.如果需要Android端的相关插件可以搜索我的资源库里面有相关插件。 3.使用方法: let video=uni....
iOS视频和图片混合轮播 2种方案,开发者任选其一
ios无人直播-虚拟视频-刷脸 打开相机播放指定视频 视频内录搬运 iphone全局 deb文件完整版 直接安装即可文件内包含了视频使用说明 还有安装步骤 不明白也可以联系我 有言必回
七牛 ios sdk上传多张图片,一个block 返回所有上传成功的url
一个iOS应用,用于清除iOS照片库中所有照片和视频的定位信息,避免第三方应用擅自收集定位信息
ios多文件编程用法: 作用:保存应用的信息,软件名称、版本号等等,相当于身份证 Bundle name:程序名称,不能超过12个字节 Bundle versions string, short:APP版本号 Bundle identifier:APP项目唯一标识 Bundle ...
苹果虚拟摄像头|无人直播|虚拟相机deb|硬改手机摄像头|搬运去重|tiktok抖音搬运直播ios无人直播虚拟视频虚拟相机deb文件ios虚拟视频无人直播-appel6键deb最新版摄像头模块,不限时长,高清不卡顿!ios虚拟视频无人...
iOS图片生成PDF,支持单张或者多张,图片越大,清晰度就会越高
ios 从相册选择多张照片 可以自己限定最大选择照片数量
ios11真机支持文件,能让XCode在ios11上运行,ios11可用
iOS手机移动端开发,使用socket通讯实现文件以及多文件的上传和下载,可以实现多线程,与君共享。