`
chriszeng87
  • 浏览: 738532 次
  • 性别: Icon_minigender_1
  • 来自: 北京
社区版块
存档分类
最新评论

录制视频(帧数版本) iOS

    博客分类:
  • iOS
iOS 
阅读更多

//
//  CameraHelp.h
//  
//
//  Created by Zhuang Chuan Xian. on 11-6-28.
//  Copyright 2011  . All rights reserved.
//
#import <UIKit/UIKit.h>

#import <AVFoundation/AVFoundation.h>
#undef PRODUCER_HAS_VIDEO_CAPTURE
#define PRODUCER_HAS_VIDEO_CAPTURE (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000 && TARGET_OS_EMBEDDED)
/// Delegate that receives each captured video frame as a UIImage.
@protocol CameraHelpDelegate
// Called once per captured sample buffer with the frame converted to a
// UIImage. Invoked on the capture dispatch queue (see startVideoCapture),
// not the main thread — dispatch to main before touching UI.
-(void) getSampleBufferImage:(UIImage *) v_image ;
@end

/// Singleton helper wrapping AVFoundation camera capture: device
/// selection (front/back), session lifecycle, on-screen preview, and
/// per-frame delegate callbacks.
@interface CameraHelp : NSObject
#if PRODUCER_HAS_VIDEO_CAPTURE
<AVCaptureVideoDataOutputSampleBufferDelegate>
#endif
{
@private
    int mWidth;          // requested frame width (not applied to the preset — see startVideoCapture)
    int mHeight;         // requested frame height
    int mFps;            // requested frame rate
    BOOL mFrontCamera;   // YES when the front camera is selected
    BOOL mFirstFrame;    // YES until the first frame has been delivered
    BOOL mStarted;       // YES once startVideoCapture has run
    UIView* mPreview;    // view hosting the preview layer (retained)
    id<CameraHelpDelegate> outDelegate;  // frame receiver (not retained)
#if PRODUCER_HAS_VIDEO_CAPTURE
    AVCaptureSession* mCaptureSession;
    AVCaptureDevice *mCaptureDevice;
#endif
}
// Singleton accessor
+ (CameraHelp*)shareCameraHelp;
+ (void)closeCamera;
// Switch to the front-facing camera
- (BOOL)setFrontCamera;
// Switch to the back-facing camera
- (BOOL)setBackCamera;
// Configure capture parameters before starting
- (void)prepareVideoCapture:(int) width andHeight: (int)height andFps: (int) fps andFrontCamera:(BOOL) bfront andPreview:(UIView*) view;
// Start capturing
- (void)startVideoCapture;
// Stop capturing
- (void)stopVideoCapture;
// Set the view the preview is rendered into
- (void)setPreview: (UIView*)preview;
// Register the per-frame output delegate
- (void)setVideoDataOutputBuffer:(id<CameraHelpDelegate>)delegate;
@end
-------------------------------------------------------------------------
//
//  CameraHelp.m
// 
//
//  Created by zcx. on 11-6-28.
//  Copyright 2011  . All rights reserved.
//

#import "CameraHelp.h"
//
//    Private
//
/// Internal helpers, compiled only when video capture is available.
@interface CameraHelp (Private)

#if PRODUCER_HAS_VIDEO_CAPTURE
// Returns the capture device at the given position, or the default video device.
+(AVCaptureDevice *)cameraAtPosition:(AVCaptureDevicePosition)position;
// Attaches a preview layer to mPreview and starts the session.
- (void)startPreview;
// Stops the session and detaches the preview layer.
- (void)stopPreview;
#endif

@end

@implementation CameraHelp (Private)

#if PRODUCER_HAS_VIDEO_CAPTURE
/// Returns the first video capture device whose position matches,
/// falling back to the system default video device when none does.
+ (AVCaptureDevice *)cameraAtPosition:(AVCaptureDevicePosition)position{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in cameras){
        if (device.position == position){
            return device;
        }
    }
    // No camera at the requested position; use whatever the system offers.
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

/// Attaches a new AVCaptureVideoPreviewLayer for the current session to
/// the preview view and starts the session if it is not running.
/// Requires a session, a preview view, and mStarted == YES.
/// NOTE(review): each call adds a fresh preview layer without removing a
/// previous one — repeated calls may stack layers; confirm callers pair
/// this with stopPreview / setPreview:nil.
- (void)startPreview{
    if(mCaptureSession && mPreview && mStarted){
        AVCaptureVideoPreviewLayer* previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: mCaptureSession];
        previewLayer.frame = mPreview.bounds;
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//        if(previewLayer.orientationSupported){
//            previewLayer.orientation = mOrientation;
//        }
        [mPreview.layer addSublayer: previewLayer];
       
        if(![mCaptureSession isRunning]){
            [mCaptureSession startRunning];
        }
    }
}

/// Stops the running session and removes the preview layer from the
/// preview view.
/// NOTE(review): the layer is only removed when the session was running;
/// a stopped session leaves the layer attached.
- (void)stopPreview{
    if(mCaptureSession){       
        if([mCaptureSession isRunning]){
            [mCaptureSession stopRunning];
           
            // remove all sublayers
            if(mPreview){
                for(CALayer *ly in mPreview.layer.sublayers){
                    if([ly isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    {
                        [ly removeFromSuperlayer];
                        break;
                    }
                }
            }
        }
    }
}
#endif
@end

@implementation CameraHelp
// Shared singleton instance; created lazily by +shareCameraHelp or the
// first -init.
static CameraHelp* g_camera = 0;

/// Designated initializer. Funnels every allocation onto the shared
/// singleton: if one already exists, the freshly allocated receiver is
/// released (manual reference counting) and the existing instance is
/// returned — the original leaked the redundant allocation.
- (id)init
{
    if (g_camera)
    {
        // Discard the redundant allocation so it does not leak.
        if (self != g_camera)
            [self release];
        return g_camera;
    }
    if (self = [super init])
    {
        // Defaults; replaced by prepareVideoCapture:... before capture.
        mWidth = 30;
        mHeight = 30;
        mFps = 60;
        mFrontCamera = NO;
        mStarted = NO;
        outDelegate = nil;
        g_camera = self;
    }
    return g_camera;
}
/// Releases retained capture objects. The preview view is retained
/// unconditionally in prepareVideoCapture:/setPreview: (outside the
/// capture macro), so it must be released unconditionally here as well —
/// the original release was guarded and leaked when the macro was off.
-(void)dealloc
{
#if PRODUCER_HAS_VIDEO_CAPTURE
    [mCaptureSession release];
    [mCaptureDevice release];
#endif
    [mPreview release];
    [super dealloc];
}
/// Returns the process-wide CameraHelp singleton, creating it on first
/// access.
+ (CameraHelp*)shareCameraHelp
{
    if (g_camera == nil)
    {
        g_camera = [[CameraHelp alloc] init];
    }
    return g_camera;
}
/// Tears down the singleton. Sends release rather than calling -dealloc
/// directly: invoking dealloc explicitly bypasses the reference count
/// and corrupts memory if any other code still retains the object.
+ (void)closeCamera
{
    if (g_camera)
    {
        [g_camera release];
        g_camera = nil;
    }
}
/// Stores capture parameters ahead of startVideoCapture and retains the
/// preview view. Restarts a running session so new parameters apply.
/// @param width/height requested frame size (note: startVideoCapture
///        currently uses a fixed 640x480 preset — TODO confirm intent).
/// @param fps requested frame rate.
/// @param bfront YES to use the front camera.
/// @param view view that will host the preview layer (retained).
- (void)prepareVideoCapture:(int) width andHeight: (int)height andFps: (int) fps andFrontCamera:(BOOL) bfront andPreview:(UIView*) view
{
    mWidth = width;
    mHeight = height;
    mFps = fps;
    mFrontCamera = bfront;
    if (view && view != mPreview)
    {
        // Release any previously retained preview view before taking
        // ownership of the new one; the original re-retained on every
        // call and leaked the old view.
        [mPreview release];
        mPreview = [view retain];
    }
#if PRODUCER_HAS_VIDEO_CAPTURE   
    // Restart an active session so the new parameters take effect.
    if([mCaptureSession isRunning])
    {
        [self stopVideoCapture];
        [self startVideoCapture];
    }
#endif
}
/// Opens the selected camera, builds the capture session (640x480,
/// BGRA frames), and begins streaming frames to the sample-buffer
/// delegate on a private queue. No-op when capture is already active.
- (void)startVideoCapture
{
#if PRODUCER_HAS_VIDEO_CAPTURE   
    // Keep the screen awake while capturing.
    [[UIApplication sharedApplication] setIdleTimerDisabled:YES];
    if(mCaptureDevice || mCaptureSession)
    {
        NSLog(@"Already capturing");
        return;
    }

    // Retain the device: it is released in stopVideoCapture/dealloc, and
    // the original assigned it without retaining (over-release on stop).
    mCaptureDevice = [[CameraHelp cameraAtPosition:mFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack] retain];
    if(mCaptureDevice == nil)
    {
        NSLog(@"Failed to get valide capture device");
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:mCaptureDevice error:&error];
    if (!videoInput)
    {
        NSLog(@"Failed to get video input");
        [mCaptureDevice release], mCaptureDevice = nil;
        return;
    }

    mCaptureSession = [[AVCaptureSession alloc] init];
    mCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;

    if ([mCaptureSession canAddInput:videoInput]) {
        [mCaptureSession addInput:videoInput];
    }

    // BGRA output so frames can be wrapped directly in a CGBitmapContext
    // (see imageFromSampleBuffer:). On iPhone 3G the recommended choices
    // are kCVPixelFormatType_422YpCbCr8 or kCVPixelFormatType_32BGRA.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* settings = [NSDictionary dictionaryWithObject:value forKey:key];
    avCaptureVideoDataOutput.videoSettings = settings;
    // Drop frames the delegate cannot keep up with instead of queueing.
    avCaptureVideoDataOutput.alwaysDiscardsLateVideoFrames = YES;

    // Deliver frames on a private serial queue (not the main thread).
    dispatch_queue_t queue = dispatch_queue_create("com.gh.cecall", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    if ([mCaptureSession canAddOutput:avCaptureVideoDataOutput]) {
        [mCaptureSession addOutput:avCaptureVideoDataOutput];
    }
    // NOTE: `settings` is autoreleased (dictionaryWithObject:forKey:); the
    // original sent it an extra release here, causing an over-release.
    [avCaptureVideoDataOutput release];
    dispatch_release(queue);
    mFirstFrame = YES;
    mStarted = YES;

    // Attach the preview layer and start the session running.
    [self startPreview];
   
#endif
}
/// Stops the session, releases capture objects, and detaches the
/// preview from the preview view.
- (void)stopVideoCapture
{
#if PRODUCER_HAS_VIDEO_CAPTURE   
    if(mCaptureSession){
        [mCaptureSession stopRunning];
        [mCaptureSession release], mCaptureSession = nil;
        NSLog(@"Video capture stopped");
    }
    [mCaptureDevice release], mCaptureDevice = nil;
   
    if(mPreview){
        // The preview is rendered by an AVCaptureVideoPreviewLayer
        // *sublayer* (see startPreview); removing subviews alone never
        // detached it in the original. Remove the layer explicitly.
        for (CALayer *ly in mPreview.layer.sublayers) {
            if ([ly isKindOfClass:[AVCaptureVideoPreviewLayer class]]) {
                [ly removeFromSuperlayer];
                break;
            }
        }
        for (UIView *view in mPreview.subviews) {
            [view removeFromSuperview];
        }
    }
#endif
}



/// Switches capture to the front-facing camera, restarting the session
/// if it was using the back camera. Always returns YES.
- (BOOL)setFrontCamera
{
    if (!mFrontCamera)
    {
        [self stopVideoCapture];
        mFrontCamera = YES;
        [self startVideoCapture];
    }
    return YES;
}

/// Switches capture to the back-facing camera, restarting the session
/// if it was using the front camera. Always returns YES.
- (BOOL)setBackCamera{
    if (mFrontCamera)
    {
        [self stopVideoCapture];
        mFrontCamera = NO;
        [self startVideoCapture];
    }
    return YES;
}

/// Replaces (or clears) the view that hosts the capture preview layer.
/// Passing nil stops the preview, detaches the layer, and releases the
/// current view; passing a view retains it and starts previewing.
- (void) setPreview: (UIView*)preview{
#if PRODUCER_HAS_VIDEO_CAPTURE   
    if (preview != nil) {
        // Take ownership of the new view, dropping the old one.
        [mPreview release];
        mPreview = [preview retain];
        if (mPreview) {
            [self startPreview];
        }
        return;
    }

    // nil view: stop previewing and tear down the preview layer.
    [self stopPreview];
    if (mPreview) {
        for (CALayer *candidate in mPreview.layer.sublayers) {
            if ([candidate isKindOfClass:[AVCaptureVideoPreviewLayer class]]) {
                [candidate removeFromSuperlayer];
                break;
            }
        }
        [mPreview release], mPreview = nil;
    }
   
#endif
}
/// Registers the frame receiver. The delegate is stored without being
/// retained (assign semantics), which avoids a retain cycle; the caller
/// must keep the delegate alive while capture runs.
- (void)setVideoDataOutputBuffer:(id<CameraHelpDelegate>)delegate
{
    outDelegate = delegate;
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
#if PRODUCER_HAS_VIDEO_CAPTURE   
/// AVCaptureVideoDataOutputSampleBufferDelegate callback. Converts each
/// captured frame to a UIImage and forwards it to the registered
/// delegate. Runs on the capture queue created in startVideoCapture,
/// not on the main thread.
/// Fixes vs. original: the method body was missing its opening brace (a
/// syntax error), and a disabled `#if 0` block — which double-locked the
/// pixel buffer and called delegate methods not declared in the
/// protocol — has been removed.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Drain the autoreleased UIImage every frame; without a local pool
    // the images would pile up on the background queue between drains.
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    if (outDelegate) {
        // Delegate must retain the image if it needs it past this call.
        [outDelegate getSampleBufferImage:image];
    }
    [pool release];
}

// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
   
    // Get the number of bytes per row for the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
   
    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
   
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
   
    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
   
    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
   
    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
   
    // Release the Quartz image
    CGImageRelease(quartzImage);
   
    return (image);
}

#endif
@end
-----------------------------------将图片保存为视频-------------------------------
/// Encodes the recorded image sequence (m_mutableArrayDatas) into a
/// QuickTime movie in the sandbox, watermarking each frame and
/// reporting progress through m_delegate; spawns saveOneImageAndPlist
/// when the file is finished.
- (void) saveVideo {
    NSString *strSpeed = nil;
    NSString *strAgle = nil;
    if (m_saveMutableDict) {
        strSpeed = [m_saveMutableDict objectForKey:SWING_SPEED];
        strAgle = [m_saveMutableDict objectForKey:SWING_ANGLE];
    }

    // Output dimensions (portrait 480x640 on both device families).
    CGSize size;
#if isPad
    size = CGSizeMake(480,640); // 960*640
#else
    size = CGSizeMake(480,640);
#endif

    NSError *error = nil;
    NSString *filePath = [[Utilities getSanBoxPath] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.m_strUUID]];

    // Remove any stale file; AVAssetWriter refuses to overwrite.
    unlink([filePath UTF8String]);

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:filePath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if(error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    // The adaptor expects ARGB buffers (see pixelBufferFromCGImage:...).
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Feed the frames to the writer on a background queue.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
            {
            // Index from 0: the original pre-incremented (`++frame`) and
            // silently skipped the first recorded image.
            if (frame >= [m_mutableArrayDatas count])
                {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                dispatch_release(dispatchQueue);
                [NSThread detachNewThreadSelector:@selector(saveOneImageAndPlist) toTarget:self withObject:nil];
                break;
                }

            UIImage *imageOld = [m_mutableArrayDatas objectAtIndex:frame];
            // Report progress as the fraction of frames processed.
            if (m_delegate && [m_delegate respondsToSelector:@selector(saveVideoWithProgress:)]) {
                [m_delegate saveVideoWithProgress:(1.0f*frame/[m_mutableArrayDatas count])];
            }

            // Convert the image into a +1 retained ARGB pixel buffer.
            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[imageOld CGImage] size:size andSpeed:strSpeed andAngle:strAgle];
            if (buffer)
                {
                BOOL appended = [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, m_floatFPS)];
                // Release in both outcomes — the original leaked the
                // buffer whenever the append failed.
                CFRelease(buffer);
                if (!appended) {
                    NSLog(@"视频录制出错了");
                    dispatch_release(dispatchQueue);
                    // Restore the initial state after a write failure and
                    // stop feeding frames (the original kept looping).
                    [self restoreDefault];
                    [videoWriter release];
                    return;
                }
                }
            frame++;
            }
    }];
}


/// Renders a CGImage (rotated 90° counter-clockwise into portrait), a
/// watermark logo, and optional speed/angle captions into a newly
/// created 32ARGB pixel buffer.
/// @return A +1 retained CVPixelBufferRef the caller must CFRelease, or
///         NULL if the buffer could not be created.
- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size andSpeed:(NSString *)v_speed andAngle:(NSString*)v_angle
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        // NSParameterAssert is compiled out of release builds; bail out
        // instead of dereferencing a NULL buffer (original crashed here).
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextSaveGState(context);

    // Rotate the landscape source frame into the portrait buffer.
    CGContextRotateCTM(context, -M_PI_2);
    CGContextTranslateCTM(context, -size.height, 0);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),CGImageGetHeight(image)), image);
    CGContextRestoreGState(context);

    // Overlay the watermark logo near a corner.
    UIImage *imageLogo = [UIImage imageNamed:@"Watermark.png"];
    CGRect rectLogo ;
#if isPad
    rectLogo = CGRectMake(size.width-imageLogo.size.width-20.0f, size.height-imageLogo.size.height-170.0f, imageLogo.size.width, imageLogo.size.height);
#else
    rectLogo = CGRectMake(size.width-imageLogo.size.width-50.0f, size.height-imageLogo.size.height-25.0f, imageLogo.size.width, imageLogo.size.height);
#endif
    CGContextDrawImage(context, rectLogo, imageLogo.CGImage);

    // Draw swing speed/angle captions only when a swing was recorded.
    if (m_saveMutableDict) {       
#if isPad   
        MyDrawText(context , CGPointMake(20.0f, size.height-imageLogo.size.height-150.0f),v_speed);
        MyDrawText(context , CGPointMake(20.0f, size.height-imageLogo.size.height-180.0f),v_angle);
#else
        MyDrawText(context , CGPointMake(70.0f, size.height-30.0f),v_speed);
        MyDrawText(context , CGPointMake(70.0f, size.height-53.0f),v_angle);
#endif  
    }
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

/// Draws outlined yellow text at the given point using the Impact font
/// via the (legacy) CGContextSelectFont/CGContextShowTextAtPoint path,
/// which expects MacRoman-compatible bytes.
void MyDrawText (CGContextRef myContext, CGPoint point, NSString *v_strContext) {
    // Guard against nil: -UTF8String on nil returns NULL and
    // strlen(NULL) would crash.
    if (v_strContext == nil) {
        return;
    }
    // Both device families used identical font settings, so the former
    // duplicated #if isPad / #else branches are collapsed into one.
    CGContextSelectFont (myContext,
                         "Impact",
                         20.0f,
                         kCGEncodingMacRoman);
    CGContextSetCharacterSpacing (myContext, 1);
    CGContextSetTextDrawingMode (myContext, kCGTextFillStroke);
    CGContextSetLineWidth(myContext, 1.0f);
    // Yellow fill with a black stroke for legibility over video frames.
    CGContextSetFillColorWithColor(myContext, [UIColor colorWithRed:251.0f/255.0f green:237.0f/255.0f blue:75.0f/255.0f alpha:1.0f].CGColor);
    CGContextSetStrokeColorWithColor(myContext, [UIColor blackColor].CGColor) ;
    const char *text = v_strContext.UTF8String;
    CGContextShowTextAtPoint (myContext, point.x, point.y, text, strlen(text));
}

 

转自:http://blog.sina.com.cn/s/blog_64ea868501018a7x.html

分享到:
评论

相关推荐

    iOS 录制视频流 转uiimage

    在iOS开发中,录制视频流并将其转换为UIImage是一项常见的需求,这通常涉及到多媒体处理、图形编程以及实时渲染等技术。下面将详细讲解这个过程涉及的知识点。 首先,我们需要了解如何在iOS上进行实时视频录制。...

    ios-录制视频.zip

    在iOS开发中,录制视频并将其保存到应用的沙盒目录是一项常见的任务,尤其是在构建具有媒体功能的应用时。本示例“ios-录制视频.zip”提供了一个演示如何实现这一功能的代码实例,尤其是结合了动画和文字渲染。下面...

    ios应用源码之获取本地视频库和获取摄像头视频流 videoupload 20181210

    首先添加`AVCaptureDeviceInput`代表摄像头输入,然后添加`AVCaptureVideoDataOutput`或`AVCaptureMovieFileOutput`来接收或录制视频数据。 5. **设置AVCaptureDevice**: 需要获取摄像头设备,通常使用`...

    Opencv For IOS 配置视频

    在iOS中,你可以使用AVFoundation框架来录制视频。创建一个AVCaptureSession实例,添加视频输入和输出设备,然后开始录制。确保设置正确的捕获设备、分辨率和帧率。例如: ```swift let captureSession = ...

    ios视频拍摄&转码mp4

    添加AVCaptureVideoDataOutput到会话中,可以捕获视频帧数据。设置合适的 AVCaptureDeviceInput 作为输入源,比如前摄像头或后摄像头,然后开始会话,即可开始拍摄。 视频拍摄过程中,我们还可以自定义拍摄参数。...

    ios-视频水印.zip

    通过分析和学习这个文件,你可以更好地理解和掌握如何在iOS应用中实现实时录制视频并添加水印的功能。 总之,AVFoundation框架为开发者提供了强大的视频处理能力,包括添加水印。通过深入研究“ios-视频水印.zip”...

    ios屏幕录制

    2. **录制视频初始化**:使用AVFoundation框架中的AVAssetWriter和AVAssetWriterInput来准备录制视频。创建一个AVAssetWriter实例,设置其输出格式为MP4,因为这是一个常见的、跨平台的视频格式。同时,创建一个...

    iOS屏幕录制DEMO

    AVFoundation框架也提供了AVCaptureAudioInput和AVAssetWriterInput的对应音频版本,可以同时捕获设备的音频,将其与视频同步。 6. **权限管理**: iOS系统对屏幕录制有严格的权限控制,开发者需要在Info.plist中...

    ios视频通话demo

    - **渲染**:解码后的视频帧将被渲染到屏幕上的某个UIView或CALayer,而音频则通过AVAudioPlayer或AVAudioUnit进行播放。 5. **用户界面与交互**: - **UI设计**:创建具有呼叫按钮、挂断按钮、摄像头切换、静音...

    iOS视频引导页demo

    视频可以是静态图片序列帧合成的,也可以是实际录制的动态内容。 2. **AVFoundation框架**:苹果的AVFoundation框架提供了处理多媒体内容的能力,包括播放视频。在这个demo中,我们主要会用到`AVPlayer`和`...

    ios-仿抖音单击拍摄.zip

    - `AVCaptureVideoDataOutput`用于接收来自摄像头的原始视频帧数据,可以设置其sample buffer delegate,以便在每一帧被捕捉时进行处理。 3. **捕获视频帧** - 在`AVCaptureVideoDataOutputSampleBufferDelegate`...

    iOS音视频相机组件

    开发者可以通过设置AVCaptureDeviceInput来添加视频输入源,并通过AVCaptureVideoDataOutput或AVCaptureMetadataOutput来接收视频帧或元数据。 视频编码是指将原始视频数据转换成可存储或传输的格式。在iOS中,通常...

    IOS 下实现直播视频

    在iOS平台上实现直播视频涉及到多个技术领域,包括多媒体处理、网络传输、硬件访问以及软件架构设计。本项目"LiveStreamer-iOS-master"旨在提供一套完整的解决方案,帮助开发者将手机摄像头捕获的视频实时推送到...

    mp4v2 for iOS

    H.264编码器将视频帧转换为NAL单元,AAC编码器将音频样本编码为AAC帧。 2. **封装码流**:MP4v2库提供了函数来创建一个新的MP4文件,并添加音视频轨。对于视频,每个NAL单元被封装为一个媒体样本;对于音频,每个...

    ios - 视频合并

    在iOS开发中,视频合并是一项常见的任务,尤其在制作应用如短视频编辑、视频分享或多媒体交互等场景下。本文将详细讲解如何利用苹果的AVFoundation框架来实现视频合并的功能。 首先,我们需要理解AVFoundation框架...

    DMGetVideoDemo

    我们需要配置一个`AVCaptureVideoDataOutput`对象来捕获视频帧,并设置其代理以接收这些帧。通过`AVCaptureVideoPreviewLayer`,可以在屏幕上预览相机的实时画面。当用户点击拍摄按钮时,可以使用`AVAssetWriter`将...

    iOS 摄像头捕获视频流

    本篇文章将深入探讨如何在iOS中实现这一技术,包括视频帧的捕获、转化以及连续处理。 首先,我们需要引入`AVFoundation`框架,它是iOS中用于多媒体处理的核心框架。`AVCaptureSession`类是捕获流程的中心,它负责...

    iOS-多媒体-录制特效影片保存到相册-29GPUImage8-record

    AVCaptureVideoDataOutput用于捕获视频帧,而AVAssetWriter则负责将这些帧写入到一个文件中。 3. 实时特效:GPUImage框架的优势在于其高效的GPU处理能力。在录制过程中,我们可以将GPUImageFilter添加到...

    swift-视频录制美化GPUImage是一个基于GPU图像和视频处理的开源iOS框架

    它包含了一系列预定义的滤镜,如模糊、锐化、色彩平衡等,开发者可以轻松地将这些滤镜应用到图片或视频帧上。通过GPUImage,我们可以创建自定义滤镜,或者组合多个滤镜来实现独特的视觉效果。例如,你可以先用一个...

Global site tag (gtag.js) - Google Analytics