Displaying Video with AVFoundation on iOS
Source: jb51  Date: 2019/4/22 8:38:01

This article shares a working example of how to display live camera video on iOS with AVFoundation. The complete code is listed below for reference.

//
//  Capter2ViewController.m
//  IosTest
//
//  Created by garin on 13-7-19.
//  Copyright (c) 2013 garin. All rights reserved.
//
#import "Capter2ViewController.h"
#import <AVFoundation/AVFoundation.h> // capture classes (harmless if the header already imports it)

@interface Capter2ViewController ()
@end

@implementation Capter2ViewController

- (void)dealloc
{
    [session release];
    [super dealloc];
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    videoPreviewView = [[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
    [self.view addSubview:videoPreviewView];
    [videoPreviewView release];
    // Do any additional setup after loading the view.
    // Call the method below from viewDidLoad to show the camera feed.
    [self setupCaptureSession];

    // imgView = [[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
    // imgView.backgroundColor = [UIColor grayColor];
    // [self.view addSubview:imgView];
    // [imgView release];

    UIButton *closeBtn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    closeBtn.frame = CGRectMake(10, 220, 300, 50);
    [closeBtn setTitle:@"Press" forState:UIControlStateNormal];
    [closeBtn addTarget:self action:@selector(closeBtnClick:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:closeBtn];
}

- (void)closeBtnClick:(id)sender
{
    [session stopRunning];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. Here a low-quality preset is chosen.
    session.sessionPreset = AVCaptureSessionPresetLow;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
    }
    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Configure the output: deliver sample buffers to this controller on a serial queue.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format
    output.videoSettings = [NSDictionary dictionaryWithObject:
                                [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // If you wish to cap the frame rate to a known value, such as 15 fps, set
    // minFrameDuration.
    //output.minFrameDuration = CMTimeMake(1, 15);
    //AVCaptureConnection *avcaptureconn = [[AVCaptureConnection alloc] init];
    //[avcaptureconn setVideoMinFrameDuration:CMTimeMake(1, 15)];

    // Start the session running to start the flow of data
    [session startRunning];

    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = videoPreviewView.bounds; // the UIView the video is displayed in
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // [previewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight];
    // if (previewLayer.orientationSupported) {
    //     previewLayer.orientation = mOrientation;
    // }
    [videoPreviewView.layer addSublayer:previewLayer];

    if (![session isRunning]) {
        [session startRunning];
    }

    // Assign session to an ivar.
    //[self setSession:session];
}

// Delegate callback that receives the video frames
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data.
    // The early return below disables per-frame conversion because imgView is
    // commented out in viewDidLoad; remove it to display each captured frame.
    return;
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    // Show the captured frame. Note: this delegate runs on the capture queue
    // ("myQueue"), so dispatch to the main queue before updating UIKit.
    imgView.image = image;
}

// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}

@end
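The listing above is pre-ARC (manual retain/release) Objective-C from 2013, which is why dealloc, release, autorelease and dispatch_release appear throughout; under ARC those calls simply go away. One thing the original does not show is camera authorization: on current iOS versions the capture session delivers no frames until the user grants access, and since iOS 10 the app must also declare NSCameraUsageDescription in Info.plist. The snippet below is a minimal sketch of that check, not part of the original article; the method name startCaptureIfAuthorized is hypothetical, and it assumes the same setupCaptureSession method from the listing.

// Minimal sketch (not from the original article): request camera permission
// before starting capture. Requires NSCameraUsageDescription in Info.plist on iOS 10+.
- (void)startCaptureIfAuthorized
{
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        // Already authorized: start capture immediately.
        [self setupCaptureSession];
    } else if (status == AVAuthorizationStatusNotDetermined) {
        // First launch: ask the user for permission.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            // The completion handler may run on a background queue;
            // hop back to the main queue before touching the session or UI.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    [self setupCaptureSession];
                }
            });
        }];
    } else {
        // Denied or restricted: show your own message directing the user to Settings.
    }
}

Calling this from viewDidLoad in place of the direct [self setupCaptureSession] call keeps the preview working on devices where permission has not yet been granted.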

That is all for this article. I hope it helps with your learning, and thank you for supporting w3xue.
