tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

rtc_video_capture_objc.mm (11563B)


      1 /*
      2 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
      3 *
      4 *  Use of this source code is governed by a BSD-style license
      5 *  that can be found in the LICENSE file in the root of the source
      6 *  tree. An additional intellectual property rights grant can be found
      7 *  in the file PATENTS.  All contributing project authors may
      8 *  be found in the AUTHORS file in the root of the source tree.
      9 */
     10 
     11 #if !defined(__has_feature) || !__has_feature(objc_arc)
     12 #  error "This file requires ARC support."
     13 #endif
     14 
     15 #import <AVFoundation/AVFoundation.h>
     16 #ifdef WEBRTC_IOS
     17 #  import <UIKit/UIKit.h>
     18 #endif
     19 
     20 #import "device_info_objc.h"
     21 #import "rtc_video_capture_objc.h"
     22 
     23 #include "rtc_base/logging.h"
     24 
     25 using namespace webrtc;
     26 using namespace webrtc::videocapturemodule;
     27 
     28 @interface RTCVideoCaptureIosObjC (hidden)
     29 - (int)changeCaptureInputWithName:(NSString*)captureDeviceName;
     30 @end
     31 
@implementation RTCVideoCaptureIosObjC {
 webrtc::videocapturemodule::VideoCaptureIos* _owner;  // C++ capturer that receives frames via IncomingFrame(); not owned.
 webrtc::VideoCaptureCapability _capability;  // Capability accepted by startCaptureWithCapability:.
 AVCaptureSession* _captureSession;  // Created in -initWithOwner:; owns inputs/outputs.
 BOOL _orientationHasChanged;  // YES once a device-orientation notification has fired.
 AVCaptureConnection* _connection;  // Video connection of the current output; used for orientation.
 BOOL _captureChanging;  // Guarded by _captureChangingCondition.
 NSCondition* _captureChangingCondition;  // Serializes start/stop/input changes across threads.
 dispatch_queue_t _frameQueue;  // Serial queue for frame callbacks and session (re)configuration.
}

// NOTE(review): "_framRotation" looks like a typo of "_frameRotation"; it is
// harmless here because the ivar is only reached through the synthesized
// property accessors.
@synthesize frameRotation = _framRotation;
     44 
     45 - (id)initWithOwner:(VideoCaptureIos*)owner {
     46  if (self == [super init]) {
     47    _owner = owner;
     48    _captureSession = [[AVCaptureSession alloc] init];
     49 #if defined(WEBRTC_IOS)
     50    _captureSession.usesApplicationAudioSession = NO;
     51 #endif
     52    _captureChanging = NO;
     53    _captureChangingCondition = [[NSCondition alloc] init];
     54 
     55    if (!_captureSession || !_captureChangingCondition) {
     56      return nil;
     57    }
     58 
     59    // create and configure a new output (using callbacks)
     60    AVCaptureVideoDataOutput* captureOutput =
     61        [[AVCaptureVideoDataOutput alloc] init];
     62    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
     63 
     64    NSNumber* val =
     65        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8];
     66    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:val
     67                                                              forKey:key];
     68    captureOutput.videoSettings = videoSettings;
     69 
     70    // add new output
     71    if ([_captureSession canAddOutput:captureOutput]) {
     72      [_captureSession addOutput:captureOutput];
     73    } else {
     74      RTC_LOG(LS_ERROR) << __FUNCTION__
     75                        << ": Could not add output to AVCaptureSession";
     76    }
     77 
     78 #ifdef WEBRTC_IOS
     79    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
     80 
     81    NSNotificationCenter* notify = [NSNotificationCenter defaultCenter];
     82    [notify addObserver:self
     83               selector:@selector(onVideoError:)
     84                   name:AVCaptureSessionRuntimeErrorNotification
     85                 object:_captureSession];
     86    [notify addObserver:self
     87               selector:@selector(deviceOrientationDidChange:)
     88                   name:UIDeviceOrientationDidChangeNotification
     89                 object:nil];
     90 #endif
     91  }
     92 
     93  // Create a serial queue on which video capture will run. By setting the
     94  // target, blocks should still run on DISPATH_QUEUE_PRIORITY_DEFAULT rather
     95  // than creating a new thread.
     96  _frameQueue =
     97      dispatch_queue_create("org.webrtc.videocapture", DISPATCH_QUEUE_SERIAL);
     98  dispatch_set_target_queue(
     99      _frameQueue,
    100      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0));
    101 
    102  return self;
    103 }
    104 
    105 - (void)directOutputToSelf {
    106  [[self currentOutput] setSampleBufferDelegate:self queue:_frameQueue];
    107 }
    108 
    109 - (void)directOutputToNil {
    110  [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
    111 }
    112 
// UIDeviceOrientationDidChangeNotification handler: record that the device
// orientation has changed at least once, then re-derive the video
// orientation on the capture connection.
- (void)deviceOrientationDidChange:(NSNotification*)notification {
 _orientationHasChanged = YES;
 [self setRelativeVideoOrientation];
}
    117 
// ARC releases the ivars; dealloc only unregisters the notification
// observers added in -initWithOwner:.
- (void)dealloc {
 [[NSNotificationCenter defaultCenter] removeObserver:self];
}
    121 
    122 - (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
    123  [self waitForCaptureChangeToFinish];
    124  // check to see if the camera is already set
    125  if (_captureSession) {
    126    NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
    127    if ([currentInputs count] > 0) {
    128      AVCaptureDeviceInput* currentInput = [currentInputs objectAtIndex:0];
    129      if ([uniqueId isEqualToString:[currentInput.device localizedName]]) {
    130        return YES;
    131      }
    132    }
    133  }
    134 
    135  return [self changeCaptureInputByUniqueId:uniqueId];
    136 }
    137 
// Validates |capability| (resolution and max FPS) against the best preset
// this session supports, stores it, and kicks off capture asynchronously on
// _frameQueue. Returns NO when the session is missing or the capability is
// out of range; YES means the start was queued (not that capture is running).
- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
 [self waitForCaptureChangeToFinish];
 if (!_captureSession) {
   return NO;
 }

 // check limits of the resolution
 if (capability.maxFPS < 0 || capability.maxFPS > 60) {
   return NO;
 }

 // Each branch checks the requested size against the largest preset the
 // session can actually use; only the tightest available ceiling applies.
 if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
   if (capability.width > 1280 || capability.height > 720) {
     return NO;
   }
 } else if ([_captureSession
                canSetSessionPreset:AVCaptureSessionPreset640x480]) {
   if (capability.width > 640 || capability.height > 480) {
     return NO;
   }
 } else if ([_captureSession
                canSetSessionPreset:AVCaptureSessionPreset352x288]) {
   if (capability.width > 352 || capability.height > 288) {
     return NO;
   }
 } else if (capability.width < 0 || capability.height < 0) {
   return NO;
 }

 _capability = capability;

 AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
 if (!currentOutput) return NO;

 [self directOutputToSelf];

 _orientationHasChanged = NO;
 // Mark a capture change in progress; startCaptureInBackgroundWithOutput:
 // clears it via -signalCaptureChangeEnd once the session is running.
 _captureChanging = YES;
 dispatch_async(_frameQueue, ^{
   [self startCaptureInBackgroundWithOutput:currentOutput];
 });
 return YES;
}
    181 
    182 - (AVCaptureVideoDataOutput*)currentOutput {
    183  return [[_captureSession outputs] firstObject];
    184 }
    185 
    186 - (void)startCaptureInBackgroundWithOutput:
    187    (AVCaptureVideoDataOutput*)currentOutput {
    188  NSString* captureQuality =
    189      [NSString stringWithString:AVCaptureSessionPresetLow];
    190  if (_capability.width >= 1280 || _capability.height >= 720) {
    191    captureQuality = [NSString stringWithString:AVCaptureSessionPreset1280x720];
    192  } else if (_capability.width >= 640 || _capability.height >= 480) {
    193    captureQuality = [NSString stringWithString:AVCaptureSessionPreset640x480];
    194  } else if (_capability.width >= 352 || _capability.height >= 288) {
    195    captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
    196  }
    197 
    198  // begin configuration for the AVCaptureSession
    199  [_captureSession beginConfiguration];
    200 
    201  // picture resolution
    202  [_captureSession setSessionPreset:captureQuality];
    203 
    204  _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
    205  [self setRelativeVideoOrientation];
    206 
    207  // finished configuring, commit settings to AVCaptureSession.
    208  [_captureSession commitConfiguration];
    209 
    210  [_captureSession startRunning];
    211  [self signalCaptureChangeEnd];
    212 }
    213 
// Maps the current UIDevice orientation onto the capture connection's video
// orientation (no-op if the connection does not support orientation).
// On non-iOS builds the orientation is pinned to landscape-right.
- (void)setRelativeVideoOrientation {
 if (!_connection.supportsVideoOrientation) {
   return;
 }
#ifndef WEBRTC_IOS
 _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
 return;
#else
 switch ([UIDevice currentDevice].orientation) {
   case UIDeviceOrientationPortrait:
     _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
     break;
   case UIDeviceOrientationPortraitUpsideDown:
     _connection.videoOrientation =
         AVCaptureVideoOrientationPortraitUpsideDown;
     break;
   // Note the deliberate left/right swap: device orientation and capture
   // orientation are defined from opposite points of view.
   case UIDeviceOrientationLandscapeLeft:
     _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
     break;
   case UIDeviceOrientationLandscapeRight:
     _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
     break;
   case UIDeviceOrientationFaceUp:
   case UIDeviceOrientationFaceDown:
   case UIDeviceOrientationUnknown:
     // Default to portrait only before the first real orientation event;
     // afterwards keep whatever orientation was last applied.
     if (!_orientationHasChanged) {
       _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
     }
     break;
 }
#endif
}
    246 
    247 - (void)onVideoError:(NSNotification*)notification {
    248  NSLog(@"onVideoError: %@", notification);
    249  // TODO(sjlee): make the specific error handling with this notification.
    250  RTC_LOG(LS_ERROR) << __FUNCTION__
    251                    << ": [AVCaptureSession startRunning] error.";
    252 }
    253 
// Stops capture: detaches the sample-buffer delegate first so no further
// frames are delivered, stops the session, then synchronizes with
// _frameQueue so the change-end signal runs after any frame callback that
// was already queued. Returns NO if there is no capture session.
- (BOOL)stopCapture {
#ifdef WEBRTC_IOS
 [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
 _orientationHasChanged = NO;
 [self waitForCaptureChangeToFinish];
 [self directOutputToNil];

 if (!_captureSession) {
   return NO;
 }

 _captureChanging = YES;
 [_captureSession stopRunning];

 // dispatch_sync drains _frameQueue before signalling, so callers see the
 // change finished only after in-flight frame callbacks complete.
 dispatch_sync(_frameQueue, ^{
   [self signalCaptureChangeEnd];
 });
 return YES;
}
    274 
    275 - (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
    276  [self waitForCaptureChangeToFinish];
    277  NSArray* currentInputs = [_captureSession inputs];
    278  // remove current input
    279  if ([currentInputs count] > 0) {
    280    AVCaptureInput* currentInput =
    281        (AVCaptureInput*)[currentInputs objectAtIndex:0];
    282 
    283    [_captureSession removeInput:currentInput];
    284  }
    285 
    286  // Look for input device with the name requested (as our input param)
    287  // get list of available capture devices
    288  int captureDeviceCount = [DeviceInfoIosObjC captureDeviceCount];
    289  if (captureDeviceCount <= 0) {
    290    return NO;
    291  }
    292 
    293  AVCaptureDevice* captureDevice =
    294      [DeviceInfoIosObjC captureDeviceForUniqueId:uniqueId];
    295 
    296  if (!captureDevice) {
    297    return NO;
    298  }
    299 
    300  // now create capture session input out of AVCaptureDevice
    301  NSError* deviceError = nil;
    302  AVCaptureDeviceInput* newCaptureInput =
    303      [AVCaptureDeviceInput deviceInputWithDevice:captureDevice
    304                                            error:&deviceError];
    305 
    306  if (!newCaptureInput) {
    307    const char* errorMessage = [[deviceError localizedDescription] UTF8String];
    308 
    309    RTC_LOG(LS_ERROR) << __FUNCTION__
    310                      << ": deviceInputWithDevice error:" << errorMessage;
    311 
    312    return NO;
    313  }
    314 
    315  // try to add our new capture device to the capture session
    316  [_captureSession beginConfiguration];
    317 
    318  BOOL addedCaptureInput = NO;
    319  if ([_captureSession canAddInput:newCaptureInput]) {
    320    [_captureSession addInput:newCaptureInput];
    321    addedCaptureInput = YES;
    322  } else {
    323    addedCaptureInput = NO;
    324  }
    325 
    326  [_captureSession commitConfiguration];
    327 
    328  return addedCaptureInput;
    329 }
    330 
    331 - (void)captureOutput:(AVCaptureOutput*)captureOutput
    332    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
    333           fromConnection:(AVCaptureConnection*)connection {
    334  const int kFlags = 0;
    335  CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    336 
    337  if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
    338    return;
    339  }
    340 
    341  uint8_t* baseAddress = (uint8_t*)CVPixelBufferGetBaseAddress(videoFrame);
    342  const size_t width = CVPixelBufferGetWidth(videoFrame);
    343  const size_t height = CVPixelBufferGetHeight(videoFrame);
    344  const size_t frameSize = width * height * 2;
    345 
    346  VideoCaptureCapability tempCaptureCapability;
    347  tempCaptureCapability.width = width;
    348  tempCaptureCapability.height = height;
    349  tempCaptureCapability.maxFPS = _capability.maxFPS;
    350  tempCaptureCapability.videoType = VideoType::kUYVY;
    351 
    352  _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
    353 
    354  CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
    355 }
    356 
// Marks the in-progress capture change as finished and wakes a thread
// blocked in -waitForCaptureChangeToFinish. The flag is mutated under the
// condition's lock, matching the wait side.
- (void)signalCaptureChangeEnd {
 [_captureChangingCondition lock];
 _captureChanging = NO;
 [_captureChangingCondition signal];
 [_captureChangingCondition unlock];
}
    363 
// Blocks the caller until no capture change is in progress. The while loop
// re-checks the predicate to guard against spurious condition wakeups.
- (void)waitForCaptureChangeToFinish {
 [_captureChangingCondition lock];
 while (_captureChanging) {
   [_captureChangingCondition wait];
 }
 [_captureChangingCondition unlock];
}
@end