//
//  WFCUFileVideoSource.m
//  WFChatUIKit
//
//  Created by Rain on 2022/8/1.
//  Copyright © 2022 Wildfirechat. All rights reserved.
//

#import "WFCUFileVideoSource.h"

#if WFCU_SUPPORT_VOIP

// AVFoundation for AVAssetReader, WebRTC for RTCLog / RTCCVPixelBuffer /
// RTCVideoFrame. Both may already arrive via WFCUFileVideoSource.h; they are
// imported here so this file stands on its own.
#import <AVFoundation/AVFoundation.h>
#import <WebRTC/WebRTC.h>
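
// Plays a local video file into a call as if it were a camera feed, closely
// modeled on WebRTC's RTCFileVideoCapturer: an AVAssetReader decodes the file
// to NV12 pixel buffers, and a one-shot strict timer re-emits each frame after
// its original inter-frame interval so delivery keeps the file's native
// pacing. When the reader reaches the end of the file it is rebuilt, so the
// video loops until -stopCapture is called.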
@interface WFCUFileVideoSource ()
@property(nonatomic, weak) id<WFAVExternalFrameDelegate> frameDelegate;
@property(nonatomic, assign) CMTime lastPresentationTime;
@property(nonatomic, strong) NSURL *fileURL;
@end

typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
    RTCFileVideoCapturerStatusNotInitialized,
    RTCFileVideoCapturerStatusStarted,
    RTCFileVideoCapturerStatusStopped
};

@implementation WFCUFileVideoSource {
    AVAssetReader *_reader;
    AVAssetReaderTrackOutput *_outTrack;
    RTCFileVideoCapturerStatus _status;
    dispatch_queue_t _frameQueue;
}

- (instancetype)initWithFile:(NSString *)filePath {
    self = [super init];
    if (self) {
        self.fileURL = [NSURL fileURLWithPath:filePath];
    }
    return self;
}
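
// Capture lifecycle: -startCapture: records the delegate and kicks off the
// read loop; -stopCapture only flips the status flag, which the loop observes
// on its next pass through -readNextBuffer, so teardown is asynchronous.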
- (void)startCapture:(id<WFAVExternalFrameDelegate> _Nonnull)delegate {
    self.frameDelegate = delegate;
    // kCMTimeZero instead of CMTimeMake(0, 0): a zero timescale makes the
    // CMTime invalid, so the first frame's presentation delta would be NaN.
    self.lastPresentationTime = kCMTimeZero;
    // Mark the capturer as started so a stop/start cycle works; otherwise a
    // stale RTCFileVideoCapturerStatusStopped would end the read loop at once.
    _status = RTCFileVideoCapturerStatusStarted;
    [self setupReader];
}

- (void)stopCapture {
    self.frameDelegate = nil;
    _status = RTCFileVideoCapturerStatusStopped;
}
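
// Builds the AVAssetReader pipeline for the first video track. NV12 (420f,
// bi-planar full range) is requested since that is the pixel format WebRTC's
// capture pipeline normally consumes on Apple platforms.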
- (void)setupReader {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];
    NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if (allTracks.count == 0) {
        // No video track: bail out instead of passing a nil track to
        // AVAssetReaderTrackOutput, which would throw.
        return;
    }
    NSError *error = nil;
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    if (error) {
        return;
    }
    NSDictionary *options = @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
                                                 outputSettings:options];
    [_reader addOutput:_outTrack];
    [_reader startReading];
    RTCLog(@"File capturer started reading");
    [self readNextBuffer];
}
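
// Lazily creates the serial queue the strict timers fire on; it targets the
// background global queue so decoding and pacing stay off the main thread.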
- (dispatch_queue_t)frameQueue {
    if (!_frameQueue) {
        if (@available(iOS 10, macOS 10.12, tvOS 10, watchOS 3, *)) {
            _frameQueue = dispatch_queue_create_with_target(
                "org.webrtc.filecapturer.video",
                DISPATCH_QUEUE_SERIAL,
                dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
        } else {
            // Assign the ivar here; the original declared a local
            // `dispatch_queue_t _frameQueue` that shadowed it, so the getter
            // kept returning nil on pre-iOS 10 systems.
            _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video",
                                                DISPATCH_QUEUE_SERIAL);
            dispatch_set_target_queue(
                _frameQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
        }
    }
    return _frameQueue;
}
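
// One step of the read loop: stop if -stopCapture was called, rebuild the
// reader at end of file (looping), skip invalid or not-ready samples, and
// otherwise hand a single sample to -publishSampleBuffer:. Note that
// copyNextSampleBuffer returns a +1 retained buffer, so every path that drops
// a non-NULL buffer must CFRelease it.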
- (void)readNextBuffer {
    if (_status == RTCFileVideoCapturerStatusStopped) {
        [_reader cancelReading];
        _reader = nil;
        return;
    }
    if (_reader.status == AVAssetReaderStatusCompleted) {
        // End of file: tear down the reader and start over, looping the video.
        [_reader cancelReading];
        _reader = nil;
        [self setupReader];
        return;
    }
    CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
    if (!sampleBuffer) {
        [self readNextBuffer];
        return;
    }
    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
        !CMSampleBufferDataIsReady(sampleBuffer)) {
        CFRelease(sampleBuffer);
        [self readNextBuffer];
        return;
    }
    [self publishSampleBuffer:sampleBuffer];
}
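
// Paces frame delivery: schedules a one-shot strict timer for the gap between
// this sample's presentation timestamp and the previous one; when it fires,
// the pixel buffer is wrapped in an RTCVideoFrame stamped with
// CACurrentMediaTime() and forwarded to the frame delegate. The delegate is
// called with a nil capturer, since no RTCVideoCapturer instance is involved.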
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    Float64 presentationDifference =
        CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
    _lastPresentationTime = presentationTime;
    int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);

    __block dispatch_source_t timer = [self createStrictTimer];
    // Strict timer that will fire `presentationDifferenceRound` ns from now and never again.
    dispatch_source_set_timer(timer,
                              dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
                              DISPATCH_TIME_FOREVER,
                              0);
    dispatch_source_set_event_handler(timer, ^{
        dispatch_source_cancel(timer);
        timer = nil;

        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (!pixelBuffer) {
            CFRelease(sampleBuffer);
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                [self readNextBuffer];
            });
            return;
        }

        RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
            [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
        NSTimeInterval timeStampSeconds = CACurrentMediaTime();
        int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
        RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
            [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
                                                        rotation:RTCVideoRotation_0
                                                     timeStampNs:timeStampNs];
        CFRelease(sampleBuffer);

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            [self readNextBuffer];
        });
        [self.frameDelegate capturer:nil didCaptureVideoFrame:videoFrame];
    });
    dispatch_activate(timer);
}
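
// DISPATCH_TIMER_STRICT opts out of system timer coalescing, trading a little
// power efficiency for accurate frame pacing.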
- (dispatch_source_t)createStrictTimer {
    dispatch_source_t timer = dispatch_source_create(
        DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
    return timer;
}

@end
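
// Usage sketch (illustrative, not from this file: `videoPath` and
// `frameDelegate` stand in for a real file path and whatever
// id<WFAVExternalFrameDelegate> the call engine provides):
//
//   WFCUFileVideoSource *source =
//       [[WFCUFileVideoSource alloc] initWithFile:videoPath];
//   [source startCapture:frameDelegate]; // frames start flowing, file loops
//   ...
//   [source stopCapture];                // read loop winds down asynchronously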

#endif