SampleHandler.mm
//
//  SampleHandler.mm
//  Broadcast
//
//  Created by Rain on 2022/10/11.
//  Copyright © 2022 WildFireChat. All rights reserved.
//
#import "SampleHandler.h"
#import "GCDAsyncSocket.h"
#import <libyuv.h>
#import "WFCUI420VideoFrame.h"
#import "WFCUBroadcastDefine.h"
#import <CoreMedia/CoreMedia.h>
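
// SampleHandler is the ReplayKit broadcast upload extension handler. It receives
// screen video and app-audio sample buffers from the system, converts video to
// I420 via libyuv, and forwards everything over a local TCP socket
// (GCDAsyncSocket, 127.0.0.1:36622) to the container app.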
@interface SampleHandler () <GCDAsyncSocketDelegate>
@property (nonatomic, strong) dispatch_queue_t sampleHandlerQueue;
@property (nonatomic, assign) BOOL connected;
@property (nonatomic, strong) GCDAsyncSocket *socket;
@property (nonatomic, strong) dispatch_queue_t socketQueue;
@property (nonatomic, strong) NSMutableData *receivedData;
@property (nonatomic, assign) CGFloat cropRate;
@property (nonatomic, assign) CGSize targetSize;
@property (nonatomic, assign) int orientation; //0 portrait, 1 rotated 90°, 2 rotated 180°, 3 rotated 270°
@property (nonatomic, assign) BOOL audio;
@property (nonatomic, assign) int audioBigEnd; //0 unknown, 1 big-endian, -1 little-endian
@property (nonatomic, assign) int64_t lastTimeStampNs;
@property (nonatomic, assign) BOOL stopped;
@end
@implementation SampleHandler
- (instancetype)init {
    self = [super init];
    if (self) {
        [self setup];
    }
    return self;
}
- (void)setup {
    self.receivedData = [[NSMutableData alloc] init];
    self.cropRate = 3.f/4;
    self.targetSize = CGSizeMake(640, 1280);
    self.sampleHandlerQueue = dispatch_queue_create("cn.wildfirechat.conference.broadcast.sample", DISPATCH_QUEUE_SERIAL);
}
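// The extension acts as a TCP client on the loopback interface; the container
// app is expected to be listening on 127.0.0.1:36622. A local socket is a
// common way to pass media between a broadcast extension and its host app.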
- (void)setupSocket {
    self.socketQueue = dispatch_queue_create("cn.wildfirechat.conference.broadcast.client", DISPATCH_QUEUE_SERIAL);
    self.socket = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:self.socketQueue];
    NSError *error;
    [self.socket connectToHost:@"127.0.0.1" onPort:36622 error:&error];
    [self.socket readDataWithTimeout:-1 tag:0];
    if (!error) {
        NSLog(@"socket connect initiated successfully");
    } else {
        NSLog(@"socket connect failed: %@", error);
    }
}
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    [self setupSocket];
}
- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
    if (self.connected) {
        NSString *str = @"Paused";
        NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];
        [self sendType:0 data:data tag:0];
    }
}
- (void)broadcastResumed {
    // User has requested to resume the broadcast. Sample delivery will resume.
    if (self.connected) {
        NSString *str = @"Resumed";
        NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];
        [self sendType:0 data:data tag:0];
    }
}
- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    if (!self.stopped && self.connected) {
        NSString *str = @"Finish";
        NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];
        [self sendType:0 data:data tag:0];
        __weak typeof(self) ws = self;
        // Give the "Finish" packet time to flush before tearing down the socket.
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
            [ws disconnect];
        });
        self.stopped = YES;
    }
}
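// Wire format: every packet is a PacketHeader {dataType, dataLen} followed by
// dataLen bytes of payload. dataType 0 carries control strings ("Start",
// "Paused", "Resumed", "Finish"); dataType 1 carries a media sample, itself
// prefixed with a SampleInfo struct (see WFCUBroadcastDefine.h).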
- (void)sendType:(uint16_t)type data:(NSData *)data tag:(int)tag {
    PacketHeader header;
    header.dataType = type;
    header.dataLen = (int)data.length;
    NSMutableData *md = [[NSMutableData alloc] initWithBytes:&header length:sizeof(PacketHeader)];
    [md appendData:data];
    [self.socket writeData:md withTimeout:5 tag:tag];
}
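// Audio path: extract the PCM bytes from the sample buffer, detect the sample
// endianness once, byte-swap big-endian 16-bit samples to little-endian, and
// ship them with a SampleInfo header (width = channel count, height = samples
// per channel).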
- (void)sendAudioDataToContainerApp:(CMSampleBufferRef)ref {
    CFRetain(ref);
    dispatch_async(self.sampleHandlerQueue, ^{
        @autoreleasepool {
            AudioBufferList audioBufferList;
            NSMutableData *data = [[NSMutableData alloc] init];
            CMBlockBufferRef blockBuffer = NULL;
            OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(ref,
                                                                                      NULL,
                                                                                      &audioBufferList,
                                                                                      sizeof(audioBufferList),
                                                                                      NULL,
                                                                                      NULL,
                                                                                      kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                                                                                      &blockBuffer);
            if (status != noErr) {
                CFRelease(ref);
                return;
            }
            if (self.audioBigEnd == 0) {
                CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(ref);
                const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
                if (asbd->mFormatFlags & kAudioFormatFlagIsBigEndian) {
                    self.audioBigEnd = 1;
                } else {
                    self.audioBigEnd = -1;
                }
            }
            if (audioBufferList.mNumberBuffers > 0) {
                AudioBuffer audioBuffer = audioBufferList.mBuffers[0];
                uint8_t *frame = (uint8_t *)audioBuffer.mData;
                if (self.audioBigEnd == 1) {
                    // Swap each 16-bit sample from big-endian to little-endian in place.
                    for (int i = 0; i < (int)(audioBuffer.mDataByteSize - 1); i += 2) {
                        int8_t temp = frame[i];
                        frame[i] = frame[i + 1];
                        frame[i + 1] = temp;
                    }
                }
                [data appendBytes:frame length:audioBuffer.mDataByteSize];
                SampleInfo sampleInfo;
                sampleInfo.width = audioBuffer.mNumberChannels;
                sampleInfo.height = (int)data.length / audioBuffer.mNumberChannels / 2; //16-bit samples per channel
                sampleInfo.dataLen = (int)data.length;
                sampleInfo.type = 1; //1 = audio sample
                NSMutableData *dataWithHeader = [[NSMutableData alloc] initWithBytes:&sampleInfo length:sizeof(SampleInfo)];
                [dataWithHeader appendData:data];
                [self sendType:1 data:[dataWithHeader copy] tag:1];
            }
            if (blockBuffer) {
                CFRelease(blockBuffer);
            }
            CFRelease(ref);
        }
    });
}
#define FPS 15
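// Throttle video to at most FPS frames per second: any frame arriving less
// than 1e9/FPS ns (about 66.7 ms at 15 fps) after the previous one is dropped
// before any conversion work is done.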
- (void)sendVideoDataToContainerApp:(CMSampleBufferRef)sampleBuffer {
    NSTimeInterval timeStampSeconds = CACurrentMediaTime();
    int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
    if (timeStampNs - self.lastTimeStampNs < (1000000000L / FPS)) { //drop frames above FPS
        return;
    }
    self.lastTimeStampNs = timeStampNs;
    CFRetain(sampleBuffer);
    dispatch_async(self.sampleHandlerQueue, ^{
        @autoreleasepool {
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            int orientation = self.orientation;
            if (UIDevice.currentDevice.systemVersion.floatValue > 11.1) {
                CGImagePropertyOrientation cgOr = (CGImagePropertyOrientation)((__bridge NSNumber *)CMGetAttachment(sampleBuffer, (__bridge CFStringRef)RPVideoSampleOrientationKey, NULL)).unsignedIntValue;
                switch (cgOr) {
                    case kCGImagePropertyOrientationUp:
                        orientation = 0;
                        break;
                    case kCGImagePropertyOrientationDown:
                        orientation = 2;
                        break;
                    case kCGImagePropertyOrientationLeft:
                        orientation = 1;
                        break;
                    case kCGImagePropertyOrientationRight:
                        orientation = 3;
                        break;
                    default:
                        break;
                }
            }
            WFCUI420VideoFrame *i420Frame = [self resizeAndConvert:imageBuffer orientation:orientation];
            if (i420Frame) {
                NSData *frameData = [i420Frame toBytes];
                SampleInfo sampleInfo;
                sampleInfo.width = i420Frame.width;
                sampleInfo.height = i420Frame.height;
                sampleInfo.dataLen = (int)frameData.length;
                sampleInfo.type = 0; //0 = video frame
                NSMutableData *dataWithHeader = [[NSMutableData alloc] initWithBytes:&sampleInfo length:sizeof(SampleInfo)];
                [dataWithHeader appendData:frameData];
                [self sendType:1 data:[dataWithHeader copy] tag:1];
            }
        }
        CFRelease(sampleBuffer); //balanced with the CFRetain above, even when conversion fails
    });
}
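// Conversion pipeline: lock the pixel buffer, convert BGRA or NV12 to I420,
// optionally center-crop (disabled here via NO_CROP), scale down to fit
// targetSize while preserving the aspect ratio, then rotate according to the
// requested orientation. Returns nil if libyuv reports a conversion error.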
- (WFCUI420VideoFrame *)resizeAndConvert:(CVImageBufferRef)pixelBuffer orientation:(int)orientation {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    OSType sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
    size_t bufferWidth = 0;
    size_t bufferHeight = 0;
    size_t rowSize = 0;
    uint8_t *pixel = NULL;
    if (CVPixelBufferIsPlanar(pixelBuffer)) {
        int basePlane = 0;
        pixel = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, basePlane);
        bufferHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, basePlane);
        bufferWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, basePlane);
        rowSize = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, basePlane);
    } else {
        pixel = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
        bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
        bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
        rowSize = CVPixelBufferGetBytesPerRow(pixelBuffer);
    }
    WFCUI420VideoFrame *convertedI420Frame = [[WFCUI420VideoFrame alloc] initWithWidth:(int)bufferWidth height:(int)bufferHeight];
    int error = -1;
    if (kCVPixelFormatType_32BGRA == sourcePixelFormat) {
        //libyuv's "ARGB" fourcc matches BGRA byte order in memory on little-endian.
        error = libyuv::ARGBToI420(pixel, (int)rowSize,
                                   convertedI420Frame.dataOfPlaneY, convertedI420Frame.strideY,
                                   convertedI420Frame.dataOfPlaneU, convertedI420Frame.strideU,
                                   convertedI420Frame.dataOfPlaneV, convertedI420Frame.strideV,
                                   (int)bufferWidth,
                                   (int)bufferHeight);
    } else if (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange == sourcePixelFormat || kCVPixelFormatType_420YpCbCr8BiPlanarFullRange == sourcePixelFormat) {
        error = libyuv::NV12ToI420(pixel, (int)rowSize,
                                   (const uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1),
                                   (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1),
                                   convertedI420Frame.dataOfPlaneY, convertedI420Frame.strideY,
                                   convertedI420Frame.dataOfPlaneU, convertedI420Frame.strideU,
                                   convertedI420Frame.dataOfPlaneV, convertedI420Frame.strideV,
                                   (int)bufferWidth,
                                   (int)bufferHeight);
    }
    if (error) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        NSLog(@"error converting pixel buffer to i420, error %d", error);
        return nil;
    } else {
        rowSize = convertedI420Frame.strideY;
        pixel = convertedI420Frame.data;
    }
    //Screen sharing must not be cropped; after cropping, the remote side receives an incomplete picture.
#define NO_CROP 1
#ifdef NO_CROP
    int croppedWidth = (int)bufferWidth;
    int croppedHeight = (int)bufferHeight;
    WFCUI420VideoFrame *croppedI420Frame = convertedI420Frame;
#else
    int cropX, cropY;
    int croppedWidth, croppedHeight;
    if (bufferWidth * 1.f / bufferHeight > self.cropRate) {
        croppedWidth = bufferHeight * self.cropRate;
        croppedHeight = (int)bufferHeight;
    } else {
        croppedWidth = (int)bufferWidth;
        croppedHeight = bufferWidth / self.cropRate;
    }
    cropX = ((int)bufferWidth - croppedWidth) / 2;
    cropY = ((int)bufferHeight - croppedHeight) / 2;
    //Force even values; I420 chroma planes are subsampled 2x2.
    croppedWidth = croppedWidth >> 1 << 1;
    croppedHeight = croppedHeight >> 1 << 1;
    cropX = cropX >> 1 << 1;
    cropY = cropY >> 1 << 1;
    WFCUI420VideoFrame *croppedI420Frame = [[WFCUI420VideoFrame alloc] initWithWidth:croppedWidth height:croppedHeight];
    error = libyuv::ConvertToI420(pixel, bufferHeight * rowSize * 1.5,
                                  croppedI420Frame.dataOfPlaneY, croppedI420Frame.strideY,
                                  croppedI420Frame.dataOfPlaneU, croppedI420Frame.strideU,
                                  croppedI420Frame.dataOfPlaneV, croppedI420Frame.strideV,
                                  cropX, cropY,
                                  (int)bufferWidth, (int)bufferHeight,
                                  croppedI420Frame.width, croppedI420Frame.height,
                                  libyuv::kRotate0, libyuv::FOURCC_I420);
    if (error) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        NSLog(@"error converting pixel buffer to i420, error %d", error);
        return nil;
    }
#endif
    WFCUI420VideoFrame *i420Frame;
    float scale = MIN(self.targetSize.width * 1.0 / croppedWidth, self.targetSize.height * 1.0 / croppedHeight);
    if (scale == 1.0) {
        i420Frame = croppedI420Frame;
    } else {
        int width = croppedWidth * scale;
        int height = croppedHeight * scale;
        i420Frame = [[WFCUI420VideoFrame alloc] initWithWidth:width height:height];
        libyuv::I420Scale(croppedI420Frame.dataOfPlaneY, croppedI420Frame.strideY,
                          croppedI420Frame.dataOfPlaneU, croppedI420Frame.strideU,
                          croppedI420Frame.dataOfPlaneV, croppedI420Frame.strideV,
                          croppedI420Frame.width, croppedI420Frame.height,
                          i420Frame.dataOfPlaneY, i420Frame.strideY,
                          i420Frame.dataOfPlaneU, i420Frame.strideU,
                          i420Frame.dataOfPlaneV, i420Frame.strideV,
                          i420Frame.width, i420Frame.height,
                          libyuv::kFilterBilinear);
    }
    int dstWidth, dstHeight;
    libyuv::RotationModeEnum rotateMode = (libyuv::RotationModeEnum)(orientation * 90);
    if (rotateMode != libyuv::kRotateNone) {
        if (rotateMode == libyuv::kRotate270 || rotateMode == libyuv::kRotate90) {
            //90°/270° rotation swaps width and height.
            dstWidth = i420Frame.height;
            dstHeight = i420Frame.width;
        } else {
            dstWidth = i420Frame.width;
            dstHeight = i420Frame.height;
        }
        WFCUI420VideoFrame *rotatedI420Frame = [[WFCUI420VideoFrame alloc] initWithWidth:dstWidth height:dstHeight];
        libyuv::I420Rotate(i420Frame.dataOfPlaneY, i420Frame.strideY,
                           i420Frame.dataOfPlaneU, i420Frame.strideU,
                           i420Frame.dataOfPlaneV, i420Frame.strideV,
                           rotatedI420Frame.dataOfPlaneY, rotatedI420Frame.strideY,
                           rotatedI420Frame.dataOfPlaneU, rotatedI420Frame.strideU,
                           rotatedI420Frame.dataOfPlaneV, rotatedI420Frame.strideV,
                           i420Frame.width, i420Frame.height,
                           rotateMode);
        i420Frame = rotatedI420Frame;
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return i420Frame;
}
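// ReplayKit entry point: route each sample buffer by type. Mic audio is
// intentionally ignored; app audio is forwarded only after the container app
// has enabled it via command 1.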
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            if (self.connected && !self.stopped) {
                [self sendVideoDataToContainerApp:sampleBuffer];
            }
            break;
        case RPSampleBufferTypeAudioApp:
            if (self.connected && self.audio && !self.stopped) {
                [self sendAudioDataToContainerApp:sampleBuffer];
            }
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            break;
        default:
            break;
    }
}
- (void)disconnect {
    _connected = NO;
    if (_socket) {
        [_socket disconnect];
        _socket = nil;
    }
}
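// Commands arrive from the container app as fixed-size Command {type, value}
// structs: 0 = rotation (value is 0/1/2/3, in 90° steps), 1 = audio on/off,
// 2 = target resolution packed as (width << 16) | height, 3 = finish the
// broadcast.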
- (void)onReceiveCommandFromContainerApp:(int)command value:(int)value {
    dispatch_async(dispatch_get_main_queue(), ^{
        switch (command) {
            case 0: //rotation
                self.orientation = value;
                break;
            case 1: //audio
                self.audio = value > 0;
                break;
            case 2: //resolution
            {
                int width = value >> 16;
                int height = (value - (width << 16)) & 0xFFFF;
                if (width > height) {
                    self.targetSize = CGSizeMake(width, height);
                } else {
                    self.targetSize = CGSizeMake(height, width);
                }
                break;
            }
            case 3: //finish
            {
                [self finishBroadcastWithError:nil];
                if (@available(iOS 14.0, *)) {
                    NSLog(@"broadcast will finish");
                } else {
                    //Before iOS 14, invoke broadcastFinished manually.
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self broadcastFinished];
                    });
                }
                break;
            }
            default:
                break;
        }
    });
}
#pragma mark - GCDAsyncSocketDelegate
- (void)socket:(GCDAsyncSocket *)sock didConnectToUrl:(NSURL *)url {
    [self.socket readDataWithTimeout:-1 tag:0];
}
- (void)socket:(GCDAsyncSocket *)sock didConnectToHost:(NSString *)host port:(uint16_t)port {
    [self.socket readDataWithTimeout:-1 tag:0];
    self.connected = YES;
    NSString *str = @"Start";
    NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];
    [self sendType:0 data:data tag:0];
}
- (void)socket:(GCDAsyncSocket *)sock didWriteDataWithTag:(long)tag {
}
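// Read loop: append incoming bytes and consume complete fixed-size Command
// structs from the front of the buffer; any trailing partial struct is kept
// until the next read completes it.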
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag {
    [self.receivedData appendData:data];
    while (self.receivedData.length >= sizeof(Command)) {
        Command command;
        memcpy(&command, self.receivedData.bytes, sizeof(Command));
        [self onReceiveCommandFromContainerApp:command.type value:command.value];
        [self.receivedData replaceBytesInRange:NSMakeRange(0, sizeof(Command)) withBytes:NULL length:0];
    }
    [sock readDataWithTimeout:-1 tag:0];
}
- (void)socketDidDisconnect:(GCDAsyncSocket *)sock withError:(NSError *)err {
    if (!self.stopped) {
        //Unexpected disconnect while still broadcasting: tear down and reconnect.
        self.connected = NO;
        [self.socket disconnect];
        self.socket = nil;
        [self setupSocket];
        [self.socket readDataWithTimeout:-1 tag:0];
    }
}
@end