//
//  WFCUConferenceManager.m
//  WFChatUIKit
//
//  Created by Tom Lee on 2021/2/15.
//  Copyright © 2020 WildFireChat. All rights reserved.
//

#if WFCU_SUPPORT_VOIP

#import "WFCUConferenceManager.h"
#import <WFChatClient/WFCChatClient.h>
#import <WFAVEngineKit/WFAVEngineKit.h>
#import "WFCUConferenceChangeModelContent.h"
#import "WFZConferenceInfo.h"
#import "WFCUConferenceHistory.h"
#import "WFCUConferenceCommandContent.h"
#import "WFCUConfigManager.h"
#import "WFCUImage.h"
#import "GCDAsyncSocket.h"
#import <ReplayKit/ReplayKit.h>
#import "WFCUI420VideoFrame.h"
#import "WFCUBroadcastDefine.h"
NSString *kMuteStateChanged = @"kMuteStateChanged";

@interface WFCUConferenceManager () <GCDAsyncSocketDelegate, WFAVExternalVideoSource, WFAVCallSessionAudioDataDelegate>
@property(nonatomic, strong) UIButton *alertViewCheckBtn;
@property(nonatomic, strong) GCDAsyncSocket *socket;
@property(nonatomic, strong) dispatch_queue_t queue;
@property(nonatomic, strong) NSMutableArray *sockets;
@property(nonatomic, strong) RPSystemBroadcastPickerView *broadPickerView;
@property(nonatomic, strong) NSMutableData *receivedData;
@property(nonatomic, strong) NSMutableData *receivedAudioData;
@property(nonatomic, strong) NSLock *audioDataLock;
@property(nonatomic, assign) BOOL broadcastWithAudio;
@property(nonatomic, weak) id<WFAVExternalFrameDelegate> frameDelegate;
@end

static WFCUConferenceManager *sharedSingleton = nil;
@implementation WFCUConferenceManager

+ (WFCUConferenceManager *)sharedInstance {
    if (sharedSingleton == nil) {
        @synchronized (self) {
            if (sharedSingleton == nil) {
                sharedSingleton = [[WFCUConferenceManager alloc] init];
                sharedSingleton.applyingUnmuteAudioMembers = [[NSMutableArray alloc] init];
                sharedSingleton.applyingUnmuteVideoMembers = [[NSMutableArray alloc] init];
                sharedSingleton.handupMembers = [[NSMutableArray alloc] init];

                [[NSNotificationCenter defaultCenter] addObserver:sharedSingleton selector:@selector(onReceiveMessages:) name:kReceiveMessages object:nil];
                [[NSNotificationCenter defaultCenter] addObserver:sharedSingleton
                                                         selector:@selector(onAppTerminate)
                                                             name:UIApplicationWillTerminateNotification
                                                           object:nil];
            }
        }
    }
    return sharedSingleton;
}
- (void)muteAudio:(BOOL)mute {
    if (mute) {
        // If video is already muted and we are not screen sharing, drop back to audience first.
        if (![WFAVEngineKit sharedEngineKit].currentSession.isAudience && [WFAVEngineKit sharedEngineKit].currentSession.isVideoMuted && ![[WFAVEngineKit sharedEngineKit].currentSession isBroadcasting]) {
            if (![[WFAVEngineKit sharedEngineKit].currentSession switchAudience:YES]) {
                NSLog(@"switch to audience failure");
                return;
            }
        }
        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:mute];
    } else {
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience && ![[WFAVEngineKit sharedEngineKit].currentSession canSwitchAudience]) {
            NSLog(@"cannot switch to participant");
            return;
        }
        // Respect the conference's participant limit before promoting an audience member.
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience && self.currentConferenceInfo.maxParticipants > 0) {
            __block int participantCount = 0;
            [[WFAVEngineKit sharedEngineKit].currentSession.participants enumerateObjectsUsingBlock:^(WFAVParticipantProfile * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
                if (!obj.audience) {
                    participantCount++;
                }
            }];
            if (participantCount >= self.currentConferenceInfo.maxParticipants) {
                if ([self.delegate respondsToSelector:@selector(showToast:)]) {
                    [self.delegate showToast:@"发言人数已满,无法切换到发言人!"];
                }
                return;
            }
        }
        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:mute];
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience) {
            [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:YES];
            [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:NO];
        }
    }
    [self notifyMuteStateChanged];
}
- (void)muteVideo:(BOOL)mute {
    if (mute) {
        if (![WFAVEngineKit sharedEngineKit].currentSession.isAudience && [WFAVEngineKit sharedEngineKit].currentSession.isAudioMuted && ![[WFAVEngineKit sharedEngineKit].currentSession isBroadcasting]) {
            [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:YES];
        }
        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:mute];
    } else {
        if ([WFAVEngineKit sharedEngineKit].screenSharingReplaceMode && [self isBroadcasting]) {
            return;
        }
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience && self.currentConferenceInfo.maxParticipants > 0) {
            __block int participantCount = 0;
            [[WFAVEngineKit sharedEngineKit].currentSession.participants enumerateObjectsUsingBlock:^(WFAVParticipantProfile * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
                if (!obj.audience) {
                    participantCount++;
                }
            }];
            if (participantCount >= self.currentConferenceInfo.maxParticipants) {
                if ([self.delegate respondsToSelector:@selector(showToast:)]) {
                    [self.delegate showToast:@"发言人数已满,无法切换到发言人!"];
                }
                return;
            }
        }
        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:mute];
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience) {
            [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:YES];
            [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:NO];
        }
    }
    [self notifyMuteStateChanged];
}
- (void)muteAudioVideo:(BOOL)mute {
    if (mute) {
        if (![WFAVEngineKit sharedEngineKit].currentSession.isAudience) {
            [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:YES];
        }
        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:mute];
        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:mute];
    } else {
        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:mute];
        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:mute];
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience) {
            [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:NO];
        }
    }
    [self notifyMuteStateChanged];
}

- (void)enableAudioDisableVideo {
    [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:YES];
    [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:NO];
    if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience) {
        [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:NO];
    }
    [self notifyMuteStateChanged];
}
- (void)startScreansharing:(UIView *)view withAudio:(BOOL)withAudio {
    if (![self isBroadcasting]) {
        [self broadcast:view withAudio:withAudio];
        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:NO];
        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:YES];
        if ([WFAVEngineKit sharedEngineKit].currentSession.isAudience) {
            [[WFAVEngineKit sharedEngineKit].currentSession switchAudience:NO];
        }
        [self notifyMuteStateChanged];
    }
}

- (void)stopScreansharing {
    if ([self isBroadcasting]) {
        [self cancelBroadcast];
    }
}
- (void)reloadConferenceInfo {
    __weak typeof(self) ws = self;
    [[WFCUConfigManager globalManager].appServiceProvider queryConferenceInfo:self.currentConferenceInfo.conferenceId password:self.currentConferenceInfo.password success:^(WFZConferenceInfo * _Nonnull conferenceInfo) {
        ws.currentConferenceInfo = conferenceInfo;
    } error:^(int errorCode, NSString * _Nonnull message) {
    }];
}

- (void)setCurrentConferenceInfo:(WFZConferenceInfo *)currentConferenceInfo {
    if (![_currentConferenceInfo.conferenceId isEqualToString:currentConferenceInfo.conferenceId]) {
        [self resetCommandState];
    }
    _currentConferenceInfo = currentConferenceInfo;
}
- (void)leaveConference:(BOOL)destroy {
    if ([[WFAVEngineKit sharedEngineKit].currentSession isBroadcasting]) {
        [self cancelBroadcast];
    }
    [[WFAVEngineKit sharedEngineKit].currentSession leaveConference:NO];
    if (destroy) {
        [[WFCUConfigManager globalManager].appServiceProvider destroyConference:[WFAVEngineKit sharedEngineKit].currentSession.callId success:^{
        } error:^(int errorCode, NSString * _Nonnull message) {
        }];
    }
}
- (UIButton *)alertViewCheckBtn {
    if (!_alertViewCheckBtn) {
        CGFloat width = [[[NSUserDefaults standardUserDefaults] objectForKey:@"wfc_conference_alert_checkbox_width"] floatValue];
        CGFloat height = [[[NSUserDefaults standardUserDefaults] objectForKey:@"wfc_conference_alert_checkbox_height"] floatValue];
        _alertViewCheckBtn = [[UIButton alloc] initWithFrame:CGRectMake(8, 44, width, height)];
        [_alertViewCheckBtn setImage:[WFCUImage imageNamed:@"multi_unselected"] forState:UIControlStateNormal];
        [_alertViewCheckBtn setImage:[WFCUImage imageNamed:@"multi_selected"] forState:UIControlStateSelected];
        [_alertViewCheckBtn.titleLabel setFont:[UIFont systemFontOfSize:14]];
        [_alertViewCheckBtn setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
        [_alertViewCheckBtn addTarget:self action:@selector(onAlertViewCheckBtnPressed:) forControlEvents:UIControlEventTouchDown];
    }
    return _alertViewCheckBtn;
}

- (void)onAlertViewCheckBtnPressed:(id)sender {
    self.alertViewCheckBtn.selected = !self.alertViewCheckBtn.selected;
}
- (void)requestRecording:(BOOL)recording {
    __weak typeof(self) ws = self;
    [[WFCUConfigManager globalManager].appServiceProvider recordConference:self.currentConferenceInfo.conferenceId record:recording success:^{
        [ws sendCommandMessage:RECORDING targetUserId:nil boolValue:recording];
        [ws reloadConferenceInfo];
    } error:^(int errorCode, NSString * _Nonnull message) {
    }];
}
- (void)presentCommandAlertView:(UIViewController *)controller message:(NSString *)message actionTitle:(NSString *)actionTitle cancelTitle:(NSString *)cancelTitle contentText:(NSString *)contentText checkBox:(BOOL)checkBox actionHandler:(void (^)(BOOL checked))actionHandler cancelHandler:(void (^)(void))cancelHandler {
    __weak typeof(self) ws = self;
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:nil message:checkBox ? [NSString stringWithFormat:@"%@\n\n\n", message] : message preferredStyle:UIAlertControllerStyleAlert];

    UIAlertAction *action1 = [UIAlertAction actionWithTitle:cancelTitle ? cancelTitle : WFCString(@"Cancel") style:UIAlertActionStyleCancel handler:^(UIAlertAction *action) {
        ws.alertViewCheckBtn = nil;
        if (cancelHandler) {
            cancelHandler();
        }
    }];
    [alertController addAction:action1];

    UIAlertAction *action2 = [UIAlertAction actionWithTitle:actionTitle style:UIAlertActionStyleDestructive handler:^(UIAlertAction * _Nonnull action) {
        actionHandler(ws.alertViewCheckBtn.selected);
        ws.alertViewCheckBtn = nil;
    }];
    [alertController addAction:action2];

    if (checkBox) {
        [self.alertViewCheckBtn setTitle:[NSString stringWithFormat:@" %@", contentText] forState:UIControlStateNormal];
    } else {
        [self.alertViewCheckBtn setTitle:[NSString stringWithFormat:@"%@", contentText] forState:UIControlStateNormal];
        [_alertViewCheckBtn setImage:nil forState:UIControlStateNormal];
        [_alertViewCheckBtn setImage:nil forState:UIControlStateSelected];
    }
    [alertController.view addSubview:self.alertViewCheckBtn];

    [controller presentViewController:alertController animated:NO completion:^{
        CGSize size = alertController.view.bounds.size;
        if (ws.alertViewCheckBtn.frame.size.width != size.width - 16 || ws.alertViewCheckBtn.frame.size.height != size.height - 88) {
            [[NSUserDefaults standardUserDefaults] setObject:@(size.width - 16) forKey:@"wfc_conference_alert_checkbox_width"];
            [[NSUserDefaults standardUserDefaults] setObject:@(size.height - 88) forKey:@"wfc_conference_alert_checkbox_height"];
            dispatch_async(dispatch_get_global_queue(0, 0), ^{
                [[NSUserDefaults standardUserDefaults] synchronize];
            });
        }
        ws.alertViewCheckBtn.frame = CGRectMake(8, 44, size.width - 16, size.height - 88);
    }];
}
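/*
 Usage sketch (illustrative only, not part of this class's API surface): how a view
 controller might present the command alert with the optional checkbox. The message
 strings and handler bodies below are assumptions for the example.

 [[WFCUConferenceManager sharedInstance] presentCommandAlertView:self
                                                         message:@"Mute all members?"
                                                     actionTitle:@"Mute"
                                                     cancelTitle:nil
                                                     contentText:@"Allow members to unmute themselves"
                                                        checkBox:YES
                                                   actionHandler:^(BOOL checked) {
     // `checked` reflects the checkbox state when the destructive action was tapped.
     [[WFCUConferenceManager sharedInstance] requestMuteAll:checked];
 } cancelHandler:nil];
*/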
- (void)notifyMuteStateChanged {
    [[NSNotificationCenter defaultCenter] postNotificationName:kMuteStateChanged object:nil];
}
- (void)onReceiveMessages:(NSNotification *)notification {
    NSArray<WFCCMessage *> *messages = notification.object;
    if ([WFAVEngineKit sharedEngineKit].currentSession.state == kWFAVEngineStateConnected && [WFAVEngineKit sharedEngineKit].currentSession.isConference) {
        for (WFCCMessage *msg in messages) {
            if ([msg.content isKindOfClass:[WFCUConferenceChangeModelContent class]]) {
                WFCUConferenceChangeModelContent *changeModelCnt = (WFCUConferenceChangeModelContent *)msg.content;
                if ([changeModelCnt.conferenceId isEqualToString:[WFAVEngineKit sharedEngineKit].currentSession.callId]) {
                    if (changeModelCnt.isAudience) {
                        [self.delegate onChangeModeRequest:YES];
                        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:YES];
                        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:YES];
                    } else {
                        [[WFAVEngineKit sharedEngineKit].currentSession muteAudio:NO];
                        [[WFAVEngineKit sharedEngineKit].currentSession muteVideo:YES];
                        [self.delegate onChangeModeRequest:NO];
                    }
                }
            } else if ([msg.content isKindOfClass:[WFCUConferenceCommandContent class]]) {
                WFCUConferenceCommandContent *command = (WFCUConferenceCommandContent *)msg.content;
                if ([command.conferenceId isEqualToString:[WFAVEngineKit sharedEngineKit].currentSession.callId]) {
                    switch (command.type) {
                        // Mute all audio. Only the host can do this; the result is written into the
                        // conference profile. Carries a flag for whether members may unmute themselves.
                        case MUTE_ALL_AUDIO:
                            [self onMuteAll:YES allowUnMute:command.boolValue];
                            break;
                        // Mute all video. Only the host can do this; the result is written into the
                        // conference profile. Carries a flag for whether members may unmute themselves.
                        case MUTE_ALL_VIDEO:
                            [self onMuteAll:NO allowUnMute:command.boolValue];
                            break;
                        // Cancel mute-all audio. Only the host can do this; the result is written into
                        // the conference profile. Carries a flag for whether members are asked to unmute.
                        case CANCEL_MUTE_ALL_AUDIO:
                            [self onCancelMuteAll:YES requestUnmute:command.boolValue];
                            break;
                        // Cancel mute-all video. Only the host can do this; the result is written into
                        // the conference profile. Carries a flag for whether members are asked to unmute.
                        case CANCEL_MUTE_ALL_VIDEO:
                            [self onCancelMuteAll:NO requestUnmute:command.boolValue];
                            break;
                        // Ask a specific user to change their audio mute state. Only the host can do this.
                        // Carries a flag for mute/unmute.
                        case REQUEST_MUTE_AUDIO:
                            if ([command.targetUserId isEqualToString:[WFCCNetworkService sharedInstance].userId]) {
                                [self onRequestMute:YES mute:command.boolValue];
                            } else {
                                return;
                            }
                            break;
                        // Ask a specific user to change their video mute state. Only the host can do this.
                        // Carries a flag for mute/unmute.
                        case REQUEST_MUTE_VIDEO:
                            if ([command.targetUserId isEqualToString:[WFCCNetworkService sharedInstance].userId]) {
                                [self onRequestMute:NO mute:command.boolValue];
                            } else {
                                return;
                            }
                            break;
                        // Reject an unmute request. (If approved, there is no need to notify the requester.)
                        case REJECT_UNMUTE_AUDIO_REQUEST:
                            break;
                        // Reject an unmute request. (If approved, there is no need to notify the requester.)
                        case REJECT_UNMUTE_VIDEO_REQUEST:
                            break;
                        // A regular member applies to unmute audio. Carries a flag for whether this is
                        // a request or a cancellation of the request.
                        case APPLY_UNMUTE_AUDIO:
                            if ([self.currentConferenceInfo.owner isEqualToString:[WFCCNetworkService sharedInstance].userId]) {
                                if (command.boolValue) {
                                    [self.applyingUnmuteAudioMembers removeObject:msg.fromUser];
                                } else {
                                    if (![self.applyingUnmuteAudioMembers containsObject:msg.fromUser]) {
                                        [self.applyingUnmuteAudioMembers addObject:msg.fromUser];
                                    }
                                }
                                [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
                            }
                            break;
                        // A regular member applies to unmute video. Carries a flag for whether this is
                        // a request or a cancellation of the request.
                        case APPLY_UNMUTE_VIDEO:
                            if ([self.currentConferenceInfo.owner isEqualToString:[WFCCNetworkService sharedInstance].userId]) {
                                if (command.boolValue) {
                                    [self.applyingUnmuteVideoMembers removeObject:msg.fromUser];
                                } else {
                                    if (![self.applyingUnmuteVideoMembers containsObject:msg.fromUser]) {
                                        [self.applyingUnmuteVideoMembers addObject:msg.fromUser];
                                    }
                                }
                                [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
                            }
                            break;
                        // The host approves an unmute-audio application. Carries a flag for approval or rejection.
                        case APPROVE_UNMUTE_AUDIO:
                        // The host approves all unmute-audio applications. Carries a flag for approval or rejection.
                        case APPROVE_ALL_UNMUTE_AUDIO:
                            if (self.isApplyingUnmuteAudio) {
                                self.isApplyingUnmuteAudio = NO;
                                if (command.boolValue) {
                                    [self muteAudio:NO];
                                }
                            } else {
                                return;
                            }
                            break;
                        // The host approves an unmute-video application. Carries a flag for approval or rejection.
                        case APPROVE_UNMUTE_VIDEO:
                        // The host approves all unmute-video applications. Carries a flag for approval or rejection.
                        case APPROVE_ALL_UNMUTE_VIDEO:
                            if (self.isApplyingUnmuteVideo) {
                                self.isApplyingUnmuteVideo = NO;
                                if (command.boolValue) {
                                    [self muteVideo:NO];
                                }
                            } else {
                                return;
                            }
                            break;
                        // Hand up. Carries a flag for raising or lowering the hand.
                        case HANDUP:
                            if (![self.handupMembers containsObject:msg.fromUser]) {
                                [self.handupMembers addObject:msg.fromUser];
                            }
                            [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
                            break;
                        // The host puts down one member's raised hand.
                        case PUT_HAND_DOWN:
                            if (self.isHandup) {
                                self.isHandup = NO;
                            } else {
                                return;
                            }
                            break;
                        // The host puts down all members' raised hands.
                        case PUT_ALL_HAND_DOWN:
                            if (self.isHandup) {
                                self.isHandup = NO;
                            } else {
                                return;
                            }
                            break;
                        case RECORDING:
                            [self reloadConferenceInfo];
                            break;
                        case FOCUS:
                        case CANCEL_FOCUS:
                            self.currentConferenceInfo.focus = command.targetUserId;
                            [self reloadConferenceInfo];
                            break;
                        default:
                            break;
                    }
                    // Hand the command back to the UI so it can show a prompt or notification.
                    if ([self.delegate respondsToSelector:@selector(onReceiveCommand:content:fromUser:)]) {
                        [self.delegate onReceiveCommand:command.type content:command fromUser:msg.fromUser];
                    }
                }
            }
        }
    }
}
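/*
 Command flow sketch (illustrative only): the host drives the conference through the
 request/approve APIs below, while onReceiveMessages: above applies the resulting
 WFCUConferenceCommandContent on every member. Assuming the local user is the
 conference owner, and "someUserId" is a placeholder member id:

 WFCUConferenceManager *manager = [WFCUConferenceManager sharedInstance];
 [manager requestMuteAll:YES];                                  // MUTE_ALL_AUDIO, members may unmute themselves
 [manager requestMember:@"someUserId" Mute:YES];                // REQUEST_MUTE_AUDIO targeted at one member
 [manager approveMember:@"someUserId" unmute:YES isAudio:YES];  // APPROVE_UNMUTE_AUDIO
 [manager putAllHandDown];                                      // PUT_ALL_HAND_DOWN

 A regular member would instead call, e.g., [manager applyUnmute:NO isAudio:YES] or
 [manager handup:YES], and then wait for the host's command to arrive here.
*/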
- (void)onMuteAll:(BOOL)audio allowUnMute:(BOOL)allowUnmute {
    [self reloadConferenceInfo];
    WFAVCallSession *session = [[WFAVEngineKit sharedEngineKit] currentSession];
    if (!session.isAudience) {
        if (audio) {
            self.isAllowUnMuteVideoWhenMuteAll = allowUnmute;
            [self muteAudio:YES];
        } else {
            self.isAllowUnMuteVideoWhenMuteAll = allowUnmute;
            [self muteVideo:YES];
        }
    }
}

- (void)onCancelMuteAll:(BOOL)audio requestUnmute:(BOOL)requestUnmute {
    [self reloadConferenceInfo];
}

- (void)onRequestMute:(BOOL)audio mute:(BOOL)mute {
    if (mute) {
        if (audio) {
            [self muteAudio:YES];
        } else {
            [self muteVideo:YES];
        }
    }
}
- (void)onAppTerminate {
    NSLog(@"conference manager onAppTerminate");
    if (self.socket && self.sockets.count) {
        NSLog(@"is broadcasting...");
        [self cancelBroadcast];
    }
}
- (void)resetCommandState {
    [self.applyingUnmuteAudioMembers removeAllObjects];
    [self.applyingUnmuteVideoMembers removeAllObjects];
    [self.handupMembers removeAllObjects];
    self.isApplyingUnmuteAudio = NO;
    self.isApplyingUnmuteVideo = NO;
    self.isHandup = NO;
    self.isMuteAllAudio = NO;
    self.isMuteAllVideo = NO;
}
- (void)request:(NSString *)userId changeModel:(BOOL)isAudience inConference:(NSString *)conferenceId {
    WFCUConferenceChangeModelContent *cnt = [[WFCUConferenceChangeModelContent alloc] init];
    cnt.conferenceId = conferenceId;
    cnt.isAudience = isAudience;
    WFCCConversation *conv = [WFCCConversation conversationWithType:Single_Type target:userId line:0];
    [[WFCCIMService sharedWFCIMService] send:conv content:cnt success:^(long long messageUid, long long timestamp) {
    } error:^(int error_code) {
    }];
}
- (void)addHistory:(WFZConferenceInfo *)info duration:(int)duration {
    WFCUConferenceHistory *history = [[WFCUConferenceHistory alloc] init];
    history.conferenceInfo = info;
    history.timestamp = [[[NSDate alloc] init] timeIntervalSince1970];
    history.duration = duration;

    NSMutableArray<WFCUConferenceHistory *> *conferenceHistories = [[self getConferenceHistoryList] mutableCopy];
    for (WFCUConferenceHistory *his in conferenceHistories) {
        if ([his.conferenceInfo.conferenceId isEqualToString:info.conferenceId]) {
            history.duration += his.duration;
            [conferenceHistories removeObject:his];
            break;
        }
    }
    [conferenceHistories insertObject:history atIndex:0];

    NSMutableArray *dictArray = [NSMutableArray new];
    [conferenceHistories enumerateObjectsUsingBlock:^(WFCUConferenceHistory * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
        [dictArray addObject:[obj toDictionary]];
    }];
    if (dictArray.count > 500) {
        dictArray = [[dictArray subarrayWithRange:NSMakeRange(0, 500)] mutableCopy];
    }
    [[NSUserDefaults standardUserDefaults] setObject:dictArray forKey:@"WFC_CONFERENCE_HISTORY"];
    [[NSUserDefaults standardUserDefaults] synchronize];
}
- (NSArray<WFCUConferenceHistory *> *)getConferenceHistoryList {
    NSObject *o = [[NSUserDefaults standardUserDefaults] objectForKey:@"WFC_CONFERENCE_HISTORY"];
    NSMutableArray *ret = [[NSMutableArray alloc] init];
    if ([o isKindOfClass:NSArray.class]) {
        NSArray<NSDictionary *> *arr = (NSArray<NSDictionary *> *)o;
        [arr enumerateObjectsUsingBlock:^(NSDictionary * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
            [ret addObject:[WFCUConferenceHistory fromDictionary:obj]];
        }];
    }
    return ret;
}
// Link format: wildfirechat://conference/<conferenceId>?pwd=<password>
// If you change this, update the corresponding openURL handling in the AppDelegate.
- (NSString *)linkFromConferenceId:(NSString *)conferenceId password:(NSString *)password {
    if (password.length) {
        return [NSString stringWithFormat:@"wildfirechat://conference/%@?pwd=%@", conferenceId, password];
    } else {
        return [NSString stringWithFormat:@"wildfirechat://conference/%@", conferenceId];
    }
}
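/*
 Parsing sketch (illustrative only): the inverse of the link built above, roughly what
 the AppDelegate's openURL handler might do. Assumes `url` is the NSURL delivered to
 the handler; the join step at the end is only indicated.

 NSURLComponents *components = [NSURLComponents componentsWithURL:url resolvingAgainstBaseURL:NO];
 if ([components.scheme isEqualToString:@"wildfirechat"] && [components.host isEqualToString:@"conference"]) {
     NSString *conferenceId = components.path.length > 1 ? [components.path substringFromIndex:1] : nil;
     NSString *password = nil;
     for (NSURLQueryItem *item in components.queryItems) {
         if ([item.name isEqualToString:@"pwd"]) {
             password = item.value;
         }
     }
     // ...query the conference info with conferenceId/password and join it.
 }
*/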
- (void)sendCommandMessage:(WFCUConferenceCommandContent *)commandContent {
    WFCCConversation *conv = [WFCCConversation conversationWithType:Chatroom_Type target:self.currentConferenceInfo.conferenceId line:0];
    [[WFCCIMService sharedWFCIMService] send:conv content:commandContent success:nil error:nil];
}

- (void)joinChatroom {
    __weak typeof(self) ws = self;
    [[WFCCIMService sharedWFCIMService] joinChatroom:self.currentConferenceInfo.conferenceId success:nil error:^(int error_code) {
        ws.failureJoinChatroom = YES;
    }];
}

- (BOOL)isOwner {
    return [self.currentConferenceInfo.owner isEqualToString:[WFCCNetworkService sharedInstance].userId];
}
- (BOOL)requestMuteAll:(BOOL)allowMemberUnmute {
    if (![self isOwner])
        return NO;
    self.currentConferenceInfo.audience = YES;
    self.currentConferenceInfo.allowTurnOnMic = allowMemberUnmute;
    __weak typeof(self) ws = self;
    [[WFCUConfigManager globalManager].appServiceProvider updateConference:self.currentConferenceInfo success:^() {
        [ws sendCommandMessage:MUTE_ALL_AUDIO targetUserId:nil boolValue:allowMemberUnmute];
    } error:^(int errorCode, NSString * _Nonnull message) {
    }];
    return YES;
}

- (BOOL)requestUnmuteAll:(BOOL)unmute {
    if (![self isOwner])
        return NO;
    self.currentConferenceInfo.audience = NO;
    self.currentConferenceInfo.allowTurnOnMic = YES;
    __weak typeof(self) ws = self;
    [[WFCUConfigManager globalManager].appServiceProvider updateConference:self.currentConferenceInfo success:^(void) {
        [ws sendCommandMessage:CANCEL_MUTE_ALL_AUDIO targetUserId:nil boolValue:unmute];
    } error:^(int errorCode, NSString * _Nonnull message) {
    }];
    return YES;
}
- (BOOL)requestFocus:(NSString *)focusedUserId {
    if (![self isOwner])
        return NO;
    __weak typeof(self) ws = self;
    [[WFCUConfigManager globalManager].appServiceProvider focusConference:self.currentConferenceInfo.conferenceId userId:focusedUserId success:^{
        ws.currentConferenceInfo.focus = focusedUserId;
        [ws sendCommandMessage:FOCUS targetUserId:focusedUserId boolValue:NO];
    } error:^(int errorCode, NSString * _Nonnull message) {
    }];
    return YES;
}

- (BOOL)requestCancelFocus {
    if (![self isOwner])
        return NO;
    __weak typeof(self) ws = self;
    [[WFCUConfigManager globalManager].appServiceProvider focusConference:self.currentConferenceInfo.conferenceId userId:nil success:^{
        ws.currentConferenceInfo.focus = nil;
        [ws sendCommandMessage:CANCEL_FOCUS targetUserId:nil boolValue:NO];
    } error:^(int errorCode, NSString * _Nonnull message) {
    }];
    return YES;
}
- (BOOL)requestMember:(NSString *)memberId Mute:(BOOL)isMute {
    if (![self isOwner])
        return NO;
    [self sendCommandMessage:REQUEST_MUTE_AUDIO targetUserId:memberId boolValue:isMute];
    return YES;
}

- (void)rejectUnmuteRequest {
    [self sendCommandMessage:REJECT_UNMUTE_AUDIO_REQUEST targetUserId:nil boolValue:NO];
}

- (void)applyUnmute:(BOOL)isCancel isAudio:(BOOL)isAudio {
    if (isAudio) {
        self.isApplyingUnmuteAudio = !isCancel;
        [self sendCommandMessage:APPLY_UNMUTE_AUDIO targetUserId:nil boolValue:isCancel];
    } else {
        self.isApplyingUnmuteVideo = !isCancel;
        [self sendCommandMessage:APPLY_UNMUTE_VIDEO targetUserId:nil boolValue:isCancel];
    }
}
- (BOOL)approveMember:(NSString *)memberId unmute:(BOOL)isAllow isAudio:(BOOL)isAudio {
    if (![self isOwner])
        return NO;
    if (isAudio) {
        [self.applyingUnmuteAudioMembers removeObject:memberId];
    } else {
        [self.applyingUnmuteVideoMembers removeObject:memberId];
    }
    [self sendCommandMessage:isAudio ? APPROVE_UNMUTE_AUDIO : APPROVE_UNMUTE_VIDEO targetUserId:memberId boolValue:isAllow];
    [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
    return YES;
}

- (BOOL)approveAllMemberUnmute:(BOOL)isAllow isAudio:(BOOL)isAudio {
    if (![self isOwner])
        return NO;
    if (isAudio) {
        [self.applyingUnmuteAudioMembers removeAllObjects];
    } else {
        [self.applyingUnmuteVideoMembers removeAllObjects];
    }
    [self sendCommandMessage:isAudio ? APPROVE_ALL_UNMUTE_AUDIO : APPROVE_ALL_UNMUTE_VIDEO targetUserId:nil boolValue:isAllow];
    [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
    return YES;
}
- (void)handup:(BOOL)handup {
    self.isHandup = handup;
    [self sendCommandMessage:HANDUP targetUserId:nil boolValue:handup];
}

- (void)putMemberHandDown:(NSString *)memberId {
    [self.handupMembers removeObject:memberId];
    [self sendCommandMessage:PUT_HAND_DOWN targetUserId:memberId boolValue:NO];
    [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
}

- (void)putAllHandDown {
    [self.handupMembers removeAllObjects];
    [self sendCommandMessage:PUT_ALL_HAND_DOWN targetUserId:nil boolValue:NO];
    [[NSNotificationCenter defaultCenter] postNotificationName:@"kConferenceCommandStateChanged" object:nil];
}

- (void)sendCommandMessage:(WFCUConferenceCommandType)type targetUserId:(NSString *)userId boolValue:(BOOL)boolValue {
    WFCUConferenceCommandContent *command = [WFCUConferenceCommandContent commandOfType:type conference:self.currentConferenceInfo.conferenceId];
    command.targetUserId = userId;
    command.boolValue = boolValue;
    [self sendCommandMessage:command];
}
- (void)broadcast:(UIView *)view withAudio:(BOOL)withAudio {
    self.receivedData = [[NSMutableData alloc] init];
    self.receivedAudioData = [[NSMutableData alloc] init];
    self.audioDataLock = [[NSLock alloc] init];
    self.broadcastWithAudio = withAudio;
    [self setupSocket:NO];

    [view addSubview:self.broadPickerView];
    for (UIView *subview in self.broadPickerView.subviews) {
        if ([subview isKindOfClass:[UIButton class]]) {
            float iOSVersion = [[UIDevice currentDevice].systemVersion floatValue];
            UIButton *button = (UIButton *)subview;
            if (iOSVersion >= 13) {
                [button sendActionsForControlEvents:UIControlEventTouchDown];
                [button sendActionsForControlEvents:UIControlEventTouchUpInside];
            } else {
                [button sendActionsForControlEvents:UIControlEventTouchDown];
            }
        }
    }

    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(sendOrientationCommand)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];
}
- (void)cancelBroadcast {
//    for (UIView *view in self.broadPickerView.subviews) {
//        if ([view isKindOfClass:[UIButton class]]) {
//            float iOSVersion = [[UIDevice currentDevice].systemVersion floatValue];
//            UIButton *button = (UIButton *)view;
//            if (iOSVersion >= 13) {
//                [(UIButton *)view sendActionsForControlEvents:UIControlEventTouchDown];
//                [(UIButton *)view sendActionsForControlEvents:UIControlEventTouchUpInside];
//            } else {
//                [(UIButton *)view sendActionsForControlEvents:UIControlEventTouchDown];
//            }
//        }
//    }
    [self sendBroadcastCommand:3 value:0];
}
- (BOOL)isBroadcasting {
    return [WFAVEngineKit sharedEngineKit].currentSession.isBroadcasting;
}

- (void)onBroadcastStarted {
    [[WFAVEngineKit sharedEngineKit].currentSession setBroadcastingWithVideoSource:self];
    [self sendOrientationCommand];
    if (self.broadcastWithAudio) {
        [self sendWithAudioCommand:YES];
    }
    [[NSNotificationCenter defaultCenter] postNotificationName:@"kBroadcastingStatusUpdated" object:nil];
    [WFAVEngineKit sharedEngineKit].currentSession.audioDataDelegate = self;
}

- (void)onBroadcastStoped {
    [self.socket disconnect];
    self.socket = nil;
    [self.sockets removeAllObjects];
    [[WFAVEngineKit sharedEngineKit].currentSession setBroadcastingWithVideoSource:nil];
    self.receivedData = nil;
    self.receivedAudioData = nil;
    self.audioDataLock = nil;
    [self.broadPickerView removeFromSuperview];
    self.broadPickerView = nil;
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIDeviceOrientationDidChangeNotification object:nil];
    [[NSNotificationCenter defaultCenter] postNotificationName:@"kBroadcastingStatusUpdated" object:nil];
    [WFAVEngineKit sharedEngineKit].currentSession.audioDataDelegate = nil;
}
- (RPSystemBroadcastPickerView *)broadPickerView {
    if (!_broadPickerView) {
        _broadPickerView = [[RPSystemBroadcastPickerView alloc] initWithFrame:CGRectMake(100, 100, 50, 50)];
        _broadPickerView.showsMicrophoneButton = NO;
        _broadPickerView.preferredExtension = @"cn.wildfirechat.messanger.Broadcast";
        _broadPickerView.hidden = YES;
    }
    return _broadPickerView;
}
- (void)setupSocket:(BOOL)retry {
    self.sockets = [NSMutableArray array];
    self.queue = dispatch_queue_create("cn.wildfirechat.conference.broadcast.receive", DISPATCH_QUEUE_SERIAL);
    self.socket = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:self.queue];
    self.socket.IPv6Enabled = NO;
    NSError *error;
    [self.socket acceptOnPort:36622 error:&error];
    [self.socket readDataWithTimeout:-1 tag:0];
    if (error == nil) {
        NSLog(@"Started listening on port 36622");
    } else {
        NSLog(@"Failed to start listening on port 36622");
        // Retry once if the first attempt fails.
        if (!retry) {
            [self setupSocket:YES];
        }
    }
}
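/*
 Connection sketch (illustrative only): setupSocket: above turns this process into a
 local TCP server on port 36622. The counterpart is the ReplayKit broadcast upload
 extension (preferredExtension "cn.wildfirechat.messanger.Broadcast"), which is expected
 to connect back over loopback with its own GCDAsyncSocket, roughly as below; the
 delegate wiring and `extensionQueue` are assumptions, and the actual extension code
 lives outside this file.

 GCDAsyncSocket *client = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:extensionQueue];
 NSError *connectError = nil;
 [client connectToHost:@"127.0.0.1" onPort:36622 error:&connectError];
*/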
- (void)onReceiveBroadcastCommand:(NSString *)command {
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([command isEqualToString:@"Start"]) {
            [self onBroadcastStarted];
        } else if ([command isEqualToString:@"Finish"]) {
            [self onBroadcastStoped];
        }
    });
}
- (void)sendOrientationCommand {
    // Map the current device orientation to the integer value the broadcast extension expects (0 = portrait).
    int orientation = 0;
    switch ([[UIDevice currentDevice] orientation]) {
        case UIDeviceOrientationLandscapeLeft:
            orientation = 3;
            break;
        case UIDeviceOrientationLandscapeRight:
            orientation = 1;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            orientation = 2;
            break;
        default:
            break;
    }
    [self sendBroadcastCommand:0 value:orientation];
}

- (void)sendWithAudioCommand:(BOOL)withAudio {
    [self sendBroadcastCommand:1 value:withAudio ? 1 : 0];
}
- (void)sendBroadcastCommand:(int)type value:(int)value {
    GCDAsyncSocket *socket = self.sockets.count ? self.sockets[0] : nil;
    if (socket) {
        Command header;
        header.type = type;
        header.value = value;
        NSData *md = [[NSData alloc] initWithBytes:&header length:sizeof(Command)];
        NSLog(@"send command %d, %d", type, value);
        [socket writeData:md withTimeout:(NSTimeInterval)5 tag:0];
    }
}
#pragma mark - WFAVCallSessionAudioDataDelegate
- (OSStatus)onDeliverRecordeAudiodData:(AudioUnitRenderActionFlags *)flags timestamp:(const AudioTimeStamp *)time_stamp busNumber:(UInt32)bus_number numFrames:(UInt32)num_frames ioData:(AudioBufferList *)io_data {
    @autoreleasepool {
        // Pull one render quantum of device audio received from the broadcast extension.
        // The buffered data is 16-bit interleaved stereo, i.e. 4 bytes per frame.
        NSLock *lock = self.audioDataLock;
        [lock lock];
        NSData *deviceAudioData = nil;
        if (self.receivedAudioData.length >= num_frames * 4) {
            deviceAudioData = [self.receivedAudioData subdataWithRange:NSMakeRange(0, num_frames * 4)];
            [self.receivedAudioData replaceBytesInRange:NSMakeRange(0, num_frames * 4) withBytes:NULL length:0];
            self.receivedAudioData = [[NSMutableData alloc] initWithData:self.receivedAudioData];
        }
        [lock unlock];

        // Mix the attenuated device audio into the mono microphone buffer, clamping to the 16-bit range.
        for (int i = 0; i < num_frames; i++) {
            if (io_data->mBuffers[0].mNumberChannels == 1) {
                short channel1 = 0;
                short channel2 = 0;
                if (deviceAudioData) {
                    channel1 = *((short *)(deviceAudioData.bytes + i * 4)) / 10;
                    channel2 = *((short *)(deviceAudioData.bytes + i * 4 + 2)) / 10;
                }
                short deviceFrame = channel1 / 2 + channel2 / 2;
                short micFrame = *((short *)(io_data->mBuffers[0].mData + i * 2));
                int mixed = deviceFrame + micFrame * 9 / 10;
                if (mixed > 32767) {
                    mixed = 32767;
                } else if (mixed < -32767) {
                    mixed = -32767;
                }
                *((short *)(io_data->mBuffers[0].mData + i * 2)) = mixed;
            }
        }
    }
    return 0;
}
#pragma mark - GCDAsyncSocketDelegate
- (void)socketDidDisconnect:(GCDAsyncSocket *)sock withError:(nullable NSError *)err {
    [self.sockets removeObject:sock];
}

- (void)socketDidCloseReadStream:(GCDAsyncSocket *)sock {
    [self.sockets removeObject:sock];
}

- (void)socket:(GCDAsyncSocket *)sock didAcceptNewSocket:(GCDAsyncSocket *)newSocket {
    [self.sockets addObject:newSocket];
    [newSocket readDataWithTimeout:-1 tag:0];
}
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag {
    [self.receivedData appendData:data];
    @autoreleasepool {
        if (self.receivedData.length > sizeof(PacketHeader)) {
            PacketHeader header;
            memcpy(&header, self.receivedData.bytes, sizeof(PacketHeader));
            // Consume complete packets: each one is a PacketHeader followed by dataLen payload bytes.
            while (self.receivedData.length >= sizeof(PacketHeader) + header.dataLen) {
                NSData *rawData = [[NSData alloc] initWithBytes:self.receivedData.bytes + sizeof(PacketHeader) length:header.dataLen];
                [self.receivedData replaceBytesInRange:NSMakeRange(0, sizeof(PacketHeader) + header.dataLen) withBytes:NULL length:0];
                if (header.dataType == 0) {
                    // Control command: a UTF-8 string such as "Start" or "Finish".
                    if (rawData.length) {
                        NSString *status = [NSString stringWithUTF8String:rawData.bytes];
                        NSLog(@"Receive command:%@", status);
                        [self onReceiveBroadcastCommand:status];
                    } else {
                        NSLog(@"Bad command");
                    }
                } else if (header.dataType == 1) {
                    // Media sample: a SampleInfo header followed by the raw frame bytes.
                    SampleInfo sampleInfo;
                    memcpy(&sampleInfo, rawData.bytes, sizeof(SampleInfo));
                    NSData *frameData = [[NSData alloc] initWithBytes:rawData.bytes + sizeof(SampleInfo) length:sampleInfo.dataLen];
                    if (sampleInfo.type == 0) { //video
                        // Wrap the received I420 frame in a pixel buffer and feed it to the engine as an external video frame.
                        WFCUI420VideoFrame *i420Frame = [[WFCUI420VideoFrame alloc] initWithWidth:sampleInfo.width height:sampleInfo.height];
                        [i420Frame fromBytes:frameData];
                        CVPixelBufferRef pixelBuffer = [i420Frame toPixelBuffer];
                        RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
                            [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
                        NSTimeInterval timeStampSeconds = CACurrentMediaTime();
                        int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
                        RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
                            [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
                                                                        rotation:0
                                                                     timeStampNs:timeStampNs];
                        [self.frameDelegate capturer:nil didCaptureVideoFrame:videoFrame];
                        CVPixelBufferRelease(pixelBuffer);
                    } else if (sampleInfo.type == 1) { //audio
                        // Device audio: buffer it for mixing in onDeliverRecordeAudiodData:.
                        NSLock *lock = self.audioDataLock;
                        [lock lock];
                        [self.receivedAudioData appendData:frameData];
                        [lock unlock];
                    }
                } else {
                    NSLog(@"Unknown command");
                }
                if (self.receivedData.length > sizeof(PacketHeader)) {
                    memcpy(&header, self.receivedData.bytes, sizeof(PacketHeader));
                } else {
                    break;
                }
            }
        }
    }
    [sock readDataWithTimeout:-1 tag:0];
}
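/*
 Framing sketch (illustrative only): what the broadcast extension is expected to send,
 matching the parser in socket:didReadData:withTag: above. Each packet is a PacketHeader
 followed by dataLen payload bytes; dataType 0 carries a UTF-8 command string
 ("Start"/"Finish"), dataType 1 carries a SampleInfo header plus the raw frame bytes
 (SampleInfo.type 0 = I420 video, 1 = PCM audio). PacketHeader and SampleInfo come from
 WFCUBroadcastDefine.h; their exact field types/layout are assumed to match that header,
 and `i420Data`, `width`, `height`, `clientSocket` are placeholders.

 SampleInfo sampleInfo;
 memset(&sampleInfo, 0, sizeof(sampleInfo));
 sampleInfo.type = 0;                                       // video
 sampleInfo.width = width;
 sampleInfo.height = height;
 sampleInfo.dataLen = (int)i420Data.length;

 PacketHeader header;
 memset(&header, 0, sizeof(header));
 header.dataType = 1;                                       // media sample
 header.dataLen = (int)(sizeof(SampleInfo) + i420Data.length);

 NSMutableData *packet = [NSMutableData data];
 [packet appendBytes:&header length:sizeof(PacketHeader)];
 [packet appendBytes:&sampleInfo length:sizeof(SampleInfo)];
 [packet appendData:i420Data];
 [clientSocket writeData:packet withTimeout:-1 tag:0];
*/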
#pragma mark - WFAVExternalVideoSource
- (void)startCapture:(id<WFAVExternalFrameDelegate> _Nonnull)delegate {
    self.frameDelegate = delegate;
}

- (void)stopCapture {
    self.frameDelegate = nil;
}

@end
#endif