OngoingCallThreadLocalContext.mm

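// Objective-C++ glue between the OngoingCallThreadLocalContext* / GroupCallThreadLocalContext
// Objective-C API and the tgcalls C++ call engine (tgcalls::Instance for 1:1 calls,
// tgcalls::GroupInstanceCustomImpl for group calls, plus video capture and rendering helpers).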
#import <TgVoipWebrtc/OngoingCallThreadLocalContext.h>

#import "MediaUtils.h"

#import "Instance.h"
#import "InstanceImpl.h"
#import "v2/InstanceV2Impl.h"
#import "v2/InstanceV2ReferenceImpl.h"
#import "v2_4_0_0/InstanceV2_4_0_0Impl.h"
#include "StaticThreads.h"

#import "VideoCaptureInterface.h"
#import "platform/darwin/VideoCameraCapturer.h"

#ifndef WEBRTC_IOS
#import "platform/darwin/VideoMetalViewMac.h"
#import "platform/darwin/GLVideoViewMac.h"
#import "platform/darwin/VideoSampleBufferViewMac.h"
#define UIViewContentModeScaleAspectFill kCAGravityResizeAspectFill
#define UIViewContentModeScaleAspect kCAGravityResizeAspect
#else
#import "platform/darwin/VideoMetalView.h"
#import "platform/darwin/GLVideoView.h"
#import "platform/darwin/VideoSampleBufferView.h"
#import "platform/darwin/VideoCaptureView.h"
#import "platform/darwin/CustomExternalCapturer.h"
#include "platform/darwin/iOS/tgcalls_audio_device_module_ios.h"
#endif

#import "group/GroupInstanceImpl.h"
#import "group/GroupInstanceCustomImpl.h"

#import "VideoCaptureInterfaceImpl.h"

#include "sdk/objc/native/src/objc_frame_buffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#import "platform/darwin/TGRTCCVPixelBuffer.h"
@implementation OngoingCallConnectionDescriptionWebrtc

- (instancetype _Nonnull)initWithReflectorId:(uint8_t)reflectorId hasStun:(bool)hasStun hasTurn:(bool)hasTurn hasTcp:(bool)hasTcp ip:(NSString * _Nonnull)ip port:(int32_t)port username:(NSString * _Nonnull)username password:(NSString * _Nonnull)password {
    self = [super init];
    if (self != nil) {
        _reflectorId = reflectorId;
        _hasStun = hasStun;
        _hasTurn = hasTurn;
        _hasTcp = hasTcp;
        _ip = ip;
        _port = port;
        _username = username;
        _password = password;
    }
    return self;
}

@end

@interface IsProcessingCustomSampleBufferFlag : NSObject

@property (nonatomic) bool value;

@end

@implementation IsProcessingCustomSampleBufferFlag

- (instancetype)init {
    self = [super init];
    if (self != nil) {
    }
    return self;
}

@end
@interface OngoingCallThreadLocalContextVideoCapturer () {
    std::shared_ptr<tgcalls::VideoCaptureInterface> _interface;
    IsProcessingCustomSampleBufferFlag *_isProcessingCustomSampleBuffer;
}

@end

@protocol OngoingCallThreadLocalContextWebrtcVideoViewImpl <NSObject>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;
@property (nonatomic, readonly) CGFloat aspect;

@end
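// The categories below adapt the platform renderer views (VideoMetalView, GLVideoView,
// VideoSampleBufferView) to the shared OngoingCallThreadLocalContextWebrtcVideoView(Impl)
// interface by forwarding orientation, aspect and mirroring callbacks to their internal* counterparts.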
@interface VideoMetalView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView, OngoingCallThreadLocalContextWebrtcVideoViewImpl>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;
@property (nonatomic, readonly) CGFloat aspect;

@end

@implementation VideoMetalView (VideoViewImpl)

- (OngoingCallVideoOrientationWebrtc)orientation {
    return (OngoingCallVideoOrientationWebrtc)self.internalOrientation;
}

- (CGFloat)aspect {
    return self.internalAspect;
}

- (void)setOrientation:(OngoingCallVideoOrientationWebrtc)orientation {
    [self setInternalOrientation:(int)orientation];
}

- (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc, CGFloat))onOrientationUpdated {
    if (onOrientationUpdated) {
        [self internalSetOnOrientationUpdated:^(int value, CGFloat aspect) {
            onOrientationUpdated((OngoingCallVideoOrientationWebrtc)value, aspect);
        }];
    } else {
        [self internalSetOnOrientationUpdated:nil];
    }
}

- (void)setOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated {
    if (onIsMirroredUpdated) {
        [self internalSetOnIsMirroredUpdated:^(bool value) {
            onIsMirroredUpdated(value);
        }];
    } else {
        [self internalSetOnIsMirroredUpdated:nil];
    }
}

- (void)updateIsEnabled:(bool)isEnabled {
    [self setEnabled:isEnabled];
}

@end

@interface GLVideoView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView, OngoingCallThreadLocalContextWebrtcVideoViewImpl>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;
@property (nonatomic, readonly) CGFloat aspect;

@end

@implementation GLVideoView (VideoViewImpl)

- (OngoingCallVideoOrientationWebrtc)orientation {
    return (OngoingCallVideoOrientationWebrtc)self.internalOrientation;
}

- (CGFloat)aspect {
    return self.internalAspect;
}

- (void)setOrientation:(OngoingCallVideoOrientationWebrtc)orientation {
    [self setInternalOrientation:(int)orientation];
}

- (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc, CGFloat))onOrientationUpdated {
    if (onOrientationUpdated) {
        [self internalSetOnOrientationUpdated:^(int value, CGFloat aspect) {
            onOrientationUpdated((OngoingCallVideoOrientationWebrtc)value, aspect);
        }];
    } else {
        [self internalSetOnOrientationUpdated:nil];
    }
}

- (void)setOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated {
    if (onIsMirroredUpdated) {
        [self internalSetOnIsMirroredUpdated:^(bool value) {
            onIsMirroredUpdated(value);
        }];
    } else {
        [self internalSetOnIsMirroredUpdated:nil];
    }
}

- (void)updateIsEnabled:(bool)__unused isEnabled {
}

@end

@interface VideoSampleBufferView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView, OngoingCallThreadLocalContextWebrtcVideoViewImpl>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;
@property (nonatomic, readonly) CGFloat aspect;

@end

@implementation VideoSampleBufferView (VideoViewImpl)

- (OngoingCallVideoOrientationWebrtc)orientation {
    return (OngoingCallVideoOrientationWebrtc)self.internalOrientation;
}

- (CGFloat)aspect {
    return self.internalAspect;
}

- (void)setOrientation:(OngoingCallVideoOrientationWebrtc)orientation {
    [self setInternalOrientation:(int)orientation];
}

- (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc, CGFloat))onOrientationUpdated {
    if (onOrientationUpdated) {
        [self internalSetOnOrientationUpdated:^(int value, CGFloat aspect) {
            onOrientationUpdated((OngoingCallVideoOrientationWebrtc)value, aspect);
        }];
    } else {
        [self internalSetOnOrientationUpdated:nil];
    }
}

- (void)setOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated {
    if (onIsMirroredUpdated) {
        [self internalSetOnIsMirroredUpdated:^(bool value) {
            onIsMirroredUpdated(value);
        }];
    } else {
        [self internalSetOnIsMirroredUpdated:nil];
    }
}

- (void)updateIsEnabled:(bool)isEnabled {
    [self setEnabled:isEnabled];
}

@end
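// GroupCallDisposable wraps a cancellation block; the CallVideoFrame*Buffer classes below expose
// native CVPixelBuffer, NV12 and I420 frame data to Objective-C. The NSData plane wrappers capture
// the rtc::scoped_refptr in their deallocator blocks, which keeps the underlying WebRTC buffer
// alive for as long as the NSData objects are in use.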
@interface GroupCallDisposable () {
    dispatch_block_t _block;
}

@end

@implementation GroupCallDisposable

- (instancetype)initWithBlock:(dispatch_block_t _Nonnull)block {
    self = [super init];
    if (self != nil) {
        _block = [block copy];
    }
    return self;
}

- (void)dispose {
    if (_block) {
        _block();
    }
}

@end

@implementation CallVideoFrameNativePixelBuffer

- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    self = [super init];
    if (self != nil) {
        assert(pixelBuffer != nil);
        _pixelBuffer = CVPixelBufferRetain(pixelBuffer);
    }
    return self;
}

- (void)dealloc {
    CVPixelBufferRelease(_pixelBuffer);
}

@end

@implementation CallVideoFrameNV12Buffer

- (instancetype)initWithBuffer:(rtc::scoped_refptr<webrtc::NV12BufferInterface>)nv12Buffer {
    self = [super init];
    if (self != nil) {
        _width = nv12Buffer->width();
        _height = nv12Buffer->height();
        _strideY = nv12Buffer->StrideY();
        _strideUV = nv12Buffer->StrideUV();
        _y = [[NSData alloc] initWithBytesNoCopy:(void *)nv12Buffer->DataY() length:nv12Buffer->StrideY() * _height deallocator:^(__unused void * _Nonnull bytes, __unused NSUInteger length) {
            nv12Buffer.get();
        }];
        _uv = [[NSData alloc] initWithBytesNoCopy:(void *)nv12Buffer->DataUV() length:nv12Buffer->StrideUV() * _height deallocator:^(__unused void * _Nonnull bytes, __unused NSUInteger length) {
            nv12Buffer.get();
        }];
    }
    return self;
}

@end

@implementation CallVideoFrameI420Buffer

- (instancetype)initWithBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
    self = [super init];
    if (self != nil) {
        _width = i420Buffer->width();
        _height = i420Buffer->height();
        _strideY = i420Buffer->StrideY();
        _strideU = i420Buffer->StrideU();
        _strideV = i420Buffer->StrideV();
        _y = [[NSData alloc] initWithBytesNoCopy:(void *)i420Buffer->DataY() length:i420Buffer->StrideY() * _height deallocator:^(__unused void * _Nonnull bytes, __unused NSUInteger length) {
            i420Buffer.get();
        }];
        _u = [[NSData alloc] initWithBytesNoCopy:(void *)i420Buffer->DataU() length:i420Buffer->StrideU() * _height deallocator:^(__unused void * _Nonnull bytes, __unused NSUInteger length) {
            i420Buffer.get();
        }];
        _v = [[NSData alloc] initWithBytesNoCopy:(void *)i420Buffer->DataV() length:i420Buffer->StrideV() * _height deallocator:^(__unused void * _Nonnull bytes, __unused NSUInteger length) {
            i420Buffer.get();
        }];
    }
    return self;
}

@end
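// CallVideoFrameData carries one decoded frame (buffer, size, rotation, mirroring flags) across the
// C++/Objective-C boundary; GroupCallVideoSink adapts an Objective-C block into an
// rtc::VideoSinkInterface so it can be installed as a tgcalls video output.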
@interface CallVideoFrameData () {
}

@end

@implementation CallVideoFrameData

- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically {
    self = [super init];
    if (self != nil) {
        _buffer = buffer;
        _width = frame.width();
        _height = frame.height();
        switch (frame.rotation()) {
            case webrtc::kVideoRotation_0: {
                _orientation = OngoingCallVideoOrientation0;
                break;
            }
            case webrtc::kVideoRotation_90: {
                _orientation = OngoingCallVideoOrientation90;
                break;
            }
            case webrtc::kVideoRotation_180: {
                _orientation = OngoingCallVideoOrientation180;
                break;
            }
            case webrtc::kVideoRotation_270: {
                _orientation = OngoingCallVideoOrientation270;
                break;
            }
            default: {
                _orientation = OngoingCallVideoOrientation0;
                break;
            }
        }
        _mirrorHorizontally = mirrorHorizontally;
        _mirrorVertically = mirrorVertically;
    }
    return self;
}

@end

namespace {

class GroupCallVideoSinkAdapter : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
    GroupCallVideoSinkAdapter(void (^frameReceived)(webrtc::VideoFrame const &)) {
        _frameReceived = [frameReceived copy];
    }

    void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
        @autoreleasepool {
            if (_frameReceived) {
                _frameReceived(nativeVideoFrame);
            }
        }
    }

private:
    void (^_frameReceived)(webrtc::VideoFrame const &);
};

}

@interface GroupCallVideoSink : NSObject {
    std::shared_ptr<GroupCallVideoSinkAdapter> _adapter;
}

@end

@implementation GroupCallVideoSink

- (instancetype)initWithSink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink {
    self = [super init];
    if (self != nil) {
        void (^storedSink)(CallVideoFrameData * _Nonnull) = [sink copy];

        _adapter.reset(new GroupCallVideoSinkAdapter(^(webrtc::VideoFrame const &videoFrame) {
            id<CallVideoFrameBuffer> mappedBuffer = nil;

            bool mirrorHorizontally = false;
            bool mirrorVertically = false;

            if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
                id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> nativeBuffer = static_cast<webrtc::ObjCFrameBuffer *>(videoFrame.video_frame_buffer().get())->wrapped_frame_buffer();
                if ([nativeBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
                    RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)nativeBuffer;
                    mappedBuffer = [[CallVideoFrameNativePixelBuffer alloc] initWithPixelBuffer:pixelBuffer.pixelBuffer];
                }
                if ([nativeBuffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
                    if (((TGRTCCVPixelBuffer *)nativeBuffer).shouldBeMirrored) {
                        switch (videoFrame.rotation()) {
                            case webrtc::kVideoRotation_0:
                            case webrtc::kVideoRotation_180:
                                mirrorHorizontally = true;
                                break;
                            case webrtc::kVideoRotation_90:
                            case webrtc::kVideoRotation_270:
                                mirrorVertically = true;
                                break;
                            default:
                                break;
                        }
                    }
                }
            } else if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
                rtc::scoped_refptr<webrtc::NV12BufferInterface> nv12Buffer(static_cast<webrtc::NV12BufferInterface *>(videoFrame.video_frame_buffer().get()));
                mappedBuffer = [[CallVideoFrameNV12Buffer alloc] initWithBuffer:nv12Buffer];
            } else if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kI420) {
                rtc::scoped_refptr<webrtc::I420BufferInterface> i420Buffer(static_cast<webrtc::I420BufferInterface *>(videoFrame.video_frame_buffer().get()));
                mappedBuffer = [[CallVideoFrameI420Buffer alloc] initWithBuffer:i420Buffer];
            }

            if (storedSink && mappedBuffer) {
                storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
            }
        }));
    }
    return self;
}

- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
    return _adapter;
}

@end
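// OngoingCallThreadLocalContextVideoCapturer wraps tgcalls::VideoCaptureInterface: it creates a
// capturer for a camera device id (optionally forced to landscape), accepts externally produced
// pixel buffers on iOS via submitPixelBuffer:rotation:, and registers GroupCallVideoSink outputs
// through addVideoOutput:.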
@interface OngoingCallThreadLocalContextVideoCapturer () {
    bool _keepLandscape;
    std::shared_ptr<std::vector<uint8_t>> _croppingBuffer;

    int _nextSinkId;
    NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;
}

@end

@implementation OngoingCallThreadLocalContextVideoCapturer

- (instancetype _Nonnull)initWithInterface:(std::shared_ptr<tgcalls::VideoCaptureInterface>)interface {
    self = [super init];
    if (self != nil) {
        _interface = interface;
        _isProcessingCustomSampleBuffer = [[IsProcessingCustomSampleBufferFlag alloc] init];
        _croppingBuffer = std::make_shared<std::vector<uint8_t>>();
        _sinks = [[NSMutableDictionary alloc] init];
    }
    return self;
}

- (instancetype _Nonnull)initWithDeviceId:(NSString * _Nonnull)deviceId keepLandscape:(bool)keepLandscape {
    self = [super init];
    if (self != nil) {
        _keepLandscape = keepLandscape;

        std::string resolvedId = deviceId.UTF8String;
        if (keepLandscape) {
            resolvedId += std::string(":landscape");
        }

        _interface = tgcalls::VideoCaptureInterface::Create(tgcalls::StaticThreads::getThreads(), resolvedId);
        _sinks = [[NSMutableDictionary alloc] init];
    }
    return self;
}

#if TARGET_OS_IOS

tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls::VideoCaptureInterface *videoCapture) {
    return videoCapture
        ? static_cast<tgcalls::VideoCaptureInterfaceImpl*>(videoCapture)->object()->getSyncAssumingSameThread()
        : nullptr;
}

+ (instancetype _Nonnull)capturerWithExternalSampleBufferProvider {
    std::shared_ptr<tgcalls::VideoCaptureInterface> interface = tgcalls::VideoCaptureInterface::Create(tgcalls::StaticThreads::getThreads(), ":ios_custom", true);
    return [[OngoingCallThreadLocalContextVideoCapturer alloc] initWithInterface:interface];
}

#endif

- (void)dealloc {
}

#if TARGET_OS_IOS

- (void)submitPixelBuffer:(CVPixelBufferRef _Nonnull)pixelBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation {
    if (!pixelBuffer) {
        return;
    }

    RTCVideoRotation videoRotation = RTCVideoRotation_0;
    switch (rotation) {
        case OngoingCallVideoOrientation0:
            videoRotation = RTCVideoRotation_0;
            break;
        case OngoingCallVideoOrientation90:
            videoRotation = RTCVideoRotation_90;
            break;
        case OngoingCallVideoOrientation180:
            videoRotation = RTCVideoRotation_180;
            break;
        case OngoingCallVideoOrientation270:
            videoRotation = RTCVideoRotation_270;
            break;
    }

    if (_isProcessingCustomSampleBuffer.value) {
        return;
    }
    _isProcessingCustomSampleBuffer.value = true;

    tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask([interface = _interface, pixelBuffer = CFRetain(pixelBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation, isProcessingCustomSampleBuffer = _isProcessingCustomSampleBuffer]() {
        auto capture = GetVideoCaptureAssumingSameThread(interface.get());
        auto source = capture->source();
        if (source) {
            [CustomExternalCapturer passPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:videoRotation toSource:source croppingBuffer:*croppingBuffer];
        }
        CFRelease(pixelBuffer);
        isProcessingCustomSampleBuffer.value = false;
    });
}

#endif

- (GroupCallDisposable * _Nonnull)addVideoOutput:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink {
    int sinkId = _nextSinkId;
    _nextSinkId += 1;

    GroupCallVideoSink *storedSink = [[GroupCallVideoSink alloc] initWithSink:sink];
    _sinks[@(sinkId)] = storedSink;

    auto sinkReference = [storedSink sink];

    tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask([interface = _interface, sinkReference]() {
        interface->setOutput(sinkReference);
    });

    __weak OngoingCallThreadLocalContextVideoCapturer *weakSelf = self;
    return [[GroupCallDisposable alloc] initWithBlock:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            __strong OngoingCallThreadLocalContextVideoCapturer *strongSelf = weakSelf;
            if (!strongSelf) {
                return;
            }
            [strongSelf->_sinks removeObjectForKey:@(sinkId)];
        });
    }];
}

- (void)switchVideoInput:(NSString * _Nonnull)deviceId {
    std::string resolvedId = deviceId.UTF8String;
    if (_keepLandscape) {
        resolvedId += std::string(":landscape");
    }
    _interface->switchToDevice(resolvedId, false);
}

- (void)setIsVideoEnabled:(bool)isVideoEnabled {
    _interface->setState(isVideoEnabled ? tgcalls::VideoState::Active : tgcalls::VideoState::Paused);
}

- (std::shared_ptr<tgcalls::VideoCaptureInterface>)getInterface {
    return _interface;
}

- (void)setOnFatalError:(dispatch_block_t _Nullable)onError {
#if TARGET_OS_IOS
#else
    _interface->setOnFatalError(onError);
#endif
}

- (void)setOnPause:(void (^)(bool))onPause {
#if TARGET_OS_IOS
#else
    _interface->setOnPause(onPause);
#endif
}

- (void)setOnIsActiveUpdated:(void (^)(bool))onIsActiveUpdated {
    _interface->setOnIsActiveUpdated([onIsActiveUpdated](bool isActive) {
        if (onIsActiveUpdated) {
            onIsActiveUpdated(isActive);
        }
    });
}

- (void)makeOutgoingVideoView:(bool)requestClone completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable, UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
    __weak OngoingCallThreadLocalContextVideoCapturer *weakSelf = self;

    void (^makeDefault)() = ^{
        dispatch_async(dispatch_get_main_queue(), ^{
            __strong OngoingCallThreadLocalContextVideoCapturer *strongSelf = weakSelf;
            if (!strongSelf) {
                return;
            }
            std::shared_ptr<tgcalls::VideoCaptureInterface> interface = strongSelf->_interface;

            if (false && requestClone) {
                VideoSampleBufferView *remoteRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                interface->setOutput(sink);

                VideoSampleBufferView *cloneRenderer = nil;
                if (requestClone) {
                    cloneRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
                    cloneRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
#ifdef WEBRTC_IOS
                    [remoteRenderer setCloneTarget:cloneRenderer];
#endif
                }

                completion(remoteRenderer, cloneRenderer);
            } else if ([VideoMetalView isSupported]) {
                VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;

                VideoMetalView *cloneRenderer = nil;
                if (requestClone) {
                    cloneRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#ifdef WEBRTC_IOS
                    cloneRenderer.videoContentMode = UIViewContentModeScaleToFill;
                    [remoteRenderer setClone:cloneRenderer];
#else
                    cloneRenderer.videoContentMode = kCAGravityResizeAspectFill;
#endif
                }

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                interface->setOutput(sink);

                completion(remoteRenderer, cloneRenderer);
            } else {
                GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];
#ifndef WEBRTC_IOS
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
#endif

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                interface->setOutput(sink);

                completion(remoteRenderer, nil);
            }
        });
    };

    makeDefault();
}

@end
@interface OngoingCallThreadLocalContextWebrtcTerminationResult : NSObject

@property (nonatomic, readonly) tgcalls::FinalState finalState;

@end

@implementation OngoingCallThreadLocalContextWebrtcTerminationResult

- (instancetype)initWithFinalState:(tgcalls::FinalState)finalState {
    self = [super init];
    if (self != nil) {
        _finalState = finalState;
    }
    return self;
}

@end

@interface OngoingCallThreadLocalContextWebrtc () {
    NSString *_version;
    id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
    int32_t _contextId;

    OngoingCallNetworkTypeWebrtc _networkType;
    NSTimeInterval _callReceiveTimeout;
    NSTimeInterval _callRingTimeout;
    NSTimeInterval _callConnectTimeout;
    NSTimeInterval _callPacketTimeout;

    std::unique_ptr<tgcalls::Instance> _tgVoip;
    bool _didStop;

    OngoingCallStateWebrtc _state;
    OngoingCallVideoStateWebrtc _videoState;
    bool _connectedOnce;
    OngoingCallRemoteBatteryLevelWebrtc _remoteBatteryLevel;
    OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
    OngoingCallRemoteAudioStateWebrtc _remoteAudioState;
    OngoingCallVideoOrientationWebrtc _remoteVideoOrientation;
    __weak UIView<OngoingCallThreadLocalContextWebrtcVideoViewImpl> *_currentRemoteVideoRenderer;
    OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;

    int32_t _signalBars;
    NSData *_lastDerivedState;

    void (^_sendSignalingData)(NSData *);

    float _remotePreferredAspectRatio;
}

- (void)controllerStateChanged:(tgcalls::State)state;
- (void)signalBarsChanged:(int32_t)signalBars;

@end

@implementation VoipProxyServerWebrtc

- (instancetype _Nonnull)initWithHost:(NSString * _Nonnull)host port:(int32_t)port username:(NSString * _Nullable)username password:(NSString * _Nullable)password {
    self = [super init];
    if (self != nil) {
        _host = host;
        _port = port;
        _username = username;
        _password = password;
    }
    return self;
}

@end

static tgcalls::NetworkType callControllerNetworkTypeForType(OngoingCallNetworkTypeWebrtc type) {
    switch (type) {
        case OngoingCallNetworkTypeWifi:
            return tgcalls::NetworkType::WiFi;
        case OngoingCallNetworkTypeCellularGprs:
            return tgcalls::NetworkType::Gprs;
        case OngoingCallNetworkTypeCellular3g:
            return tgcalls::NetworkType::ThirdGeneration;
        case OngoingCallNetworkTypeCellularLte:
            return tgcalls::NetworkType::Lte;
        default:
            return tgcalls::NetworkType::ThirdGeneration;
    }
}

static tgcalls::DataSaving callControllerDataSavingForType(OngoingCallDataSavingWebrtc type) {
    switch (type) {
        case OngoingCallDataSavingNever:
            return tgcalls::DataSaving::Never;
        case OngoingCallDataSavingCellular:
            return tgcalls::DataSaving::Mobile;
        case OngoingCallDataSavingAlways:
            return tgcalls::DataSaving::Always;
        default:
            return tgcalls::DataSaving::Never;
    }
}
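// OngoingCallThreadLocalContextWebrtc drives a single 1:1 call: it builds a tgcalls::Descriptor from
// the Objective-C parameters, forwards engine callbacks (state, signal bars, remote media state,
// signaling data) back onto the caller-provided queue, and proxies control methods such as mute,
// network type and video capture changes to the underlying tgcalls::Instance.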
@implementation OngoingCallThreadLocalContextWebrtc

static void (*InternalVoipLoggingFunction)(NSString *) = NULL;

+ (void)setupLoggingFunction:(void (*)(NSString *))loggingFunction {
    InternalVoipLoggingFunction = loggingFunction;
    tgcalls::SetLoggingFunction([](std::string const &string) {
        if (InternalVoipLoggingFunction) {
            InternalVoipLoggingFunction([[NSString alloc] initWithUTF8String:string.c_str()]);
        }
    });
}

+ (void)applyServerConfig:(NSString *)string {
    if (string.length != 0) {
        //TgVoip::setGlobalServerConfig(std::string(string.UTF8String));
    }
}

+ (int32_t)maxLayer {
    return 92;
}

+ (void)ensureRegisteredImplementations {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        tgcalls::Register<tgcalls::InstanceImpl>();
        tgcalls::Register<tgcalls::InstanceV2_4_0_0Impl>();
        tgcalls::Register<tgcalls::InstanceV2Impl>();
        tgcalls::Register<tgcalls::InstanceV2ReferenceImpl>();
    });
}

+ (NSArray<NSString *> * _Nonnull)versionsWithIncludeReference:(bool)includeReference {
    [self ensureRegisteredImplementations];

    NSMutableArray<NSString *> *list = [[NSMutableArray alloc] init];
    for (const auto &version : tgcalls::Meta::Versions()) {
        [list addObject:[NSString stringWithUTF8String:version.c_str()]];
    }
    [list sortUsingComparator:^NSComparisonResult(NSString * _Nonnull lhs, NSString * _Nonnull rhs) {
        return [lhs compare:rhs];
    }];

    return list;
}

+ (tgcalls::ProtocolVersion)protocolVersionFromLibraryVersion:(NSString *)version {
    if ([version isEqualToString:@"2.7.7"]) {
        return tgcalls::ProtocolVersion::V0;
    } else if ([version isEqualToString:@"5.0.0"]) {
        return tgcalls::ProtocolVersion::V1;
    } else {
        return tgcalls::ProtocolVersion::V0;
    }
}
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId {
    self = [super init];
    if (self != nil) {
        _version = version;
        _queue = queue;
        assert([queue isCurrent]);

        assert([[OngoingCallThreadLocalContextWebrtc versionsWithIncludeReference:true] containsObject:version]);

        _callReceiveTimeout = 20.0;
        _callRingTimeout = 90.0;
        _callConnectTimeout = 30.0;
        _callPacketTimeout = 10.0;
        _remotePreferredAspectRatio = 0;
        _networkType = networkType;
        _sendSignalingData = [sendSignalingData copy];
        _videoCapturer = videoCapturer;
        if (videoCapturer != nil) {
            _videoState = OngoingCallVideoStateActive;
        } else {
            _videoState = OngoingCallVideoStateInactive;
        }
        _remoteVideoState = OngoingCallRemoteVideoStateInactive;
        _remoteAudioState = OngoingCallRemoteAudioStateActive;

        _remoteVideoOrientation = OngoingCallVideoOrientation0;

        std::vector<uint8_t> derivedStateValue;
        derivedStateValue.resize(derivedState.length);
        [derivedState getBytes:derivedStateValue.data() length:derivedState.length];

        std::unique_ptr<tgcalls::Proxy> proxyValue = nullptr;
        if (proxy != nil) {
            tgcalls::Proxy *proxyObject = new tgcalls::Proxy();
            proxyObject->host = proxy.host.UTF8String;
            proxyObject->port = (uint16_t)proxy.port;
            proxyObject->login = proxy.username.UTF8String ?: "";
            proxyObject->password = proxy.password.UTF8String ?: "";
            proxyValue = std::unique_ptr<tgcalls::Proxy>(proxyObject);
        }

        std::vector<tgcalls::RtcServer> parsedRtcServers;
        for (OngoingCallConnectionDescriptionWebrtc *connection in connections) {
            if (connection.hasStun) {
                parsedRtcServers.push_back((tgcalls::RtcServer){
                    .id = 0,
                    .host = connection.ip.UTF8String,
                    .port = (uint16_t)connection.port,
                    .login = "",
                    .password = "",
                    .isTurn = false,
                    .isTcp = false
                });
            }
            if (connection.hasTurn || connection.hasTcp) {
                parsedRtcServers.push_back((tgcalls::RtcServer){
                    .id = connection.reflectorId,
                    .host = connection.ip.UTF8String,
                    .port = (uint16_t)connection.port,
                    .login = connection.username.UTF8String,
                    .password = connection.password.UTF8String,
                    .isTurn = true,
                    .isTcp = connection.hasTcp
                });
            }
        }

        std::vector<std::string> preferredVideoCodecs;
        if (preferredVideoCodec != nil) {
            preferredVideoCodecs.push_back([preferredVideoCodec UTF8String]);
        }

        std::vector<tgcalls::Endpoint> endpoints;

        tgcalls::Config config = {
            .initializationTimeout = _callConnectTimeout,
            .receiveTimeout = _callPacketTimeout,
            .dataSaving = callControllerDataSavingForType(dataSaving),
            .enableP2P = (bool)allowP2P,
            .allowTCP = (bool)allowTCP,
            .enableStunMarking = (bool)enableStunMarking,
            .enableAEC = false,
            .enableNS = true,
            .enableAGC = true,
            .enableCallUpgrade = false,
            .logPath = std::string(logPath.length == 0 ? "" : logPath.UTF8String),
            .statsLogPath = std::string(statsLogPath.length == 0 ? "" : statsLogPath.UTF8String),
            .maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer],
            .enableHighBitrateVideo = true,
            .preferredVideoCodecs = preferredVideoCodecs,
            .protocolVersion = [OngoingCallThreadLocalContextWebrtc protocolVersionFromLibraryVersion:version]
        };

        auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
        memcpy(encryptionKeyValue->data(), key.bytes, key.length);

        tgcalls::EncryptionKey encryptionKey(encryptionKeyValue, isOutgoing);

        [OngoingCallThreadLocalContextWebrtc ensureRegisteredImplementations];

        __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
        _tgVoip = tgcalls::Meta::Create([version UTF8String], (tgcalls::Descriptor){
            .version = [version UTF8String],
            .config = config,
            .persistentState = (tgcalls::PersistentState){ derivedStateValue },
            .endpoints = endpoints,
            .proxy = std::move(proxyValue),
            .rtcServers = parsedRtcServers,
            .initialNetworkType = callControllerNetworkTypeForType(networkType),
            .encryptionKey = encryptionKey,
            .mediaDevicesConfig = tgcalls::MediaDevicesConfig {
                .audioInputId = [inputDeviceId UTF8String],
                .audioOutputId = [outputDeviceId UTF8String]
            },
            .videoCapture = [_videoCapturer getInterface],
            .stateUpdated = [weakSelf, queue](tgcalls::State state) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        [strongSelf controllerStateChanged:state];
                    }
                }];
            },
            .signalBarsUpdated = [weakSelf, queue](int value) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        strongSelf->_signalBars = value;
                        if (strongSelf->_signalBarsChanged) {
                            strongSelf->_signalBarsChanged(value);
                        }
                    }
                }];
            },
            .audioLevelUpdated = [weakSelf, queue](float level) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        if (strongSelf->_audioLevelUpdated) {
                            strongSelf->_audioLevelUpdated(level);
                        }
                    }
                }];
            },
            .remoteMediaStateUpdated = [weakSelf, queue](tgcalls::AudioState audioState, tgcalls::VideoState videoState) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        OngoingCallRemoteAudioStateWebrtc remoteAudioState;
                        OngoingCallRemoteVideoStateWebrtc remoteVideoState;
                        switch (audioState) {
                            case tgcalls::AudioState::Muted:
                                remoteAudioState = OngoingCallRemoteAudioStateMuted;
                                break;
                            case tgcalls::AudioState::Active:
                                remoteAudioState = OngoingCallRemoteAudioStateActive;
                                break;
                            default:
                                remoteAudioState = OngoingCallRemoteAudioStateMuted;
                                break;
                        }
                        switch (videoState) {
                            case tgcalls::VideoState::Inactive:
                                remoteVideoState = OngoingCallRemoteVideoStateInactive;
                                break;
                            case tgcalls::VideoState::Paused:
                                remoteVideoState = OngoingCallRemoteVideoStatePaused;
                                break;
                            case tgcalls::VideoState::Active:
                                remoteVideoState = OngoingCallRemoteVideoStateActive;
                                break;
                            default:
                                remoteVideoState = OngoingCallRemoteVideoStateInactive;
                                break;
                        }
                        if (strongSelf->_remoteVideoState != remoteVideoState || strongSelf->_remoteAudioState != remoteAudioState) {
                            strongSelf->_remoteVideoState = remoteVideoState;
                            strongSelf->_remoteAudioState = remoteAudioState;
                            if (strongSelf->_stateChanged) {
                                strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
                            }
                        }
                    }
                }];
            },
            .remoteBatteryLevelIsLowUpdated = [weakSelf, queue](bool isLow) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        OngoingCallRemoteBatteryLevelWebrtc remoteBatteryLevel;
                        if (isLow) {
                            remoteBatteryLevel = OngoingCallRemoteBatteryLevelLow;
                        } else {
                            remoteBatteryLevel = OngoingCallRemoteBatteryLevelNormal;
                        }
                        if (strongSelf->_remoteBatteryLevel != remoteBatteryLevel) {
                            strongSelf->_remoteBatteryLevel = remoteBatteryLevel;
                            if (strongSelf->_stateChanged) {
                                strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
                            }
                        }
                    }
                }];
            },
            .remotePrefferedAspectRatioUpdated = [weakSelf, queue](float value) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        strongSelf->_remotePreferredAspectRatio = value;
                        if (strongSelf->_stateChanged) {
                            strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
                        }
                    }
                }];
            },
            .signalingDataEmitted = [weakSelf, queue](const std::vector<uint8_t> &data) {
                NSData *mappedData = [[NSData alloc] initWithBytes:data.data() length:data.size()];
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        [strongSelf signalingDataEmitted:mappedData];
                    }
                }];
            },
            .initialInputDeviceId = inputDeviceId.UTF8String,
            .initialOutputDeviceId = outputDeviceId.UTF8String,
            // .createAudioDeviceModule = [](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
            //     return rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
            // }
        });
        _state = OngoingCallStateInitializing;
        _signalBars = 4;
    }
    return self;
}
- (void)dealloc {
    if (InternalVoipLoggingFunction) {
        InternalVoipLoggingFunction(@"OngoingCallThreadLocalContext: dealloc");
    }

    if (_tgVoip != NULL) {
        [self stop:nil];
    }
}

- (bool)needRate {
    return false;
}

- (void)beginTermination {
}

+ (void)stopWithTerminationResult:(OngoingCallThreadLocalContextWebrtcTerminationResult *)terminationResult completion:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
    if (completion) {
        if (terminationResult) {
            NSString *debugLog = [NSString stringWithUTF8String:terminationResult.finalState.debugLog.c_str()];
            if (completion) {
                completion(debugLog, terminationResult.finalState.trafficStats.bytesSentWifi, terminationResult.finalState.trafficStats.bytesReceivedWifi, terminationResult.finalState.trafficStats.bytesSentMobile, terminationResult.finalState.trafficStats.bytesReceivedMobile);
            }
        } else {
            if (completion) {
                completion(@"", 0, 0, 0, 0);
            }
        }
    }
}

- (void)stop:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
    if (!_tgVoip) {
        return;
    }
    if (completion == nil) {
        if (!_didStop) {
            _tgVoip->stop([](tgcalls::FinalState finalState) {
            });
        }
        _tgVoip.reset();
        return;
    }

    __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
    id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
    _didStop = true;
    _tgVoip->stop([weakSelf, queue, completion = [completion copy]](tgcalls::FinalState finalState) {
        [queue dispatch:^{
            __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
            if (strongSelf) {
                strongSelf->_tgVoip.reset();
            }
            OngoingCallThreadLocalContextWebrtcTerminationResult *terminationResult = [[OngoingCallThreadLocalContextWebrtcTerminationResult alloc] initWithFinalState:finalState];
            [OngoingCallThreadLocalContextWebrtc stopWithTerminationResult:terminationResult completion:completion];
        }];
    });
}

- (NSString *)debugInfo {
    if (_tgVoip != nullptr) {
        NSString *version = [self version];
        return [NSString stringWithFormat:@"WebRTC, Version: %@", version];
        //auto rawDebugString = _tgVoip->getDebugInfo();
        //return [NSString stringWithUTF8String:rawDebugString.c_str()];
    } else {
        return nil;
    }
}

- (NSString *)version {
    return _version;
}

- (NSData * _Nonnull)getDerivedState {
    if (_tgVoip) {
        auto persistentState = _tgVoip->getPersistentState();
        return [[NSData alloc] initWithBytes:persistentState.value.data() length:persistentState.value.size()];
    } else if (_lastDerivedState != nil) {
        return _lastDerivedState;
    } else {
        return [NSData data];
    }
}

- (void)controllerStateChanged:(tgcalls::State)state {
    OngoingCallStateWebrtc callState = OngoingCallStateInitializing;
    switch (state) {
        case tgcalls::State::Established:
            callState = OngoingCallStateConnected;
            break;
        case tgcalls::State::Failed:
            callState = OngoingCallStateFailed;
            break;
        case tgcalls::State::Reconnecting:
            callState = OngoingCallStateReconnecting;
            break;
        default:
            break;
    }

    if (_state != callState) {
        _state = callState;

        if (_stateChanged) {
            _stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
        }
    }
}

- (void)signalBarsChanged:(int32_t)signalBars {
    if (signalBars != _signalBars) {
        _signalBars = signalBars;

        if (_signalBarsChanged) {
            _signalBarsChanged(signalBars);
        }
    }
}

- (void)signalingDataEmitted:(NSData *)data {
    if (_sendSignalingData) {
        _sendSignalingData(data);
    }
}

- (void)addSignalingData:(NSData *)data {
    if (_tgVoip) {
        std::vector<uint8_t> mappedData;
        mappedData.resize(data.length);
        [data getBytes:mappedData.data() length:data.length];
        _tgVoip->receiveSignalingData(mappedData);
    }
}

- (void)setIsMuted:(bool)isMuted {
    if (_tgVoip) {
        _tgVoip->setMuteMicrophone(isMuted);
    }
}

- (void)setIsLowBatteryLevel:(bool)isLowBatteryLevel {
    if (_tgVoip) {
        _tgVoip->setIsLowBatteryLevel(isLowBatteryLevel);
    }
}

- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType {
    if (_networkType != networkType) {
        _networkType = networkType;
        if (_tgVoip) {
            _tgVoip->setNetworkType(callControllerNetworkTypeForType(networkType));
        }
    }
}

- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
    if (_tgVoip) {
        __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
        dispatch_async(dispatch_get_main_queue(), ^{
            if ([VideoMetalView isSupported]) {
                VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#if TARGET_OS_IPHONE
                remoteRenderer.videoContentMode = UIViewContentModeScaleToFill;
#else
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspect;
#endif

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                if (strongSelf) {
                    [remoteRenderer setOrientation:strongSelf->_remoteVideoOrientation];
                    strongSelf->_currentRemoteVideoRenderer = remoteRenderer;
                    strongSelf->_tgVoip->setIncomingVideoOutput(sink);
                }

                completion(remoteRenderer);
            } else {
                GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                if (strongSelf) {
                    [remoteRenderer setOrientation:strongSelf->_remoteVideoOrientation];
                    strongSelf->_currentRemoteVideoRenderer = remoteRenderer;
                    strongSelf->_tgVoip->setIncomingVideoOutput(sink);
                }

                completion(remoteRenderer);
            }
        });
    }
}

- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
    if (_tgVoip && _videoCapturer == nil) {
        _videoCapturer = videoCapturer;
        _tgVoip->setVideoCapture([_videoCapturer getInterface]);

        _videoState = OngoingCallVideoStateActive;

        if (_stateChanged) {
            _stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
        }
    }
}

- (void)setRequestedVideoAspect:(float)aspect {
    if (_tgVoip) {
        _tgVoip->setRequestedVideoAspect(aspect);
    }
}

- (void)disableVideo {
    if (_tgVoip) {
        _videoCapturer = nil;
        _tgVoip->setVideoCapture(nullptr);

        _videoState = OngoingCallVideoStateInactive;

        if (_stateChanged) {
            _stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
        }
    }
}

- (void)remotePrefferedAspectRatioUpdated:(float)remotePrefferedAspectRatio {
}

- (void)switchAudioOutput:(NSString * _Nonnull)deviceId {
    if (_tgVoip) {
        _tgVoip->setAudioOutputDevice(deviceId.UTF8String);
    }
}

- (void)switchAudioInput:(NSString * _Nonnull)deviceId {
    if (_tgVoip) {
        _tgVoip->setAudioInputDevice(deviceId.UTF8String);
    }
}

- (void)addExternalAudioData:(NSData * _Nonnull)data {
    if (_tgVoip) {
        std::vector<uint8_t> samples;
        samples.resize(data.length);
        [data getBytes:samples.data() length:data.length];
        _tgVoip->addExternalAudioSamples(std::move(samples));
    }
}

@end
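// The remainder of the file backs group calls: BroadcastPartTaskImpl and
// RequestMediaChannelDescriptionTaskImpl adapt Objective-C task objects to the tgcalls task
// interfaces, and GroupCallThreadLocalContext configures a tgcalls::GroupInstanceCustomImpl.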
  1100. namespace {
  1101. class BroadcastPartTaskImpl : public tgcalls::BroadcastPartTask {
  1102. public:
  1103. BroadcastPartTaskImpl(id<OngoingGroupCallBroadcastPartTask> task) {
  1104. _task = task;
  1105. }
  1106. virtual ~BroadcastPartTaskImpl() {
  1107. }
  1108. virtual void cancel() override {
  1109. [_task cancel];
  1110. }
  1111. private:
  1112. id<OngoingGroupCallBroadcastPartTask> _task;
  1113. };
  1114. class RequestMediaChannelDescriptionTaskImpl : public tgcalls::RequestMediaChannelDescriptionTask {
  1115. public:
  1116. RequestMediaChannelDescriptionTaskImpl(id<OngoingGroupCallMediaChannelDescriptionTask> task) {
  1117. _task = task;
  1118. }
  1119. virtual ~RequestMediaChannelDescriptionTaskImpl() {
  1120. }
  1121. virtual void cancel() override {
  1122. [_task cancel];
  1123. }
  1124. private:
  1125. id<OngoingGroupCallMediaChannelDescriptionTask> _task;
  1126. };
  1127. }
  1128. @interface GroupCallThreadLocalContext () {
  1129. id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
  1130. std::unique_ptr<tgcalls::GroupInstanceInterface> _instance;
  1131. OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;
  1132. void (^_networkStateUpdated)(GroupCallNetworkState);
  1133. int _nextSinkId;
  1134. NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;
  1135. }
  1136. @end
  1137. @implementation GroupCallThreadLocalContext
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
    networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated
    audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated
    inputDeviceId:(NSString * _Nonnull)inputDeviceId
    outputDeviceId:(NSString * _Nonnull)outputDeviceId
    videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
    requestMediaChannelDescriptions:(id<OngoingGroupCallMediaChannelDescriptionTask> _Nonnull (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull, void (^ _Nonnull)(NSArray<OngoingGroupCallMediaChannelDescription *> * _Nonnull)))requestMediaChannelDescriptions
    requestCurrentTime:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(void (^ _Nonnull)(int64_t)))requestCurrentTime
    requestAudioBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestAudioBroadcastPart
    requestVideoBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, int32_t, OngoingGroupCallRequestedVideoQuality, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestVideoBroadcastPart
    outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit
    videoContentType:(OngoingGroupCallVideoContentType)videoContentType
    enableNoiseSuppression:(bool)enableNoiseSuppression
    disableAudioInput:(bool)disableAudioInput
    preferX264:(bool)preferX264
    logPath:(NSString * _Nonnull)logPath {
    self = [super init];
    if (self != nil) {
        _queue = queue;

        tgcalls::PlatformInterface::SharedInstance()->preferX264 = preferX264;

        _sinks = [[NSMutableDictionary alloc] init];

        _networkStateUpdated = [networkStateUpdated copy];
        _videoCapturer = videoCapturer;

        tgcalls::VideoContentType _videoContentType;
        switch (videoContentType) {
            case OngoingGroupCallVideoContentTypeGeneric: {
                _videoContentType = tgcalls::VideoContentType::Generic;
                break;
            }
            case OngoingGroupCallVideoContentTypeScreencast: {
                _videoContentType = tgcalls::VideoContentType::Screencast;
                break;
            }
            case OngoingGroupCallVideoContentTypeNone: {
                _videoContentType = tgcalls::VideoContentType::None;
                break;
            }
            default: {
                _videoContentType = tgcalls::VideoContentType::None;
                break;
            }
        }

        std::vector<tgcalls::VideoCodecName> videoCodecPreferences;

        int minOutgoingVideoBitrateKbit = 500;
        bool disableOutgoingAudioProcessing = false;

        tgcalls::GroupConfig config;
        config.need_log = true;
        config.logPath.data = std::string(logPath.length == 0 ? "" : logPath.UTF8String);

        __weak GroupCallThreadLocalContext *weakSelf = self;
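
        // The descriptor callbacks below re-dispatch onto _queue (via weakSelf) before touching
        // this object, which suggests tgcalls may invoke them on its own internal threads; the
        // broadcast-part and media-description blocks are forwarded directly to the caller.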
        _instance.reset(new tgcalls::GroupInstanceCustomImpl((tgcalls::GroupInstanceDescriptor){
            .threads = tgcalls::StaticThreads::getThreads(),
            .config = config,
            .networkStateUpdated = [weakSelf, queue, networkStateUpdated](tgcalls::GroupNetworkState networkState) {
                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf == nil) {
                        return;
                    }
                    GroupCallNetworkState mappedState;
                    mappedState.isConnected = networkState.isConnected;
                    mappedState.isTransitioningFromBroadcastToRtc = networkState.isTransitioningFromBroadcastToRtc;
                    networkStateUpdated(mappedState);
                }];
            },
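            // Audio levels are flattened into a single NSArray of triples:
            // [ssrc, level, voice, ssrc, level, voice, ...], in the order reported by tgcalls.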
            .audioLevelsUpdated = [audioLevelsUpdated](tgcalls::GroupLevelsUpdate const &levels) {
                NSMutableArray *result = [[NSMutableArray alloc] init];
                for (auto &it : levels.updates) {
                    [result addObject:@(it.ssrc)];
                    [result addObject:@(it.value.level)];
                    [result addObject:@(it.value.voice)];
                }
                audioLevelsUpdated(result);
            },
            .initialInputDeviceId = inputDeviceId.UTF8String,
            .initialOutputDeviceId = outputDeviceId.UTF8String,
            .videoCapture = [_videoCapturer getInterface],
            .requestCurrentTime = [requestCurrentTime](std::function<void(int64_t)> completion) {
                id<OngoingGroupCallBroadcastPartTask> task = requestCurrentTime(^(int64_t result) {
                    completion(result);
                });
                return std::make_shared<BroadcastPartTaskImpl>(task);
            },
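            // Fetches an audio broadcast part from the host application and converts the
            // returned OngoingGroupCallBroadcastPart (status, timestamps, ogg payload) into a
            // tgcalls::BroadcastPart before passing it to the completion callback.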
            .requestAudioBroadcastPart = [requestAudioBroadcastPart](int64_t timestampMilliseconds, int64_t durationMilliseconds, std::function<void(tgcalls::BroadcastPart &&)> completion) -> std::shared_ptr<tgcalls::BroadcastPartTask> {
                id<OngoingGroupCallBroadcastPartTask> task = requestAudioBroadcastPart(timestampMilliseconds, durationMilliseconds, ^(OngoingGroupCallBroadcastPart * _Nullable part) {
                    tgcalls::BroadcastPart parsedPart;
                    parsedPart.timestampMilliseconds = part.timestampMilliseconds;
                    parsedPart.responseTimestamp = part.responseTimestamp;

                    tgcalls::BroadcastPart::Status mappedStatus;
                    switch (part.status) {
                        case OngoingGroupCallBroadcastPartStatusSuccess: {
                            mappedStatus = tgcalls::BroadcastPart::Status::Success;
                            break;
                        }
                        case OngoingGroupCallBroadcastPartStatusNotReady: {
                            mappedStatus = tgcalls::BroadcastPart::Status::NotReady;
                            break;
                        }
                        case OngoingGroupCallBroadcastPartStatusResyncNeeded: {
                            mappedStatus = tgcalls::BroadcastPart::Status::ResyncNeeded;
                            break;
                        }
                        default: {
                            mappedStatus = tgcalls::BroadcastPart::Status::NotReady;
                            break;
                        }
                    }
                    parsedPart.status = mappedStatus;

                    parsedPart.data.resize(part.oggData.length);
                    [part.oggData getBytes:parsedPart.data.data() length:part.oggData.length];

                    completion(std::move(parsedPart));
                });
                return std::make_shared<BroadcastPartTaskImpl>(task);
            },
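            // Same conversion as the audio path above, with an additional mapping of the
            // requested tgcalls quality level onto OngoingGroupCallRequestedVideoQuality for
            // the host-side video broadcast part request.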
            .requestVideoBroadcastPart = [requestVideoBroadcastPart](int64_t timestampMilliseconds, int64_t durationMilliseconds, int32_t channelId, tgcalls::VideoChannelDescription::Quality quality, std::function<void(tgcalls::BroadcastPart &&)> completion) -> std::shared_ptr<tgcalls::BroadcastPartTask> {
                OngoingGroupCallRequestedVideoQuality mappedQuality;
                switch (quality) {
                    case tgcalls::VideoChannelDescription::Quality::Thumbnail: {
                        mappedQuality = OngoingGroupCallRequestedVideoQualityThumbnail;
                        break;
                    }
                    case tgcalls::VideoChannelDescription::Quality::Medium: {
                        mappedQuality = OngoingGroupCallRequestedVideoQualityMedium;
                        break;
                    }
                    case tgcalls::VideoChannelDescription::Quality::Full: {
                        mappedQuality = OngoingGroupCallRequestedVideoQualityFull;
                        break;
                    }
                    default: {
                        mappedQuality = OngoingGroupCallRequestedVideoQualityThumbnail;
                        break;
                    }
                }
                id<OngoingGroupCallBroadcastPartTask> task = requestVideoBroadcastPart(timestampMilliseconds, durationMilliseconds, channelId, mappedQuality, ^(OngoingGroupCallBroadcastPart * _Nullable part) {
                    tgcalls::BroadcastPart parsedPart;
                    parsedPart.timestampMilliseconds = part.timestampMilliseconds;
                    parsedPart.responseTimestamp = part.responseTimestamp;

                    tgcalls::BroadcastPart::Status mappedStatus;
                    switch (part.status) {
                        case OngoingGroupCallBroadcastPartStatusSuccess: {
                            mappedStatus = tgcalls::BroadcastPart::Status::Success;
                            break;
                        }
                        case OngoingGroupCallBroadcastPartStatusNotReady: {
                            mappedStatus = tgcalls::BroadcastPart::Status::NotReady;
                            break;
                        }
                        case OngoingGroupCallBroadcastPartStatusResyncNeeded: {
                            mappedStatus = tgcalls::BroadcastPart::Status::ResyncNeeded;
                            break;
                        }
                        default: {
                            mappedStatus = tgcalls::BroadcastPart::Status::NotReady;
                            break;
                        }
                    }
                    parsedPart.status = mappedStatus;

                    parsedPart.data.resize(part.oggData.length);
                    [part.oggData getBytes:parsedPart.data.data() length:part.oggData.length];

                    completion(std::move(parsedPart));
                });
                return std::make_shared<BroadcastPartTaskImpl>(task);
            },
            .outgoingAudioBitrateKbit = outgoingAudioBitrateKbit,
            .disableOutgoingAudioProcessing = disableOutgoingAudioProcessing,
            .disableAudioInput = disableAudioInput,
            .videoContentType = _videoContentType,
            .videoCodecPreferences = videoCodecPreferences,
            .initialEnableNoiseSuppression = enableNoiseSuppression,
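            // Resolves SSRCs to media channel descriptions through the host application:
            // the uint32_t SSRCs are boxed into NSNumbers, and the returned descriptions are
            // mapped back to tgcalls::MediaChannelDescription (unknown channel types are skipped).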
            .requestMediaChannelDescriptions = [requestMediaChannelDescriptions](std::vector<uint32_t> const &ssrcs, std::function<void(std::vector<tgcalls::MediaChannelDescription> &&)> completion) -> std::shared_ptr<tgcalls::RequestMediaChannelDescriptionTask> {
                NSMutableArray<NSNumber *> *mappedSsrcs = [[NSMutableArray alloc] init];
                for (auto ssrc : ssrcs) {
                    [mappedSsrcs addObject:[NSNumber numberWithUnsignedInt:ssrc]];
                }
                id<OngoingGroupCallMediaChannelDescriptionTask> task = requestMediaChannelDescriptions(mappedSsrcs, ^(NSArray<OngoingGroupCallMediaChannelDescription *> *channels) {
                    std::vector<tgcalls::MediaChannelDescription> mappedChannels;
                    for (OngoingGroupCallMediaChannelDescription *channel in channels) {
                        tgcalls::MediaChannelDescription mappedChannel;
                        switch (channel.type) {
                            case OngoingGroupCallMediaChannelTypeAudio: {
                                mappedChannel.type = tgcalls::MediaChannelDescription::Type::Audio;
                                break;
                            }
                            case OngoingGroupCallMediaChannelTypeVideo: {
                                mappedChannel.type = tgcalls::MediaChannelDescription::Type::Video;
                                break;
                            }
                            default: {
                                continue;
                            }
                        }
                        mappedChannel.audioSsrc = channel.audioSsrc;
                        mappedChannel.videoInformation = channel.videoDescription.UTF8String ?: "";
                        mappedChannels.push_back(std::move(mappedChannel));
                    }
                    completion(std::move(mappedChannels));
                });
                return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
            },
            .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit
        }));
    }
    return self;
}

- (void)stop {
    if (_instance) {
        _instance->stop();
        _instance.reset();
    }
}
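
// Maps the public connection mode enum onto tgcalls::GroupConnectionMode; unknown values
// fall back to GroupConnectionModeNone before being applied to the instance.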
- (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast {
    if (_instance) {
        tgcalls::GroupConnectionMode mappedConnectionMode;
        switch (connectionMode) {
            case OngoingCallConnectionModeNone: {
                mappedConnectionMode = tgcalls::GroupConnectionMode::GroupConnectionModeNone;
                break;
            }
            case OngoingCallConnectionModeRtc: {
                mappedConnectionMode = tgcalls::GroupConnectionMode::GroupConnectionModeRtc;
                break;
            }
            case OngoingCallConnectionModeBroadcast: {
                mappedConnectionMode = tgcalls::GroupConnectionMode::GroupConnectionModeBroadcast;
                break;
            }
            default: {
                mappedConnectionMode = tgcalls::GroupConnectionMode::GroupConnectionModeNone;
                break;
            }
        }
        _instance->setConnectionMode(mappedConnectionMode, keepBroadcastConnectedIfWasEnabled, isUnifiedBroadcast);
    }
}

- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
    if (_instance) {
        _instance->emitJoinPayload([completion](tgcalls::GroupJoinPayload const &payload) {
            completion([NSString stringWithUTF8String:payload.json.c_str()], payload.audioSsrc);
        });
    }
}

- (void)setJoinResponsePayload:(NSString * _Nonnull)payload {
    if (_instance) {
        _instance->setJoinResponsePayload(payload.UTF8String);
    }
}

- (void)removeSsrcs:(NSArray<NSNumber *> * _Nonnull)ssrcs {
    if (_instance) {
        std::vector<uint32_t> values;
        for (NSNumber *ssrc in ssrcs) {
            values.push_back([ssrc unsignedIntValue]);
        }
        _instance->removeSsrcs(values);
    }
}

- (void)removeIncomingVideoSource:(uint32_t)ssrc {
    if (_instance) {
        _instance->removeIncomingVideoSource(ssrc);
    }
}

- (void)setIsMuted:(bool)isMuted {
    if (_instance) {
        _instance->setIsMuted(isMuted);
    }
}

- (void)setIsNoiseSuppressionEnabled:(bool)isNoiseSuppressionEnabled {
    if (_instance) {
        _instance->setIsNoiseSuppressionEnabled(isNoiseSuppressionEnabled);
    }
}
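
// Note: both requestVideo:completion: and disableVideo: only switch the video capture on the
// instance; the completion blocks they receive are not invoked here.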
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer completion:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
    if (_instance) {
        _instance->setVideoCapture([videoCapturer getInterface]);
    }
}

- (void)disableVideo:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
    if (_instance) {
        _instance->setVideoCapture(nullptr);
    }
}

- (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume {
    if (_instance) {
        _instance->setVolume(ssrc, volume);
    }
}
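
// Converts the requested video channel list (endpoint id, SSRC groups, min/max quality) into
// tgcalls::VideoChannelDescription values and forwards them to the instance; channels with an
// unrecognized quality value keep the description's default quality.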
- (void)setRequestedVideoChannels:(NSArray<OngoingGroupCallRequestedVideoChannel *> * _Nonnull)requestedVideoChannels {
    if (_instance) {
        std::vector<tgcalls::VideoChannelDescription> mappedChannels;
        for (OngoingGroupCallRequestedVideoChannel *channel : requestedVideoChannels) {
            tgcalls::VideoChannelDescription description;
            description.audioSsrc = channel.audioSsrc;
            description.endpointId = channel.endpointId.UTF8String ?: "";
            for (OngoingGroupCallSsrcGroup *group in channel.ssrcGroups) {
                tgcalls::MediaSsrcGroup parsedGroup;
                parsedGroup.semantics = group.semantics.UTF8String ?: "";
                for (NSNumber *ssrc in group.ssrcs) {
                    parsedGroup.ssrcs.push_back([ssrc unsignedIntValue]);
                }
                description.ssrcGroups.push_back(std::move(parsedGroup));
            }
            switch (channel.minQuality) {
                case OngoingGroupCallRequestedVideoQualityThumbnail: {
                    description.minQuality = tgcalls::VideoChannelDescription::Quality::Thumbnail;
                    break;
                }
                case OngoingGroupCallRequestedVideoQualityMedium: {
                    description.minQuality = tgcalls::VideoChannelDescription::Quality::Medium;
                    break;
                }
                case OngoingGroupCallRequestedVideoQualityFull: {
                    description.minQuality = tgcalls::VideoChannelDescription::Quality::Full;
                    break;
                }
                default: {
                    break;
                }
            }
            switch (channel.maxQuality) {
                case OngoingGroupCallRequestedVideoQualityThumbnail: {
                    description.maxQuality = tgcalls::VideoChannelDescription::Quality::Thumbnail;
                    break;
                }
                case OngoingGroupCallRequestedVideoQualityMedium: {
                    description.maxQuality = tgcalls::VideoChannelDescription::Quality::Medium;
                    break;
                }
                case OngoingGroupCallRequestedVideoQualityFull: {
                    description.maxQuality = tgcalls::VideoChannelDescription::Quality::Full;
                    break;
                }
                default: {
                    break;
                }
            }
            mappedChannels.push_back(std::move(description));
        }
        _instance->setRequestedVideoChannels(std::move(mappedChannels));
    }
}

- (void)switchAudioOutput:(NSString * _Nonnull)deviceId {
    if (_instance) {
        _instance->setAudioOutputDevice(deviceId.UTF8String);
    }
}

- (void)switchAudioInput:(NSString * _Nonnull)deviceId {
    if (_instance) {
        _instance->setAudioInputDevice(deviceId.UTF8String);
    }
}
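
// Creates an incoming video view (and an optional clone) on the main queue and registers its
// sink with the instance on _queue. Renderer selection: VideoSampleBufferView on iOS,
// VideoMetalView where Metal is supported, otherwise a GLVideoView fallback.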
- (void)makeIncomingVideoViewWithEndpointId:(NSString * _Nonnull)endpointId requestClone:(bool)requestClone completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable, UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
    if (_instance) {
        __weak GroupCallThreadLocalContext *weakSelf = self;
        id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
        dispatch_async(dispatch_get_main_queue(), ^{
            BOOL useSampleBuffer = NO;
#ifdef WEBRTC_IOS
            useSampleBuffer = YES;
#endif
            if (useSampleBuffer) {
                VideoSampleBufferView *remoteRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];

                VideoSampleBufferView *cloneRenderer = nil;
                if (requestClone) {
                    cloneRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
                    cloneRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
#ifdef WEBRTC_IOS
                    [remoteRenderer setCloneTarget:cloneRenderer];
#endif
                }

                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->addIncomingVideoOutput(endpointId.UTF8String, sink);
                    }
                }];

                completion(remoteRenderer, cloneRenderer);
            } else if ([VideoMetalView isSupported]) {
                VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#ifdef WEBRTC_IOS
                remoteRenderer.videoContentMode = UIViewContentModeScaleToFill;
#else
                remoteRenderer.videoContentMode = kCAGravityResizeAspectFill;
#endif

                VideoMetalView *cloneRenderer = nil;
                if (requestClone) {
                    cloneRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#ifdef WEBRTC_IOS
                    cloneRenderer.videoContentMode = UIViewContentModeScaleToFill;
#else
                    cloneRenderer.videoContentMode = kCAGravityResizeAspectFill;
#endif
                }

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> cloneSink = [cloneRenderer getSink];

                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->addIncomingVideoOutput(endpointId.UTF8String, sink);
                        if (cloneSink) {
                            strongSelf->_instance->addIncomingVideoOutput(endpointId.UTF8String, cloneSink);
                        }
                    }
                }];

                completion(remoteRenderer, cloneRenderer);
            } else {
                GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];
                // [remoteRenderer setVideoContentMode:kCAGravityResizeAspectFill];

                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];

                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->addIncomingVideoOutput(endpointId.UTF8String, sink);
                    }
                }];

                completion(remoteRenderer, nil);
            }
        });
    }
}
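
// Registers a block-based video sink for an endpoint, keyed by an incrementing sink id, and
// returns a GroupCallDisposable that removes the stored sink on _queue when disposed. As
// written, disposal only releases the local reference; nothing here detaches the sink from
// the tgcalls instance.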
- (GroupCallDisposable * _Nonnull)addVideoOutputWithEndpointId:(NSString * _Nonnull)endpointId sink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink {
    int sinkId = _nextSinkId;
    _nextSinkId += 1;

    GroupCallVideoSink *storedSink = [[GroupCallVideoSink alloc] initWithSink:sink];
    _sinks[@(sinkId)] = storedSink;

    if (_instance) {
        _instance->addIncomingVideoOutput(endpointId.UTF8String, [storedSink sink]);
    }

    __weak GroupCallThreadLocalContext *weakSelf = self;
    id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
    return [[GroupCallDisposable alloc] initWithBlock:^{
        [queue dispatch:^{
            __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
            if (!strongSelf) {
                return;
            }
            [strongSelf->_sinks removeObjectForKey:@(sinkId)];
        }];
    }];
}

- (void)addExternalAudioData:(NSData * _Nonnull)data {
    if (_instance) {
        std::vector<uint8_t> samples;
        samples.resize(data.length);
        [data getBytes:samples.data() length:data.length];
        _instance->addExternalAudioSamples(std::move(samples));
    }
}
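
// Asynchronously collects per-endpoint incoming video stats from tgcalls and repackages them
// into an OngoingGroupCallStats keyed by endpoint id.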
- (void)getStats:(void (^ _Nonnull)(OngoingGroupCallStats * _Nonnull))completion {
    if (_instance) {
        _instance->getStats([completion](tgcalls::GroupInstanceStats stats) {
            NSMutableDictionary<NSString *, OngoingGroupCallIncomingVideoStats *> *incomingVideoStats = [[NSMutableDictionary alloc] init];

            for (const auto &it : stats.incomingVideoStats) {
                incomingVideoStats[[NSString stringWithUTF8String:it.first.c_str()]] = [[OngoingGroupCallIncomingVideoStats alloc] initWithReceivingQuality:it.second.receivingQuality availableQuality:it.second.availableQuality];
            }

            completion([[OngoingGroupCallStats alloc] initWithIncomingVideoStats:incomingVideoStats]);
        });
    }
}

@end
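
// The remaining classes are plain value objects that carry configuration and results across
// the Objective-C boundary; their initializers simply store the provided properties.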
@implementation OngoingGroupCallMediaChannelDescription

- (instancetype _Nonnull)initWithType:(OngoingGroupCallMediaChannelType)type
    audioSsrc:(uint32_t)audioSsrc
    videoDescription:(NSString * _Nullable)videoDescription {
    self = [super init];
    if (self != nil) {
        _type = type;
        _audioSsrc = audioSsrc;
        _videoDescription = videoDescription;
    }
    return self;
}

@end

@implementation OngoingGroupCallBroadcastPart

- (instancetype _Nonnull)initWithTimestampMilliseconds:(int64_t)timestampMilliseconds responseTimestamp:(double)responseTimestamp status:(OngoingGroupCallBroadcastPartStatus)status oggData:(NSData * _Nonnull)oggData {
    self = [super init];
    if (self != nil) {
        _timestampMilliseconds = timestampMilliseconds;
        _responseTimestamp = responseTimestamp;
        _status = status;
        _oggData = oggData;
    }
    return self;
}

@end

@implementation OngoingGroupCallSsrcGroup

- (instancetype)initWithSemantics:(NSString * _Nonnull)semantics ssrcs:(NSArray<NSNumber *> * _Nonnull)ssrcs {
    self = [super init];
    if (self != nil) {
        _semantics = semantics;
        _ssrcs = ssrcs;
    }
    return self;
}

@end

@implementation OngoingGroupCallRequestedVideoChannel

- (instancetype)initWithAudioSsrc:(uint32_t)audioSsrc endpointId:(NSString * _Nonnull)endpointId ssrcGroups:(NSArray<OngoingGroupCallSsrcGroup *> * _Nonnull)ssrcGroups minQuality:(OngoingGroupCallRequestedVideoQuality)minQuality maxQuality:(OngoingGroupCallRequestedVideoQuality)maxQuality {
    self = [super init];
    if (self != nil) {
        _audioSsrc = audioSsrc;
        _endpointId = endpointId;
        _ssrcGroups = ssrcGroups;
        _minQuality = minQuality;
        _maxQuality = maxQuality;
    }
    return self;
}

@end

@implementation OngoingGroupCallIncomingVideoStats

- (instancetype _Nonnull)initWithReceivingQuality:(int)receivingQuality availableQuality:(int)availableQuality {
    self = [super init];
    if (self != nil) {
        _receivingQuality = receivingQuality;
        _availableQuality = availableQuality;
    }
    return self;
}

@end

@implementation OngoingGroupCallStats

- (instancetype _Nonnull)initWithIncomingVideoStats:(NSDictionary<NSString *, OngoingGroupCallIncomingVideoStats *> * _Nonnull)incomingVideoStats {
    self = [super init];
    if (self != nil) {
        _incomingVideoStats = incomingVideoStats;
    }
    return self;
}

@end