webrtc_video_track.cpp

// This file is part of Desktop App Toolkit,
// a set of libraries for developing nice desktop applications.
//
// For license and copyright information please follow this link:
// https://github.com/desktop-app/legal/blob/master/LEGAL
//
#include "webrtc/webrtc_video_track.h"

#include "ffmpeg/ffmpeg_utility.h"

#include <QtGui/QImage>
#include <QtGui/QPainter>

#include <api/video/video_sink_interface.h>
#include <api/video/video_frame.h>

namespace Webrtc {
namespace {

constexpr auto kDropFramesWhileInactive = 5 * crl::time(1000);
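
// Whether the original frame image can be handed out for this request
// as-is, without any scaling, letterboxing or rotation.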
[[nodiscard]] bool GoodForRequest(
		const QImage &image,
		int rotation,
		const FrameRequest &request) {
	if (request.resize.isEmpty()) {
		return true;
	} else if (rotation != 0) {
		return false;
	//} else if ((request.radius != ImageRoundRadius::None)
	//	&& ((request.corners & RectPart::AllCorners) != 0)) {
	//	return false;
	}
	return (request.resize == request.outer)
		&& (request.resize == image.size());
}

void PaintFrameOuter(QPainter &p, const QRect &inner, QSize outer) {
	const auto left = inner.x();
	const auto right = outer.width() - inner.width() - left;
	const auto top = inner.y();
	const auto bottom = outer.height() - inner.height() - top;
	if (left > 0) {
		p.fillRect(0, 0, left, outer.height(), Qt::black);
	}
	if (right > 0) {
		p.fillRect(
			outer.width() - right,
			0,
			right,
			outer.height(),
			Qt::black);
	}
	if (top > 0) {
		p.fillRect(left, 0, inner.width(), top, Qt::black);
	}
	if (bottom > 0) {
		p.fillRect(
			left,
			outer.height() - bottom,
			inner.width(),
			bottom,
			Qt::black);
	}
}

void PaintFrameInner(
		QPainter &p,
		QRect to,
		const QImage &original,
		bool alpha,
		int rotation) {
	const auto rotated = [](QRect rect, int rotation) {
		switch (rotation) {
		case 0: return rect;
		case 90: return QRect(
			rect.y(),
			-rect.x() - rect.width(),
			rect.height(),
			rect.width());
		case 180: return QRect(
			-rect.x() - rect.width(),
			-rect.y() - rect.height(),
			rect.width(),
			rect.height());
		case 270: return QRect(
			-rect.y() - rect.height(),
			rect.x(),
			rect.height(),
			rect.width());
		}
		Unexpected("Rotation in PaintFrameInner.");
	};

	const auto hints = {
		QPainter::Antialiasing,
		QPainter::SmoothPixmapTransform,
		QPainter::TextAntialiasing
	};
	for (const auto hint : hints) {
		p.setRenderHint(hint);
	}

	if (rotation) {
		p.rotate(rotation);
	}
	const auto rect = rotated(to, rotation);
	if (alpha) {
		p.fillRect(rect, Qt::white);
	}
	p.drawImage(rect, original);
}

void PaintFrameContent(
		QPainter &p,
		const QImage &original,
		bool alpha,
		int rotation,
		const FrameRequest &request) {
	const auto full = request.outer.isEmpty()
		? original.size()
		: request.outer;
	const auto size = request.resize.isEmpty()
		? original.size()
		: request.resize;
	const auto to = QRect(
		(full.width() - size.width()) / 2,
		(full.height() - size.height()) / 2,
		size.width(),
		size.height());
	PaintFrameOuter(p, to, full);
	PaintFrameInner(p, to, original, alpha, rotation);
}

void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
	//if (!(request.corners & RectPart::AllCorners)
	//	|| (request.radius == ImageRoundRadius::None)) {
	//	return;
	//}
	//Images::prepareRound(storage, request.radius, request.corners);
}
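
// Renders the original image into `storage` according to the request:
// letterboxes to request.outer, scales to request.resize and applies the
// rotation. The passed-in storage is reused when it already fits the
// target size, otherwise a fresh frame storage is allocated.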
QImage PrepareByRequest(
		const QImage &original,
		bool alpha,
		int rotation,
		const FrameRequest &request,
		QImage storage) {
	Expects(!request.outer.isEmpty() || alpha);

	const auto outer = request.outer.isEmpty()
		? original.size()
		: request.outer;
	if (!FFmpeg::GoodStorageForFrame(storage, outer)) {
		storage = FFmpeg::CreateFrameStorage(outer);
	}

	QPainter p(&storage);
	PaintFrameContent(p, original, alpha, rotation, request);
	p.end();

	ApplyFrameRounding(storage, request);
	return storage;
}

} // namespace

struct VideoTrack::Frame {
	int64 mcstimestamp = 0;
	QImage original;
	QImage prepared;
	rtc::scoped_refptr<webrtc::I420BufferInterface> native;
	FrameYUV420 yuv420;
	FrameRequest request = FrameRequest::NonStrict();

	FrameFormat format = FrameFormat::None;
	int rotation = 0;
	bool displayed = false;
	bool alpha = false;
	bool requireARGB32 = true;
};
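
// Receives frames from WebRTC and hands them to the main thread through
// a small ring of kFramesCount frame slots. A single atomic _counter in
// [0, 2 * kFramesCount) coordinates the exchange: the sink (on the thread
// that delivers OnFrame) advances it from even to odd in
// presentNextFrame() when a new frame is published, and the main thread
// advances it from odd to even in markFrameShown() after rendering.
// frameForPaint() always reads the slot at index _counter / 2.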
class VideoTrack::Sink final
	: public rtc::VideoSinkInterface<webrtc::VideoFrame>
	, public std::enable_shared_from_this<Sink> {
public:
	explicit Sink(bool requireARGB32);

	using PrepareFrame = not_null<Frame*>;
	using PrepareState = bool;
	struct FrameWithIndex {
		not_null<Frame*> frame;
		int index = -1;
	};

	[[nodiscard]] bool firstPresentHappened() const;

	// Called from the main thread.
	void markFrameShown();
	[[nodiscard]] not_null<Frame*> frameForPaint();
	[[nodiscard]] FrameWithIndex frameForPaintWithIndex();
	[[nodiscard]] rpl::producer<> renderNextFrameOnMain() const;
	void destroyFrameForPaint();

	void OnFrame(const webrtc::VideoFrame &nativeVideoFrame) override;

private:
	struct FrameForDecode {
		not_null<Frame*> frame;
		int counter = 0;
	};

	[[nodiscard]] FrameForDecode nextFrameForDecode();
	void presentNextFrame(const FrameForDecode &frame);
	[[nodiscard]] not_null<Frame*> getFrame(int index);
	[[nodiscard]] not_null<const Frame*> getFrame(int index) const;
	[[nodiscard]] int counter() const;
	bool decodeFrame(
		const webrtc::VideoFrame &nativeVideoFrame,
		not_null<Frame*> frame);
	void notifyFrameDecoded();

	FFmpeg::SwscalePointer _decodeContext;
	std::atomic<int> _counter = 0;

	// Main thread.
	int _counterCycle = 0;

	static constexpr auto kFramesCount = 3;
	std::array<Frame, kFramesCount> _frames;
	rpl::event_stream<> _renderNextFrameOnMain;
};

VideoTrack::Sink::Sink(bool requireARGB32) {
	for (auto &frame : _frames) {
		frame.requireARGB32 = requireARGB32;
	}
}

void VideoTrack::Sink::OnFrame(const webrtc::VideoFrame &nativeVideoFrame) {
	const auto decode = nextFrameForDecode();
	if (decodeFrame(nativeVideoFrame, decode.frame)) {
		PrepareFrameByRequests(decode.frame, nativeVideoFrame.rotation());
		presentNextFrame(decode);
	}
}
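
// Chooses the slot to decode the next frame into. For every counter value
// in [0, 2 * kFramesCount) the resulting index differs from counter / 2,
// so decoding never writes into the slot the main thread may currently
// be painting from.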
auto VideoTrack::Sink::nextFrameForDecode() -> FrameForDecode {
	const auto current = counter();
	const auto index = ((current + 3) / 2) % kFramesCount;
	const auto frame = getFrame(index);
	return { frame, current };
}
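
// Publishes the just-decoded frame by bumping the counter to the next odd
// value. This happens only while the counter is even, i.e. after the main
// thread has marked the previously published frame as shown; frames
// decoded while the counter is odd are simply dropped.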
void VideoTrack::Sink::presentNextFrame(const FrameForDecode &frame) {
	// Release this frame to the main thread for rendering.
	const auto present = [&](int counter) {
		Expects(counter + 1 < 2 * kFramesCount);

		_counter.store(counter + 1, std::memory_order_release);
		notifyFrameDecoded();
	};
	switch (frame.counter) {
	case 0: present(0);
	case 1: return;
	case 2: present(2);
	case 3: return;
	case 4: present(4);
	case 5: return;
	//case 6: present(6);
	//case 7: return;
	}
	Unexpected("Counter value in VideoTrack::Sink::presentNextFrame.");
}
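
// Converts the incoming webrtc::VideoFrame into the given frame slot.
// When the consumer does not require ARGB32, the I420 buffer is kept
// as-is (the slot keeps a reference to it plus plane pointers and
// strides); otherwise the YUV420P data is converted to BGRA via
// libswscale into the QImage storage of frame->original.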
bool VideoTrack::Sink::decodeFrame(
		const webrtc::VideoFrame &nativeVideoFrame,
		not_null<Frame*> frame) {
	const auto native = nativeVideoFrame.video_frame_buffer()->ToI420();
	const auto size = QSize{ native->width(), native->height() };
	if (size.isEmpty()) {
		frame->format = FrameFormat::None;
		return false;
	}
	frame->mcstimestamp = nativeVideoFrame.timestamp_us();
	if (!frame->mcstimestamp) {
		frame->mcstimestamp = crl::now() * 1000;
	}
	if (!frame->requireARGB32) {
		if (!frame->original.isNull()) {
			frame->original = frame->prepared = QImage();
		}
		frame->format = FrameFormat::YUV420;
		frame->native = native;
		frame->yuv420 = FrameYUV420{
			.size = size,
			.chromaSize = { native->ChromaWidth(), native->ChromaHeight() },
			.y = { native->DataY(), native->StrideY() },
			.u = { native->DataU(), native->StrideU() },
			.v = { native->DataV(), native->StrideV() },
		};
		return true;
	}
	frame->format = FrameFormat::ARGB32;
	frame->yuv420 = FrameYUV420{
		.size = size,
	};
	if (!FFmpeg::GoodStorageForFrame(frame->original, size)) {
		frame->original = FFmpeg::CreateFrameStorage(size);
	}
	_decodeContext = FFmpeg::MakeSwscalePointer(
		size,
		AV_PIX_FMT_YUV420P,
		size,
		AV_PIX_FMT_BGRA,
		&_decodeContext);
	Assert(_decodeContext != nullptr);

	// AV_NUM_DATA_POINTERS is the plane array size used by AVFrame.
	const uint8_t *src[AV_NUM_DATA_POINTERS] = {
		native->DataY(),
		native->DataU(),
		native->DataV(),
		nullptr
	};
	int srcLineSize[AV_NUM_DATA_POINTERS] = {
		native->StrideY(),
		native->StrideU(),
		native->StrideV(),
		0
	};
	uint8_t *dst[AV_NUM_DATA_POINTERS] = { frame->original.bits(), nullptr };
	int dstLineSize[AV_NUM_DATA_POINTERS] = { int(frame->original.bytesPerLine()), 0 };
	sws_scale(
		_decodeContext.get(),
		src,
		srcLineSize,
		0,
		frame->original.height(),
		dst,
		dstLineSize);
	return true;
}

void VideoTrack::Sink::notifyFrameDecoded() {
	crl::on_main([weak = weak_from_this()] {
		if (const auto strong = weak.lock()) {
			strong->_renderNextFrameOnMain.fire({});
		}
	});
}

int VideoTrack::Sink::counter() const {
	return _counter.load(std::memory_order_acquire);
}

not_null<VideoTrack::Frame*> VideoTrack::Sink::getFrame(int index) {
	Expects(index >= 0 && index < kFramesCount);

	return &_frames[index];
}

not_null<const VideoTrack::Frame*> VideoTrack::Sink::getFrame(
		int index) const {
	Expects(index >= 0 && index < kFramesCount);

	return &_frames[index];
}

// Sometimes the main thread subscribes to check frame requests before
// the first frame is ready and presented, and sometimes after.
bool VideoTrack::Sink::firstPresentHappened() const {
	switch (counter()) {
	case 0: return false;
	case 1: return true;
	}
	Unexpected("Counter value in VideoTrack::Sink::firstPresentHappened.");
}
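
// Called from the main thread once the current frame has been rendered.
// Advances the counter from odd to even so that the next decoded frame
// may be published, and bumps _counterCycle when the counter wraps, which
// keeps the index returned by frameForPaintWithIndex() growing.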
void VideoTrack::Sink::markFrameShown() {
	const auto jump = [&](int counter) {
		if (counter == 2 * kFramesCount - 1) {
			++_counterCycle;
		}
		const auto next = (counter + 1) % (2 * kFramesCount);
		const auto index = next / 2;
		const auto frame = getFrame(index);
		frame->displayed = true;
		_counter.store(
			next,
			std::memory_order_release);
	};
	switch (counter()) {
	case 0: return;
	case 1: return jump(1);
	case 2: return;
	case 3: return jump(3);
	case 4: return;
	case 5: return jump(5);
	//case 6: return;
	//case 7: return jump(7);
	}
	Unexpected("Counter value in VideoTrack::Sink::markFrameShown.");
}

not_null<VideoTrack::Frame*> VideoTrack::Sink::frameForPaint() {
	return frameForPaintWithIndex().frame;
}

VideoTrack::Sink::FrameWithIndex VideoTrack::Sink::frameForPaintWithIndex() {
	const auto index = counter() / 2;
	return {
		.frame = getFrame(index),
		.index = (_counterCycle * 2 * kFramesCount) + index,
	};
}

void VideoTrack::Sink::destroyFrameForPaint() {
	const auto frame = getFrame(counter() / 2);
	if (!frame->original.isNull()) {
		frame->original = frame->prepared = QImage();
	}
	if (frame->native) {
		frame->native = nullptr;
	}
	frame->yuv420 = FrameYUV420();
	frame->format = FrameFormat::None;
}

rpl::producer<> VideoTrack::Sink::renderNextFrameOnMain() const {
	return _renderNextFrameOnMain.events();
}

VideoTrack::VideoTrack(VideoState state, bool requireARGB32)
: _state(state) {
	_sink = std::make_shared<Sink>(requireARGB32);
}

VideoTrack::~VideoTrack() {
}

rpl::producer<> VideoTrack::renderNextFrame() const {
	return rpl::merge(
		_sink->renderNextFrameOnMain(),
		_state.changes() | rpl::to_empty);
}

auto VideoTrack::sink()
-> std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> {
	return _sink;
}

[[nodiscard]] VideoState VideoTrack::state() const {
	return _state.current();
}

[[nodiscard]] rpl::producer<VideoState> VideoTrack::stateValue() const {
	return _state.value();
}

[[nodiscard]] rpl::producer<VideoState> VideoTrack::stateChanges() const {
	return _state.changes();
}

void VideoTrack::setState(VideoState state) {
	if (state == VideoState::Inactive) {
		_inactiveFrom = crl::now();
	} else {
		_inactiveFrom = 0;
	}
	_state = state;
	if (state == VideoState::Inactive) {
		// save last frame?..
		_sink->destroyFrameForPaint();
	}
}

void VideoTrack::markFrameShown() {
	_sink->markFrameShown();
}
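
// Returns an image ready for painting. While the track went inactive less
// than kDropFramesWhileInactive ago, the current frame is destroyed and an
// empty image is returned. Otherwise the original frame is handed out
// directly when it is already good for the request, or a prepared copy is
// (re)rendered and cached in frame->prepared.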
QImage VideoTrack::frame(const FrameRequest &request) {
	if (_inactiveFrom > 0
		&& (_inactiveFrom + kDropFramesWhileInactive > crl::now())) {
		_sink->destroyFrameForPaint();
		return {};
	}
	const auto frame = _sink->frameForPaint();
	const auto preparedFor = frame->request;
	const auto changed = !preparedFor.goodFor(request);
	const auto useRequest = changed ? request : preparedFor;
	if (changed) {
		//_wrapped.with([=](Implementation &unwrapped) {
		//	unwrapped.updateFrameRequest(instance, useRequest);
		//});
	}
	if (!frame->alpha
		&& GoodForRequest(frame->original, frame->rotation, useRequest)) {
		return frame->original;
	} else if (changed || frame->prepared.isNull()) {
		if (changed) {
			frame->request = useRequest;
		}
		frame->prepared = PrepareByRequest(
			frame->original,
			frame->alpha,
			frame->rotation,
			useRequest,
			std::move(frame->prepared));
	}
	return frame->prepared;
}

FrameWithInfo VideoTrack::frameWithInfo(bool requireARGB32) const {
	if (_inactiveFrom > 0
		&& (_inactiveFrom + kDropFramesWhileInactive > crl::now())) {
		_sink->destroyFrameForPaint();
		return {};
	}
	const auto data = _sink->frameForPaintWithIndex();
	Assert(!requireARGB32
		|| (data.frame->format == FrameFormat::ARGB32)
		|| (data.frame->format == FrameFormat::None));
	if (data.frame->requireARGB32 && !requireARGB32) {
		data.frame->requireARGB32 = requireARGB32;
	}
	return {
		.mcstimestamp = data.frame->mcstimestamp,
		.original = data.frame->original,
		.yuv420 = &data.frame->yuv420,
		.format = data.frame->format,
		.rotation = data.frame->rotation,
		.index = data.index,
	};
}

QSize VideoTrack::frameSize() const {
	if (_inactiveFrom > 0
		&& (_inactiveFrom + kDropFramesWhileInactive > crl::now())) {
		_sink->destroyFrameForPaint();
		return {};
	}
	const auto frame = _sink->frameForPaint();
	const auto size = frame->yuv420.size;
	const auto rotation = frame->rotation;
	return (rotation == 90 || rotation == 270)
		? QSize(size.height(), size.width())
		: size;
}

void VideoTrack::PrepareFrameByRequests(
		not_null<Frame*> frame,
		int rotation) {
	Expects(frame->format != FrameFormat::ARGB32
		|| !frame->original.isNull());

	frame->rotation = rotation;
	if (frame->format != FrameFormat::ARGB32) {
		return;
	}
	if (frame->alpha
		|| !GoodForRequest(frame->original, rotation, frame->request)) {
		frame->prepared = PrepareByRequest(
			frame->original,
			frame->alpha,
			rotation,
			frame->request,
			std::move(frame->prepared));
	}
}

} // namespace Webrtc