media_streaming_video_track.cpp 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346
  1. /*
  2. This file is part of Telegram Desktop,
  3. the official desktop application for the Telegram messaging service.
  4. For license and copyright information please follow this link:
  5. https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
  6. */
  7. #include "media/streaming/media_streaming_video_track.h"
  8. #include "ffmpeg/ffmpeg_utility.h"
  9. #include "media/audio/media_audio.h"
  10. #include "base/concurrent_timer.h"
  11. #include "core/crash_reports.h"
  12. #include "base/debug_log.h"
namespace Media {
namespace Streaming {
namespace {

// Frames larger than this area are rejected in processFirstFrame().
constexpr auto kMaxFrameArea = 3840 * 2160; // usual 4K

// Fake "displayed" time meaning the frame was skipped, not shown.
constexpr auto kDisplaySkipped = crl::time(-1);

// Sentinel frame position meaning the track has finished.
constexpr auto kFinishedPosition = std::numeric_limits<crl::time>::max();

static_assert(kDisplaySkipped != kTimeUnknown);
// Converts a decoded YUV420 / NV12 frame to a BGRA QImage via swscale.
// Returns a null QImage if the swscale context could not be created.
[[nodiscard]] QImage ConvertToARGB32(
		FrameFormat format,
		const FrameYUV &data) {
	Expects(data.y.data != nullptr);
	Expects(data.u.data != nullptr);
	// NV12 interleaves U and V in one plane, so v.data may be null there.
	Expects((format == FrameFormat::NV12) || (data.v.data != nullptr));
	Expects(!data.size.isEmpty());

	//if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
	//	resize.transpose();
	//}

	auto result = FFmpeg::CreateFrameStorage(data.size);
	const auto swscale = FFmpeg::MakeSwscalePointer(
		data.size,
		(format == FrameFormat::YUV420
			? AV_PIX_FMT_YUV420P
			: AV_PIX_FMT_NV12),
		data.size,
		AV_PIX_FMT_BGRA);
	if (!swscale) {
		return QImage();
	}

	// AV_NUM_DATA_POINTERS defined in AVFrame struct
	const uint8_t *srcData[AV_NUM_DATA_POINTERS] = {
		static_cast<const uint8_t*>(data.y.data),
		static_cast<const uint8_t*>(data.u.data),
		static_cast<const uint8_t*>(data.v.data),
		nullptr,
	};
	int srcLinesize[AV_NUM_DATA_POINTERS] = {
		data.y.stride,
		data.u.stride,
		data.v.stride,
		0,
	};
	uint8_t *dstData[AV_NUM_DATA_POINTERS] = { result.bits(), nullptr };
	int dstLinesize[AV_NUM_DATA_POINTERS] = { int(result.bytesPerLine()), 0 };

	sws_scale(
		swscale.get(),
		srcData,
		srcLinesize,
		0,
		data.size.height(),
		dstData,
		dstLinesize);

	return result;
}
  66. } // namespace
// Decoder-thread worker, owned through a crl::weak_on_queue wrapper.
// Decodes demuxed packets into frames and publishes them through the
// VideoTrack::Shared state for presentation on the main thread.
class VideoTrackObject final {
public:
	using Frame = VideoTrack::Frame;
	using Shared = VideoTrack::Shared;

	VideoTrackObject(
		crl::weak_on_queue<VideoTrackObject> weak,
		const PlaybackOptions &options,
		not_null<Shared*> shared,
		Stream &&stream,
		const AudioMsgId &audioId,
		FnMut<void(const Information &)> ready,
		Fn<void(Error)> error);

	// Accepts a batch of packets; a single empty packet means stream end.
	void process(std::vector<FFmpeg::Packet> &&packets);

	// Fires when the next frame becomes ready for presentation.
	[[nodiscard]] rpl::producer<> checkNextFrame() const;
	// Fires when decoding stalls waiting for more packets.
	[[nodiscard]] rpl::producer<> waitingForData() const;

	void pause(crl::time time);
	void resume(crl::time time);
	void setSpeed(float64 speed);
	void setWaitForMarkAsShown(bool wait);
	// Detaches from the shared state, turning queued calls into no-ops.
	void interrupt();
	void frameShown();
	void addTimelineDelay(crl::time delayed);
	void updateFrameRequest(
		const Instance *instance,
		const FrameRequest &request);
	void removeFrameRequest(const Instance *instance);

	// Prepares a decoded frame for rendering (YUV pass-through or ARGB32).
	void rasterizeFrame(not_null<Frame*> frame);
	// True only if every registered consumer requires an ARGB32 image.
	[[nodiscard]] bool requireARGB32() const;

private:
	// Outcome of a single readFrame() attempt.
	enum class FrameResult {
		Done,
		Error,
		Waiting,
		Looped,
		Finished,
	};
	// v::null_t means "keep reading", otherwise stop with the given state.
	using ReadEnoughState = std::variant<
		v::null_t,
		FrameResult,
		Shared::PrepareNextCheck>;

	void fail(Error error);
	[[nodiscard]] bool interrupted() const;
	[[nodiscard]] bool tryReadFirstFrame(FFmpeg::Packet &&packet);
	[[nodiscard]] bool fillStateFromFrame();
	[[nodiscard]] bool processFirstFrame();
	void queueReadFrames(crl::time delay = 0);
	void readFrames();
	[[nodiscard]] ReadEnoughState readEnoughFrames(crl::time trackTime);
	[[nodiscard]] FrameResult readFrame(not_null<Frame*> frame);
	void fillRequests(not_null<Frame*> frame) const;
	[[nodiscard]] QSize chooseOriginalResize(QSize encoded) const;
	void presentFrameIfNeeded();
	void callReady();
	[[nodiscard]] bool loopAround();
	[[nodiscard]] crl::time computeDuration() const;
	[[nodiscard]] int durationByPacket(const FFmpeg::Packet &packet);

	// Force frame position to be clamped to [0, duration] and monotonic.
	[[nodiscard]] crl::time currentFramePosition() const;

	[[nodiscard]] TimePoint trackTime() const;

	const crl::weak_on_queue<VideoTrackObject> _weak;
	PlaybackOptions _options;

	// Main thread wrapper destructor will set _shared back to nullptr.
	// All queued method calls after that should be discarded.
	Shared *_shared = nullptr;

	Stream _stream;
	AudioMsgId _audioId;
	bool _readTillEnd = false;
	FnMut<void(const Information &)> _ready;
	Fn<void(Error)> _error;
	crl::time _pausedTime = kTimeUnknown;
	crl::time _resumedTime = kTimeUnknown;
	int _frameIndex = 0;
	// Maximum frame position seen, used when duration is unavailable.
	int _durationByLastPacket = 0;
	// Mutable: trackTime() const may resync it to the audio clock.
	mutable TimePoint _syncTimePoint;
	// Accumulated duration of completed loops (for looped playback).
	crl::time _loopingShift = 0;
	rpl::event_stream<> _checkNextFrame;
	rpl::event_stream<> _waitingForData;
	base::flat_map<const Instance*, FrameRequest> _requests;

	bool _queued = false;
	base::ConcurrentTimer _readFramesTimer;

	// For initial frame skipping for an exact seek.
	FFmpeg::FramePointer _initialSkippingFrame;

};
// Stores the playback configuration; requires a positive stream duration
// value and both the ready and error callbacks to be provided.
VideoTrackObject::VideoTrackObject(
	crl::weak_on_queue<VideoTrackObject> weak,
	const PlaybackOptions &options,
	not_null<Shared*> shared,
	Stream &&stream,
	const AudioMsgId &audioId,
	FnMut<void(const Information &)> ready,
	Fn<void(Error)> error)
: _weak(std::move(weak))
, _options(options)
, _shared(shared)
, _stream(std::move(stream))
, _audioId(audioId)
, _ready(std::move(ready))
, _error(std::move(error))
, _readFramesTimer(_weak, [=] { readFrames(); }) {
	Expects(_stream.duration > 1);
	Expects(_ready != nullptr);
	Expects(_error != nullptr);
}
  170. rpl::producer<> VideoTrackObject::checkNextFrame() const {
  171. return interrupted()
  172. ? (rpl::complete<>() | rpl::type_erased())
  173. : !_shared->firstPresentHappened()
  174. ? (_checkNextFrame.events() | rpl::type_erased())
  175. : _checkNextFrame.events_starting_with({});
  176. }
  177. rpl::producer<> VideoTrackObject::waitingForData() const {
  178. return interrupted()
  179. ? (rpl::never() | rpl::type_erased())
  180. : _waitingForData.events();
  181. }
// Receives a batch of packets from the demuxer thread.
// A single empty packet is the end-of-stream marker.
void VideoTrackObject::process(std::vector<FFmpeg::Packet> &&packets) {
	if (interrupted() || packets.empty()) {
		return;
	}
	if (packets.front().empty()) {
		Assert(packets.size() == 1);
		_readTillEnd = true;
	} else if (!_readTillEnd) {
		//for (const auto &packet : packets) {
		//	// Maybe it is enough to count by list.back()?.. hope so.
		//	accumulate_max(
		//		_durationByLastPacket,
		//		durationByPacket(packet));
		//	if (interrupted()) {
		//		return;
		//	}
		//}
		// Track the furthest position seen so far, to learn the duration
		// of streams that don't report one (see computeDuration()).
		accumulate_max(
			_durationByLastPacket,
			durationByPacket(packets.back()));
		// durationByPacket() may fail() and interrupt on invalid data.
		if (interrupted()) {
			return;
		}
	}
	for (auto i = begin(packets), e = end(packets); i != e; ++i) {
		if (_shared->initialized()) {
			// First frame is already decoded - just queue the rest.
			_stream.queue.insert(
				end(_stream.queue),
				std::make_move_iterator(i),
				std::make_move_iterator(e));
			queueReadFrames();
			break;
		} else if (!tryReadFirstFrame(std::move(*i))) {
			fail(Error::InvalidData);
			break;
		}
	}
}
// Returns the duration implied by this packet's position, or 0 when it
// isn't needed (duration already known, or already found on first cycle).
// Calls fail() on invalid packet data.
// NOTE(review): returns int while FFmpeg::DurationByPacket() deals in
// crl::time values - confirm no narrowing for very long streams.
int VideoTrackObject::durationByPacket(const FFmpeg::Packet &packet) {
	// We've set this value on the first cycle.
	if (_loopingShift || _stream.duration != kDurationUnavailable) {
		return 0;
	}
	const auto result = FFmpeg::DurationByPacket(packet, _stream.timeBase);
	if (result < 0) {
		fail(Error::InvalidData);
		return 0;
	}

	Ensures(result > 0);
	return result;
}
  233. void VideoTrackObject::queueReadFrames(crl::time delay) {
  234. if (delay > 0) {
  235. _readFramesTimer.callOnce(delay);
  236. } else if (!_queued) {
  237. _queued = true;
  238. _weak.with([](VideoTrackObject &that) {
  239. that._queued = false;
  240. that.readFrames();
  241. });
  242. }
  243. }
// Reads and decodes frames until enough are buffered, presenting or
// rescheduling as dictated by readEnoughFrames().
void VideoTrackObject::readFrames() {
	if (interrupted()) {
		return;
	}
	auto time = trackTime().trackTime;
	while (true) {
		const auto result = readEnoughFrames(time);
		v::match(result, [&](FrameResult result) {
			if (result == FrameResult::Done
				|| result == FrameResult::Finished) {
				presentFrameIfNeeded();
			} else if (result == FrameResult::Looped) {
				// Frame positions restart after a loop, so shift the
				// track time we compare against back by one duration.
				const auto duration = computeDuration();
				Assert(duration != kDurationUnavailable);
				time -= duration;
			}
		}, [&](Shared::PrepareNextCheck delay) {
			Expects(delay == kTimeUnknown || delay > 0);

			if (delay != kTimeUnknown) {
				queueReadFrames(delay);
			}
		}, [](v::null_t) {
		});
		// v::null means "read one more frame", anything else - stop.
		if (!v::is_null(result)) {
			break;
		}
	}
}
// Asks the shared state what is needed at the given track time and, if a
// frame slot was returned, decodes into it - re-decoding over frames
// that are already stale when dropping is allowed.
// Returns v::null to continue the read loop, anything else to stop.
auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
-> ReadEnoughState {
	const auto dropStaleFrames = !_options.waitForMarkAsShown;
	const auto state = _shared->prepareState(trackTime, dropStaleFrames);
	return v::match(state, [&](Shared::PrepareFrame frame)
	-> ReadEnoughState {
		while (true) {
			const auto result = readFrame(frame);
			if (result != FrameResult::Done) {
				return result;
			} else if (!dropStaleFrames
				|| !VideoTrack::IsStale(frame, trackTime)) {
				// Keep this frame - the slot is filled successfully.
				return v::null;
			}
			// Frame is already too old - decode the next one over it.
		}
	}, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState {
		return delay;
	}, [&](v::null_t) -> ReadEnoughState {
		return FrameResult::Done;
	});
}
  293. bool VideoTrackObject::loopAround() {
  294. const auto duration = computeDuration();
  295. if (duration == kDurationUnavailable) {
  296. LOG(("Streaming Error: "
  297. "Couldn't find out the real video stream duration."));
  298. return false;
  299. }
  300. avcodec_flush_buffers(_stream.codec.get());
  301. _frameIndex = 0;
  302. _loopingShift += duration;
  303. _readTillEnd = false;
  304. return true;
  305. }
  306. crl::time VideoTrackObject::computeDuration() const {
  307. if (_stream.duration != kDurationUnavailable) {
  308. return _stream.duration;
  309. } else if ((_loopingShift || _readTillEnd) && _durationByLastPacket) {
  310. // We looped, so it already holds full stream duration.
  311. return _durationByLastPacket;
  312. }
  313. return kDurationUnavailable;
  314. }
// Decodes the next frame from the stream into the given slot.
// Handles end-of-stream (finish or loop), lack of data and errors.
auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
	if (const auto error = ReadNextFrame(_stream)) {
		if (error.code() == AVERROR_EOF) {
			if (!_options.loop) {
				// Mark this slot with the "finished" sentinel position.
				frame->position = kFinishedPosition;
				frame->displayed = kTimeUnknown;
				return FrameResult::Finished;
			} else if (loopAround()) {
				return FrameResult::Looped;
			} else {
				fail(Error::InvalidData);
				return FrameResult::Error;
			}
		} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
			// EAGAIN is fine only while more packets may still arrive.
			fail(Error::InvalidData);
			return FrameResult::Error;
		}
		Assert(_stream.queue.empty());
		_waitingForData.fire({});
		return FrameResult::Waiting;
	}
	const auto position = currentFramePosition();
	if (position == kTimeUnknown) {
		fail(Error::InvalidData);
		return FrameResult::Error;
	}
	// Take ownership of the decoded (and hw-transferred) frame data.
	std::swap(frame->decoded, _stream.decodedFrame);
	std::swap(frame->transferred, _stream.transferredFrame);
	frame->index = _frameIndex++;
	frame->position = position;
	frame->displayed = kTimeUnknown;
	return FrameResult::Done;
}
// Synchronizes frame->prepared with the current _requests map: removes
// entries for consumers that are gone and inserts missing ones, keeping
// already prepared data for consumers that are still present.
// Both maps are ordered by instance pointer, so one merge pass suffices.
void VideoTrackObject::fillRequests(not_null<Frame*> frame) const {
	auto i = frame->prepared.begin();
	for (const auto &[instance, request] : _requests) {
		// Erase prepared entries for instances no longer requested.
		while (i != frame->prepared.end() && i->first < instance) {
			i = frame->prepared.erase(i);
		}
		if (i == frame->prepared.end() || i->first > instance) {
			i = frame->prepared.emplace(instance, request).first;
		}
		++i;
	}
	// Drop any prepared entries past the last requested instance.
	while (i != frame->prepared.end()) {
		i = frame->prepared.erase(i);
	}
}
  363. QSize VideoTrackObject::chooseOriginalResize(QSize encoded) const {
  364. auto chosen = QSize();
  365. if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
  366. encoded.transpose();
  367. }
  368. for (const auto &[_, request] : _requests) {
  369. const auto resize = request.blurredBackground
  370. ? CalculateResizeFromOuter(request.outer, encoded)
  371. : request.resize;
  372. if (resize.isEmpty()) {
  373. return QSize();
  374. }
  375. const auto byWidth = (resize.width() >= chosen.width());
  376. const auto byHeight = (resize.height() >= chosen.height());
  377. if (byWidth && byHeight) {
  378. chosen = resize;
  379. } else if (byWidth || byHeight) {
  380. return QSize();
  381. }
  382. }
  383. return chosen;
  384. }
  385. bool VideoTrackObject::requireARGB32() const {
  386. for (const auto &[_, request] : _requests) {
  387. if (!request.requireARGB32) {
  388. return false;
  389. }
  390. }
  391. return true;
  392. }
// Prepares a decoded frame for rendering: downloads hardware frames to
// system memory, then either keeps the YUV planes (when every consumer
// accepts YUV) or converts to an ARGB32 QImage, and finally renders the
// per-request images. Calls fail() and clears requests on bad data.
void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
	Expects(frame->position != kFinishedPosition);

	fillRequests(frame);
	frame->format = FrameFormat::None;
	if (frame->decoded->hw_frames_ctx) {
		// Hardware-accelerated decoding: copy to a software frame first.
		if (!frame->transferred) {
			frame->transferred = FFmpeg::MakeFramePointer();
		}
		const auto success = TransferFrame(
			_stream,
			frame->decoded.get(),
			frame->transferred.get());
		if (!success) {
			frame->prepared.clear();
			fail(Error::InvalidData);
			return;
		}
	} else {
		frame->transferred = nullptr;
	}
	const auto frameWithData = frame->transferred
		? frame->transferred.get()
		: frame->decoded.get();
	if ((frameWithData->format == AV_PIX_FMT_YUV420P
		|| frameWithData->format == AV_PIX_FMT_NV12) && !requireARGB32()) {
		const auto nv12 = (frameWithData->format == AV_PIX_FMT_NV12);
		frame->alpha = false;
		frame->yuv = ExtractYUV(_stream, frameWithData);
		// NV12 has no separate V plane, so don't require v.data for it.
		if (frame->yuv.size.isEmpty()
			|| frame->yuv.chromaSize.isEmpty()
			|| !frame->yuv.y.data
			|| !frame->yuv.u.data
			|| (!nv12 && !frame->yuv.v.data)) {
			frame->prepared.clear();
			fail(Error::InvalidData);
			return;
		}
		if (!frame->original.isNull()) {
			// Drop stale ARGB32 renders from a previous use of the slot.
			frame->original = QImage();
			for (auto &[_, prepared] : frame->prepared) {
				prepared.image = QImage();
			}
		}
		frame->format = nv12 ? FrameFormat::NV12 : FrameFormat::YUV420;
	} else {
		frame->alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
			|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
		frame->yuv.size = {
			frameWithData->width,
			frameWithData->height
		};
		frame->original = ConvertFrame(
			_stream,
			frameWithData,
			chooseOriginalResize(
				{ frameWithData->width, frameWithData->height }),
			std::move(frame->original));
		if (frame->original.isNull()) {
			frame->prepared.clear();
			fail(Error::InvalidData);
			return;
		}
		frame->format = FrameFormat::ARGB32;
	}

	VideoTrack::PrepareFrameByRequests(
		frame,
		_stream.aspect,
		_stream.rotation);

	Ensures(VideoTrack::IsRasterized(frame));
}
// Tries to hand a prepared frame over to the main thread for display.
// Does nothing while paused or before the first resume().
void VideoTrackObject::presentFrameIfNeeded() {
	if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
		return;
	}
	const auto dropStaleFrames = !_options.waitForMarkAsShown;
	const auto time = trackTime();
	const auto presented = _shared->presentFrame(
		this,
		time,
		_options.speed,
		dropStaleFrames);
	addTimelineDelay(presented.addedWorldTimeDelay);
	if (presented.displayPosition == kFinishedPosition) {
		interrupt();
		// Drop all subscribers - no more frames are coming.
		_checkNextFrame = rpl::event_stream<>();
		return;
	} else if (presented.displayPosition != kTimeUnknown) {
		// A frame was released to the main thread - notify it.
		_checkNextFrame.fire({});
	}
	if (presented.nextCheckDelay != kTimeUnknown) {
		Assert(presented.nextCheckDelay >= 0);
		queueReadFrames(presented.nextCheckDelay);
	}
}
  487. void VideoTrackObject::pause(crl::time time) {
  488. Expects(_syncTimePoint.valid());
  489. if (interrupted()) {
  490. return;
  491. } else if (_pausedTime == kTimeUnknown) {
  492. _pausedTime = time;
  493. }
  494. }
// Resumes playback at the given world time, shifting the sync point by
// the time spent paused (or rebasing it when we weren't paused).
void VideoTrackObject::resume(crl::time time) {
	Expects(_syncTimePoint.trackTime != kTimeUnknown);

	if (interrupted()) {
		return;
	}

	// Resumed time used to validate sync to audio.
	_resumedTime = time;
	if (_pausedTime != kTimeUnknown) {
		Assert(_pausedTime <= time);
		_syncTimePoint.worldTime += (time - _pausedTime);
		_pausedTime = kTimeUnknown;
	} else {
		_syncTimePoint.worldTime = time;
	}
	queueReadFrames();

	Ensures(_syncTimePoint.valid());
	Ensures(_pausedTime == kTimeUnknown);
}
  513. void VideoTrackObject::setSpeed(float64 speed) {
  514. if (interrupted()) {
  515. return;
  516. }
  517. if (_syncTimePoint.valid()) {
  518. const auto time = trackTime();
  519. _syncTimePoint = time;
  520. }
  521. _options.speed = speed;
  522. }
  523. void VideoTrackObject::setWaitForMarkAsShown(bool wait) {
  524. if (interrupted()) {
  525. return;
  526. }
  527. _options.waitForMarkAsShown = wait;
  528. }
  529. bool VideoTrackObject::interrupted() const {
  530. return !_shared;
  531. }
  532. void VideoTrackObject::frameShown() {
  533. if (interrupted()) {
  534. return;
  535. }
  536. queueReadFrames();
  537. }
  538. void VideoTrackObject::addTimelineDelay(crl::time delayed) {
  539. Expects(_syncTimePoint.valid());
  540. if (!delayed) {
  541. return;
  542. }
  543. _syncTimePoint.worldTime += delayed;
  544. }
// Stores (or overwrites) the frame request for the given consumer.
void VideoTrackObject::updateFrameRequest(
		const Instance *instance,
		const FrameRequest &request) {
	_requests[instance] = request;
}
// Forgets the frame request of a consumer that no longer needs frames.
void VideoTrackObject::removeFrameRequest(const Instance *instance) {
	_requests.remove(instance);
}
// Feeds one packet to the decoder and tries to produce the first frame
// at (or right after) the requested seek position. Returns false on
// failure, true when the first frame is ready or more packets are needed.
bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
	if (ProcessPacket(_stream, std::move(packet)).failed()) {
		return false;
	}
	while (true) {
		if (const auto error = ReadNextFrame(_stream)) {
			if (error.code() == AVERROR_EOF) {
				if (!_initialSkippingFrame) {
					return false;
				}
				// Return the last valid frame if we seek too far.
				_stream.decodedFrame = std::move(_initialSkippingFrame);
				return processFirstFrame();
			} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
				return false;
			} else {
				// Waiting for more packets.
				return true;
			}
		} else if (!fillStateFromFrame()) {
			return false;
		} else if (_syncTimePoint.trackTime >= _options.position) {
			return processFirstFrame();
		}

		// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
		// Try skipping frames until one is after the requested position.
		std::swap(_initialSkippingFrame, _stream.decodedFrame);
		if (!_stream.decodedFrame) {
			_stream.decodedFrame = FFmpeg::MakeFramePointer();
		}
	}
}
// Converts the first decoded frame to a cover QImage, initializes the
// shared state with it, reports readiness and starts the read loop.
// Returns false on any failure.
bool VideoTrackObject::processFirstFrame() {
	const auto decodedFrame = _stream.decodedFrame.get();
	if (decodedFrame->width * decodedFrame->height > kMaxFrameArea) {
		// Refuse to process unreasonably large frames.
		return false;
	} else if (decodedFrame->hw_frames_ctx) {
		// Hardware frame - download it to a software frame first.
		if (!_stream.transferredFrame) {
			_stream.transferredFrame = FFmpeg::MakeFramePointer();
		}
		const auto success = TransferFrame(
			_stream,
			decodedFrame,
			_stream.transferredFrame.get());
		if (!success) {
			LOG(("Video Error: Failed accelerated decoding from format %1."
				).arg(int(decodedFrame->format)));
			return false;
		}
		DEBUG_LOG(("Video Info: "
			"Using accelerated decoding from format %1 to format %2."
			).arg(int(decodedFrame->format)
			).arg(int(_stream.transferredFrame->format)));
	} else {
		_stream.transferredFrame = nullptr;
	}
	const auto frameWithData = _stream.transferredFrame
		? _stream.transferredFrame.get()
		: decodedFrame;
	const auto alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
		|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
	// The cover is always converted to a full-size ARGB32 image.
	auto frame = ConvertFrame(
		_stream,
		frameWithData,
		QSize(),
		QImage());
	if (frame.isNull()) {
		return false;
	}
	_shared->init(std::move(frame), alpha, _syncTimePoint.trackTime);
	callReady();
	queueReadFrames();
	return true;
}
  627. crl::time VideoTrackObject::currentFramePosition() const {
  628. const auto position = FramePosition(_stream);
  629. if (position == kTimeUnknown || position == kFinishedPosition) {
  630. return kTimeUnknown;
  631. }
  632. return _loopingShift + std::clamp(
  633. position,
  634. crl::time(0),
  635. computeDuration() - 1);
  636. }
  637. bool VideoTrackObject::fillStateFromFrame() {
  638. const auto position = currentFramePosition();
  639. if (position == kTimeUnknown) {
  640. return false;
  641. }
  642. _syncTimePoint.trackTime = position;
  643. return true;
  644. }
// Reports the initial VideoInformation (cover frame, size, state) to the
// owner through the one-shot _ready callback.
void VideoTrackObject::callReady() {
	Expects(_ready != nullptr);

	const auto frame = _shared->frameForPaint();
	// The cover frame occupies index 0.
	++_frameIndex;

	base::take(_ready)({ VideoInformation{
		.state = {
			.position = _syncTimePoint.trackTime,
			.receivedTill = (_readTillEnd
				? _stream.duration
				: _syncTimePoint.trackTime),
			.duration = _stream.duration,
		},
		.size = FFmpeg::TransposeSizeByRotation(
			FFmpeg::CorrectByAspect(frame->original.size(), _stream.aspect),
			_stream.rotation),
		.cover = frame->original,
		.rotation = _stream.rotation,
		.alpha = frame->alpha,
	} });
}
// Computes the current (world time, track time) pair, optionally
// resynchronizing the mutable _syncTimePoint to the audio clock.
TimePoint VideoTrackObject::trackTime() const {
	auto result = TimePoint();
	// While paused the world clock is frozen at the pause moment.
	result.worldTime = (_pausedTime != kTimeUnknown)
		? _pausedTime
		: crl::now();
	if (!_syncTimePoint) {
		// Playback didn't start yet - track time doesn't advance.
		result.trackTime = _syncTimePoint.trackTime;
		return result;
	}

	Assert(_resumedTime != kTimeUnknown);
	if (_options.syncVideoByAudio && _audioId.externalPlayId()) {
		const auto mixer = Media::Player::mixer();
		const auto point = mixer->getExternalSyncTimePoint(_audioId);
		// Only take audio sync points from after the last resume.
		if (point && point.worldTime > _resumedTime) {
			_syncTimePoint = point;
		}
	}
	// Scale the elapsed world time by the playback speed.
	const auto adjust = (result.worldTime - _syncTimePoint.worldTime);
	const auto adjustSpeed = adjust * _options.speed;
	const auto roundAdjustSpeed = base::SafeRound(adjustSpeed);
	const auto timeRoundAdjustSpeed = crl::time(roundAdjustSpeed);
	result.trackTime = _syncTimePoint.trackTime + timeRoundAdjustSpeed;
	return result;
}
// Detaches from the shared state so all later queued calls are no-ops
// (see interrupted()). Used on destruction and on fatal errors.
void VideoTrackObject::interrupt() {
	_shared = nullptr;
}
// Stops all further work, then reports the error to the owner.
void VideoTrackObject::fail(Error error) {
	interrupt();
	_error(error);
}
// Stores the first (cover) frame and marks the shared state initialized
// by publishing counter value 0 with a release store.
void VideoTrack::Shared::init(
		QImage &&cover,
		bool hasAlpha,
		crl::time position) {
	Expects(!initialized());

	_frames[0].original = std::move(cover);
	_frames[0].position = position;
	_frames[0].format = FrameFormat::ARGB32;
	_frames[0].alpha = hasAlpha;

	// Usually main thread sets displayed time before _counter increment.
	// But in this case we update _counter, so we set a fake displayed time.
	_frames[0].displayed = kDisplaySkipped;

	_delay = 0;
	_counter.store(0, std::memory_order_release);
}
// Reads the frame-cycle counter published by the other thread
// (acquire pairs with the release stores in init() / presentFrame()).
int VideoTrack::Shared::counter() const {
	return _counter.load(std::memory_order_acquire);
}
  714. bool VideoTrack::Shared::initialized() const {
  715. return (counter() != kCounterUninitialized);
  716. }
// Returns the frame slot at `index` (0 .. kFramesCount - 1).
not_null<VideoTrack::Frame*> VideoTrack::Shared::getFrame(int index) {
	Expects(index >= 0 && index < kFramesCount);

	return &_frames[index];
}
// Const overload of the frame slot accessor.
not_null<const VideoTrack::Frame*> VideoTrack::Shared::getFrame(
		int index) const {
	Expects(index >= 0 && index < kFramesCount);

	return &_frames[index];
}
// Decides what the decoder thread should do at the given track time:
// a frame slot to decode into, a delay until the next check, or null
// when nothing is needed. The _counter value encodes which frame slots
// currently belong to which thread.
auto VideoTrack::Shared::prepareState(
	crl::time trackTime,
	bool dropStaleFrames)
-> PrepareState {
	// Ensures the frame at `index` and the following one are decoded.
	const auto prepareNext = [&](int index) -> PrepareState {
		const auto frame = getFrame(index);
		const auto next = getFrame((index + 1) % kFramesCount);
		if (!IsDecoded(frame)) {
			return frame;
		} else if (!IsDecoded(next)) {
			return next;
		} else if (next->position < frame->position) {
			// Positions can go backwards after a loop - restore order.
			std::swap(*frame, *next);
		}
		if (next->position == kFinishedPosition || !dropStaleFrames) {
			return PrepareNextCheck(kTimeUnknown);
		} else if (IsStale(frame, trackTime)) {
			// Skip the stale frame and decode a fresh one over it.
			std::swap(*frame, *next);
			next->displayed = kDisplaySkipped;
			return next;
		} else {
			if (frame->position - trackTime + 1 <= 0) { // Debugging crash.
				CrashReports::SetAnnotation(
					"DelayValues",
					(QString::number(frame->position)
						+ " + 1 <= "
						+ QString::number(trackTime)));
			}
			Assert(frame->position >= trackTime);
			Assert(frame->position - trackTime + 1 > 0);
			return PrepareNextCheck(frame->position - trackTime + 1);
		}
	};
	const auto finishPrepare = [&](int index) -> PrepareState {
		// If player already awaits next frame - we ignore if it's stale.
		dropStaleFrames = false;
		const auto result = prepareNext(index);
		return v::is<PrepareNextCheck>(result) ? PrepareState() : result;
	};

	switch (counter()) {
	case 0: return finishPrepare(1);
	case 1: return prepareNext(2);
	case 2: return finishPrepare(2);
	case 3: return prepareNext(3);
	case 4: return finishPrepare(3);
	case 5: return prepareNext(0);
	case 6: return finishPrepare(0);
	case 7: return prepareNext(1);
	}
	Unexpected("Counter value in VideoTrack::Shared::prepareState.");
}
  777. // Sometimes main thread subscribes to check frame requests before
  778. // the first frame is ready and presented and sometimes after.
  779. bool VideoTrack::Shared::firstPresentHappened() const {
  780. switch (counter()) {
  781. case 0: return false;
  782. case 1: return true;
  783. }
  784. Unexpected("Counter value in VideoTrack::Shared::firstPresentHappened.");
  785. }
  786. auto VideoTrack::Shared::presentFrame(
  787. not_null<VideoTrackObject*> object,
  788. TimePoint time,
  789. float64 playbackSpeed,
  790. bool dropStaleFrames)
  791. -> PresentFrame {
  792. const auto present = [&](int counter, int index) -> PresentFrame {
  793. const auto frame = getFrame(index);
  794. const auto position = frame->position;
  795. const auto addedWorldTimeDelay = base::take(_delay);
  796. if (position == kFinishedPosition) {
  797. return { kFinishedPosition, kTimeUnknown, addedWorldTimeDelay };
  798. }
  799. object->rasterizeFrame(frame);
  800. if (!IsRasterized(frame)) {
  801. // Error happened during frame prepare.
  802. return { kTimeUnknown, kTimeUnknown, addedWorldTimeDelay };
  803. }
  804. const auto trackLeft = position - time.trackTime;
  805. const auto adjustedBySpeed = trackLeft / playbackSpeed;
  806. const auto roundedAdjustedBySpeed = base::SafeRound(adjustedBySpeed);
  807. frame->display = time.worldTime
  808. + addedWorldTimeDelay
  809. + crl::time(roundedAdjustedBySpeed);
  810. // Release this frame to the main thread for rendering.
  811. _counter.store(
  812. counter + 1,
  813. std::memory_order_release);
  814. return { position, crl::time(0), addedWorldTimeDelay };
  815. };
  816. const auto nextCheckDelay = [&](int index) -> PresentFrame {
  817. const auto frame = getFrame(index);
  818. if (frame->position == kFinishedPosition) {
  819. return { kFinishedPosition, kTimeUnknown };
  820. }
  821. const auto next = getFrame((index + 1) % kFramesCount);
  822. if (!IsDecoded(frame) || !IsDecoded(next)) {
  823. return { kTimeUnknown, crl::time(0) };
  824. } else if (next->position == kFinishedPosition
  825. || !dropStaleFrames
  826. || IsStale(frame, time.trackTime)) {
  827. return { kTimeUnknown, kTimeUnknown };
  828. }
  829. return { kTimeUnknown, (frame->position - time.trackTime + 1) };
  830. };
  831. switch (counter()) {
  832. case 0: return present(0, 1);
  833. case 1: return nextCheckDelay(2);
  834. case 2: return present(2, 2);
  835. case 3: return nextCheckDelay(3);
  836. case 4: return present(4, 3);
  837. case 5: return nextCheckDelay(0);
  838. case 6: return present(6, 0);
  839. case 7: return nextCheckDelay(1);
  840. }
  841. Unexpected("Counter value in VideoTrack::Shared::prepareState.");
  842. }
crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
	// Returns the world time at which the next (presented, not yet shown)
	// frame should be displayed. Even counter values mean the decoder
	// thread still owns the next frame, so the time is unknown.
	const auto frameDisplayTime = [&](int counter) {
		const auto next = (counter + 1) % (2 * kFramesCount);
		const auto index = next / 2;
		const auto frame = getFrame(index);
		if (frame->displayed != kTimeUnknown) {
			// Frame already displayed, but not yet shown.
			return kFrameDisplayTimeAlreadyDone;
		}
		Assert(IsRasterized(frame));
		Assert(frame->display != kTimeUnknown);
		return frame->display;
	};
	switch (counter()) {
	case 0: return kTimeUnknown;
	case 1: return frameDisplayTime(1);
	case 2: return kTimeUnknown;
	case 3: return frameDisplayTime(3);
	case 4: return kTimeUnknown;
	case 5: return frameDisplayTime(5);
	case 6: return kTimeUnknown;
	case 7: return frameDisplayTime(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::nextFrameDisplayTime.");
}
crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
	// Stamps the currently presented frame with its display time (only
	// once - later calls keep the first timestamp) and returns the frame
	// position. Valid only for odd counter values, i.e. when a frame has
	// been released to the main thread.
	const auto mark = [&](int counter) {
		const auto next = (counter + 1) % (2 * kFramesCount);
		const auto index = next / 2;
		const auto frame = getFrame(index);
		Assert(frame->position != kTimeUnknown);
		if (frame->displayed == kTimeUnknown) {
			frame->displayed = now;
		}
		return frame->position;
	};
	switch (counter()) {
	case 0: Unexpected("Value 0 in VideoTrack::Shared::markFrameDisplayed.");
	case 1: return mark(1);
	case 2: Unexpected("Value 2 in VideoTrack::Shared::markFrameDisplayed.");
	case 3: return mark(3);
	case 4: Unexpected("Value 4 in VideoTrack::Shared::markFrameDisplayed.");
	case 5: return mark(5);
	case 6: Unexpected("Value 6 in VideoTrack::Shared::markFrameDisplayed.");
	case 7: return mark(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::markFrameDisplayed.");
}
void VideoTrack::Shared::addTimelineDelay(crl::time delayed) {
	// Accumulates a world-time delay that presentFrame() will consume
	// (via base::take(_delay)) and add to the next frame display time.
	if (!delayed) {
		return;
	}
	const auto recountCurrentFrame = [&](int counter) {
		_delay += delayed;
		// NOTE(review): earlier approach recounted the current frame's
		// display time in place; kept commented out for reference.
		//const auto next = (counter + 1) % (2 * kFramesCount);
		//const auto index = next / 2;
		//const auto frame = getFrame(index);
		//if (frame->displayed != kTimeUnknown) {
		//	// Frame already displayed.
		//	return;
		//}
		//Assert(IsRasterized(frame));
		//Assert(frame->display != kTimeUnknown);
		//frame->display = countFrameDisplayTime(frame->index);
	};
	// Only odd counter values (frame presented to the main thread) are
	// valid here.
	switch (counter()) {
	case 0: Unexpected("Value 0 in VideoTrack::Shared::addTimelineDelay.");
	case 1: return recountCurrentFrame(1);
	case 2: Unexpected("Value 2 in VideoTrack::Shared::addTimelineDelay.");
	case 3: return recountCurrentFrame(3);
	case 4: Unexpected("Value 4 in VideoTrack::Shared::addTimelineDelay.");
	case 5: return recountCurrentFrame(5);
	case 6: Unexpected("Value 6 in VideoTrack::Shared::addTimelineDelay.");
	case 7: return recountCurrentFrame(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::addTimelineDelay.");
}
  920. bool VideoTrack::Shared::markFrameShown() {
  921. const auto jump = [&](int counter) {
  922. const auto next = (counter + 1) % (2 * kFramesCount);
  923. const auto index = next / 2;
  924. const auto frame = getFrame(index);
  925. if (frame->displayed == kTimeUnknown) {
  926. return false;
  927. }
  928. _counter.store(
  929. next,
  930. std::memory_order_release);
  931. return true;
  932. };
  933. switch (counter()) {
  934. case 0: return false;
  935. case 1: return jump(1);
  936. case 2: return false;
  937. case 3: return jump(3);
  938. case 4: return false;
  939. case 5: return jump(5);
  940. case 6: return false;
  941. case 7: return jump(7);
  942. }
  943. Unexpected("Counter value in VideoTrack::Shared::markFrameShown.");
  944. }
  945. not_null<VideoTrack::Frame*> VideoTrack::Shared::frameForPaint() {
  946. return frameForPaintWithIndex().frame;
  947. }
  948. VideoTrack::FrameWithIndex VideoTrack::Shared::frameForPaintWithIndex() {
  949. const auto index = counter() / 2;
  950. const auto frame = getFrame(index);
  951. Assert(frame->format != FrameFormat::None);
  952. Assert(frame->position != kTimeUnknown);
  953. Assert(frame->displayed != kTimeUnknown);
  954. return {
  955. .frame = frame,
  956. .index = frame->index,
  957. };
  958. }
VideoTrack::VideoTrack(
	const PlaybackOptions &options,
	Stream &&stream,
	const AudioMsgId &audioId,
	FnMut<void(const Information &)> ready,
	Fn<void(Error)> error)
// Stream fields are copied here before the stream object itself is
// moved into the wrapped (decoder thread) implementation below.
: _streamIndex(stream.index)
, _streamTimeBase(stream.timeBase)
, _streamDuration(stream.duration)
, _streamRotation(stream.rotation)
, _streamAspect(stream.aspect)
, _shared(std::make_unique<Shared>())
, _wrapped(
	options,
	_shared.get(),
	std::move(stream),
	audioId,
	std::move(ready),
	std::move(error)) {
}
int VideoTrack::streamIndex() const {
	// Index of the video stream inside the source container.
	return _streamIndex;
}
AVRational VideoTrack::streamTimeBase() const {
	// FFmpeg time base of the video stream, captured at construction.
	return _streamTimeBase;
}
crl::time VideoTrack::streamDuration() const {
	// Stream duration captured at construction.
	return _streamDuration;
}
void VideoTrack::process(std::vector<FFmpeg::Packet> &&packets) {
	// Hand the demuxed packets over to the decoder thread, moving them
	// into the lambda to avoid copying packet buffers.
	_wrapped.with([
		packets = std::move(packets)
	](Implementation &unwrapped) mutable {
		unwrapped.process(std::move(packets));
	});
}
void VideoTrack::waitForData() {
	// Intentionally a no-op: nothing here needs to react when the
	// player starts waiting for more data.
}
void VideoTrack::pause(crl::time time) {
	// Forward the pause moment to the decoder thread.
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.pause(time);
	});
}
void VideoTrack::resume(crl::time time) {
	// Forward the resume moment to the decoder thread.
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.resume(time);
	});
}
void VideoTrack::setSpeed(float64 speed) {
	// Forward the new playback speed to the decoder thread.
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.setSpeed(speed);
	});
}
void VideoTrack::setWaitForMarkAsShown(bool wait) {
	// Forward the wait-for-shown flag to the decoder thread.
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.setWaitForMarkAsShown(wait);
	});
}
crl::time VideoTrack::nextFrameDisplayTime() const {
	// Main-thread query against the shared lock-free state.
	return _shared->nextFrameDisplayTime();
}
crl::time VideoTrack::markFrameDisplayed(crl::time now) {
	// Stamps the presented frame with its display time and returns the
	// frame position; a valid position is guaranteed by the callee.
	const auto result = _shared->markFrameDisplayed(now);
	Ensures(result != kTimeUnknown);
	return result;
}
void VideoTrack::addTimelineDelay(crl::time delayed) {
	// Delegates entirely to the shared state now; the decoder-thread
	// notification below was disabled and is kept for reference.
	_shared->addTimelineDelay(delayed);
	//if (!delayed) {
	//	return;
	//}
	//_wrapped.with([=](Implementation &unwrapped) mutable {
	//	unwrapped.addTimelineDelay(delayed);
	//});
}
  1034. bool VideoTrack::markFrameShown() {
  1035. if (!_shared->markFrameShown()) {
  1036. return false;
  1037. }
  1038. _wrapped.with([](Implementation &unwrapped) {
  1039. unwrapped.frameShown();
  1040. });
  1041. return true;
  1042. }
QImage VideoTrack::frame(
		const FrameRequest &request,
		const Instance *instance) {
	// Returns the current paint frame prepared according to `request`.
	return frameImage(_shared->frameForPaint(), request, instance);
}
FrameWithInfo VideoTrack::frameWithInfo(
		const FrameRequest &request,
		const Instance *instance) {
	// Request-driven variant: the image is prepared via frameImage(), so
	// the reported format is always ARGB32.
	const auto data = _shared->frameForPaintWithIndex();
	return {
		.image = frameImage(data.frame, request, instance),
		.format = FrameFormat::ARGB32,
		.index = data.index,
	};
}
FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) {
	// Raw variant: returns the frame data in its native format (possibly
	// YUV) without preparing an ARGB32 image for the request.
	const auto data = _shared->frameForPaintWithIndex();
	const auto i = data.frame->prepared.find(instance);
	const auto none = (i == data.frame->prepared.end());
	if (none || i->second.request.requireARGB32) {
		// Presumably this caller consumes YUV directly, so relax the
		// ARGB32 requirement for frames prepared for this instance.
		_wrapped.with([=](Implementation &unwrapped) {
			unwrapped.updateFrameRequest(
				instance,
				{ .requireARGB32 = false });
		});
	}
	return {
		.image = data.frame->original,
		.yuv = &data.frame->yuv,
		.format = data.frame->format,
		.index = data.index,
		.alpha = data.frame->alpha,
	};
}
QImage VideoTrack::frameImage(
		not_null<Frame*> frame,
		const FrameRequest &request,
		const Instance *instance) {
	// Returns an image for `frame` matching `request`, reusing cached
	// prepared images where possible and notifying the decoder thread
	// when this instance's request changes.
	const auto i = frame->prepared.find(instance);
	const auto none = (i == frame->prepared.end());
	// If this instance has no stored request, compare against another
	// instance's request (or a non-strict one) to detect a change.
	const auto preparedFor = frame->prepared.empty()
		? FrameRequest::NonStrict()
		: (none ? frame->prepared.begin() : i)->second.request;
	const auto changed = !preparedFor.goodFor(request);
	const auto useRequest = changed ? request : preparedFor;
	if (changed) {
		_wrapped.with([=](Implementation &unwrapped) {
			unwrapped.updateFrameRequest(instance, useRequest);
		});
	}
	// Lazily convert YUV frames to ARGB32 before any further preparation.
	if (frame->original.isNull()
		&& (frame->format == FrameFormat::YUV420
			|| frame->format == FrameFormat::NV12)) {
		frame->original = ConvertToARGB32(frame->format, frame->yuv);
	}
	if (GoodForRequest(
			frame->original,
			frame->alpha,
			_streamRotation,
			useRequest)) {
		// The unmodified original already satisfies the request.
		return frame->original;
	} else if (changed || none || i->second.image.isNull()) {
		const auto j = none
			? frame->prepared.emplace(instance, useRequest).first
			: i;
		if (changed && !none) {
			i->second.request = useRequest;
		}
		// Reuse an identical image already prepared for another instance.
		if (frame->prepared.size() > 1) {
			for (auto &[alreadyInstance, prepared] : frame->prepared) {
				if (alreadyInstance != instance
					&& prepared.request == useRequest
					&& !prepared.image.isNull()) {
					return prepared.image;
				}
			}
		}
		j->second.image = PrepareByRequest(
			frame->original,
			frame->alpha,
			_streamAspect,
			_streamRotation,
			useRequest,
			std::move(j->second.image));
		return j->second.image;
	}
	return i->second.image;
}
  1131. QImage VideoTrack::currentFrameImage() {
  1132. const auto frame = _shared->frameForPaint();
  1133. if (frame->original.isNull()
  1134. && (frame->format == FrameFormat::YUV420
  1135. || frame->format == FrameFormat::NV12)) {
  1136. frame->original = ConvertToARGB32(frame->format, frame->yuv);
  1137. }
  1138. return frame->original;
  1139. }
void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
	// Tell the decoder thread to stop preparing frames for `instance`.
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.removeFrameRequest(instance);
	});
}
void VideoTrack::PrepareFrameByRequests(
		not_null<Frame*> frame,
		const AVRational &aspect,
		int rotation) {
	// Prepares a scaled/rotated image for each registered request of this
	// frame (decoder-thread side). Only ARGB32 frames are handled here;
	// other formats are left as-is.
	Expects(frame->format != FrameFormat::ARGB32
		|| !frame->original.isNull());

	if (frame->format != FrameFormat::ARGB32) {
		return;
	}
	const auto begin = frame->prepared.begin();
	const auto end = frame->prepared.end();
	for (auto i = begin; i != end; ++i) {
		auto &prepared = i->second;
		if (!GoodForRequest(
				frame->original,
				frame->alpha,
				rotation,
				prepared.request)) {
			// De-duplicate: if an earlier entry carries the same request,
			// clear this image instead of preparing it again.
			auto j = begin;
			for (; j != i; ++j) {
				if (j->second.request == prepared.request) {
					prepared.image = QImage();
					break;
				}
			}
			if (j == i) {
				prepared.image = PrepareByRequest(
					frame->original,
					frame->alpha,
					aspect,
					rotation,
					prepared.request,
					std::move(prepared.image));
			}
		}
	}
}
  1182. bool VideoTrack::IsDecoded(not_null<const Frame*> frame) {
  1183. return (frame->position != kTimeUnknown)
  1184. && (frame->displayed == kTimeUnknown);
  1185. }
  1186. bool VideoTrack::IsRasterized(not_null<const Frame*> frame) {
  1187. return IsDecoded(frame)
  1188. && (!frame->original.isNull()
  1189. || frame->format == FrameFormat::YUV420
  1190. || frame->format == FrameFormat::NV12);
  1191. }
  1192. bool VideoTrack::IsStale(not_null<const Frame*> frame, crl::time trackTime) {
  1193. Expects(IsDecoded(frame));
  1194. return (frame->position < trackTime);
  1195. }
rpl::producer<> VideoTrack::checkNextFrame() const {
	// Main-thread producer firing when the next frame should be checked.
	return _wrapped.producer_on_main([](const Implementation &unwrapped) {
		return unwrapped.checkNextFrame();
	});
}
rpl::producer<> VideoTrack::waitingForData() const {
	// Main-thread producer firing when the track starts waiting for data.
	return _wrapped.producer_on_main([](const Implementation &unwrapped) {
		return unwrapped.waitingForData();
	});
}
VideoTrack::~VideoTrack() {
	// Move ownership of the Shared state into the lambda so it stays
	// alive until the decoder-thread object is interrupted.
	_wrapped.with([shared = std::move(_shared)](Implementation &unwrapped) {
		unwrapped.interrupt();
	});
}
  1211. } // namespace Streaming
  1212. } // namespace Media