/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.

For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/streaming/media_streaming_utility.h"

#include "media/streaming/media_streaming_common.h"
#include "ui/image/image_prepare.h"
#include "ui/painter.h"
#include "ffmpeg/ffmpeg_utility.h"

namespace Media {
namespace Streaming {
namespace {

constexpr auto kSkipInvalidDataPackets = 10;

} // namespace
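
// Best-effort presentation position of the current decoded frame,
// clamped to the stream duration when the duration is known.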
crl::time FramePosition(const Stream &stream) {
	const auto pts = !stream.decodedFrame
		? AV_NOPTS_VALUE
		: (stream.decodedFrame->best_effort_timestamp != AV_NOPTS_VALUE)
		? stream.decodedFrame->best_effort_timestamp
		: (stream.decodedFrame->pts != AV_NOPTS_VALUE)
		? stream.decodedFrame->pts
		: stream.decodedFrame->pkt_dts;
	const auto result = FFmpeg::PtsToTime(pts, stream.timeBase);
	// Sometimes the result here may be larger than the stream duration.
	return (stream.duration == kDurationUnavailable)
		? result
		: std::min(result, stream.duration);
}
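
// Sends one packet to the decoder (an empty packet starts draining at eof).
// A limited number of consecutive AVERROR_INVALIDDATA packets is skipped.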
FFmpeg::AvErrorWrap ProcessPacket(Stream &stream, FFmpeg::Packet &&packet) {
	Expects(stream.codec != nullptr);

	auto error = FFmpeg::AvErrorWrap();

	const auto native = &packet.fields();
	const auto guard = gsl::finally([
		&,
		size = native->size,
		data = native->data
	] {
		native->size = size;
		native->data = data;
		packet = FFmpeg::Packet();
	});

	error = avcodec_send_packet(
		stream.codec.get(),
		native->data ? native : nullptr); // Drain on eof.
	if (error) {
		LogError(u"avcodec_send_packet"_q, error);
		if (error.code() == AVERROR_INVALIDDATA
			// There is a sample voice message where skipping such packet
			// results in a crash (read_access to nullptr) in swr_convert().
			&& stream.codec->codec_id != AV_CODEC_ID_OPUS) {
			if (++stream.invalidDataPackets < kSkipInvalidDataPackets) {
				return FFmpeg::AvErrorWrap(); // Try to skip a bad packet.
			}
		}
	} else {
		stream.invalidDataPackets = 0;
	}
	return error;
}
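
// Receives the next decoded frame, feeding queued packets to the decoder
// for as long as it keeps reporting EAGAIN.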
FFmpeg::AvErrorWrap ReadNextFrame(Stream &stream) {
	Expects(stream.decodedFrame != nullptr);

	auto error = FFmpeg::AvErrorWrap();
	do {
		error = avcodec_receive_frame(
			stream.codec.get(),
			stream.decodedFrame.get());
		if (!error
			|| error.code() != AVERROR(EAGAIN)
			|| stream.queue.empty()) {
			return error;
		}
		error = ProcessPacket(stream, std::move(stream.queue.front()));
		stream.queue.pop_front();
	} while (!error);
	return error;
}
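
// Whether the already prepared image can be shown for this request as-is,
// without resizing, rotation, rounding, masking or recoloring.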
bool GoodForRequest(
		const QImage &image,
		bool hasAlpha,
		int rotation,
		const FrameRequest &request) {
	if (image.isNull()
		|| (hasAlpha && !request.keepAlpha)
		|| request.colored.alpha() != 0) {
		return false;
	} else if (!request.blurredBackground && request.resize.isEmpty()) {
		return true;
	} else if (rotation != 0) {
		return false;
	} else if (!request.rounding.empty() || !request.mask.isNull()) {
		return false;
	}
	const auto size = request.blurredBackground
		? request.outer
		: request.resize;
	return (size == request.outer) && (size == image.size());
}
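
// Copies a hardware-decoded frame to transferredFrame in system memory
// and releases the hardware frame data.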
bool TransferFrame(
		Stream &stream,
		not_null<AVFrame*> decodedFrame,
		not_null<AVFrame*> transferredFrame) {
	Expects(decodedFrame->hw_frames_ctx != nullptr);

	const auto error = FFmpeg::AvErrorWrap(
		av_hwframe_transfer_data(transferredFrame, decodedFrame, 0));
	if (error) {
		LogError(u"av_hwframe_transfer_data"_q, error);
		return false;
	}
	FFmpeg::ClearFrameMemory(decodedFrame);
	return true;
}
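
// Converts the decoded frame into BGRA storage of the requested size:
// a per-pixel copy (forcing opaque alpha) when the format and size already
// match, sws_scale() otherwise.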
QImage ConvertFrame(
		Stream &stream,
		not_null<AVFrame*> frame,
		QSize resize,
		QImage storage) {
	const auto frameSize = QSize(frame->width, frame->height);
	if (frameSize.isEmpty()) {
		LOG(("Streaming Error: Bad frame size %1,%2"
			).arg(frameSize.width()
			).arg(frameSize.height()));
		return QImage();
	} else if (!FFmpeg::FrameHasData(frame)) {
		LOG(("Streaming Error: Bad frame data."));
		return QImage();
	}
	if (resize.isEmpty()) {
		resize = frameSize;
	} else if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
		resize.transpose();
	}
	if (!FFmpeg::GoodStorageForFrame(storage, resize)) {
		storage = FFmpeg::CreateFrameStorage(resize);
	}
	const auto format = AV_PIX_FMT_BGRA;
	const auto hasDesiredFormat = (frame->format == format);
	if (frameSize == storage.size() && hasDesiredFormat) {
		static_assert(sizeof(uint32) == FFmpeg::kPixelBytesSize);
		auto to = reinterpret_cast<uint32*>(storage.bits());
		auto from = reinterpret_cast<const uint32*>(frame->data[0]);
		const auto deltaTo = (storage.bytesPerLine() / sizeof(uint32))
			- storage.width();
		const auto deltaFrom = (frame->linesize[0] / sizeof(uint32))
			- frame->width;
		for ([[maybe_unused]] const auto y : ranges::views::ints(0, frame->height)) {
			for ([[maybe_unused]] const auto x : ranges::views::ints(0, frame->width)) {
				// Wipe out possible alpha values.
				*to++ = 0xFF000000U | *from++;
			}
			to += deltaTo;
			from += deltaFrom;
		}
	} else {
		stream.swscale = MakeSwscalePointer(
			frame,
			resize,
			&stream.swscale);
		if (!stream.swscale) {
			return QImage();
		}
		// AV_NUM_DATA_POINTERS defined in AVFrame struct
		uint8_t *data[AV_NUM_DATA_POINTERS] = { storage.bits(), nullptr };
		int linesize[AV_NUM_DATA_POINTERS] = { int(storage.bytesPerLine()), 0 };
		sws_scale(
			stream.swscale.get(),
			frame->data,
			frame->linesize,
			0,
			frame->height,
			data,
			linesize);
		if (frame->format == AV_PIX_FMT_YUVA420P) {
			FFmpeg::PremultiplyInplace(storage);
		}
	}
	FFmpeg::ClearFrameMemory(frame);
	return storage;
}
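
// Wraps the Y/U/V planes of a decoded frame without copying the pixel data.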
FrameYUV ExtractYUV(Stream &stream, AVFrame *frame) {
	return {
		.size = { frame->width, frame->height },
		.chromaSize = {
			AV_CEIL_RSHIFT(frame->width, 1), // SWScale does that.
			AV_CEIL_RSHIFT(frame->height, 1)
		},
		.y = { .data = frame->data[0], .stride = frame->linesize[0] },
		.u = { .data = frame->data[1], .stride = frame->linesize[1] },
		.v = { .data = frame->data[2], .stride = frame->linesize[2] },
	};
}

void PaintFrameOuter(QPainter &p, const QRect &inner, QSize outer) {
	const auto left = inner.x();
	const auto right = outer.width() - inner.width() - left;
	const auto top = inner.y();
	const auto bottom = outer.height() - inner.height() - top;
	if (left > 0) {
		p.fillRect(0, 0, left, outer.height(), st::imageBg);
	}
	if (right > 0) {
		p.fillRect(
			outer.width() - right,
			0,
			right,
			outer.height(),
			st::imageBg);
	}
	if (top > 0) {
		p.fillRect(left, 0, inner.width(), top, st::imageBg);
	}
	if (bottom > 0) {
		p.fillRect(
			left,
			outer.height() - bottom,
			inner.width(),
			bottom,
			st::imageBg);
	}
}
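
// Draws the image rotated by `rotation` degrees; `to` is given in unrotated
// coordinates and is mapped into the rotated painter coordinate system.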
void PaintFrameInner(
		QPainter &p,
		QRect to,
		const QImage &original,
		bool alpha,
		int rotation) {
	const auto rotated = [](QRect rect, int rotation) {
		switch (rotation) {
		case 0: return rect;
		case 90: return QRect(
			rect.y(),
			-rect.x() - rect.width(),
			rect.height(),
			rect.width());
		case 180: return QRect(
			-rect.x() - rect.width(),
			-rect.y() - rect.height(),
			rect.width(),
			rect.height());
		case 270: return QRect(
			-rect.y() - rect.height(),
			rect.x(),
			rect.height(),
			rect.width());
		}
		Unexpected("Rotation in PaintFrameInner.");
	};

	PainterHighQualityEnabler hq(p);
	if (rotation) {
		p.rotate(rotation);
	}
	const auto rect = rotated(to, rotation);
	if (alpha) {
		p.fillRect(rect, Qt::white);
	}
	p.drawImage(rect, original);
}
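
// Crops the frame center to the outer aspect ratio, downscales and blurs it.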
QImage PrepareBlurredBackground(QSize outer, QImage frame) {
	const auto bsize = frame.size();
	const auto copyw = std::min(
		bsize.width(),
		std::max(outer.width() * bsize.height() / outer.height(), 1));
	const auto copyh = std::min(
		bsize.height(),
		std::max(outer.height() * bsize.width() / outer.width(), 1));
	auto copy = (bsize == QSize(copyw, copyh))
		? std::move(frame)
		: frame.copy(
			(bsize.width() - copyw) / 2,
			(bsize.height() - copyh) / 2,
			copyw,
			copyh);
	auto scaled = (copy.width() <= 100 && copy.height() <= 100)
		? std::move(copy)
		: copy.scaled(40, 40, Qt::KeepAspectRatio, Qt::FastTransformation);
	return Images::Blur(std::move(scaled), true);
}

void FillBlurredBackground(QPainter &p, QSize outer, QImage bg) {
	auto hq = PainterHighQualityEnabler(p);
	const auto rect = QRect(QPoint(), outer);
	const auto ratio = p.device()->devicePixelRatio();
	p.drawImage(
		rect,
		PrepareBlurredBackground(outer * ratio, std::move(bg)));
	p.fillRect(rect, QColor(0, 0, 0, 48));
}
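
// Paints the frame centered inside request.outer, filling the padding with
// either a blurred copy of the frame or the plain background color.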
void PaintFrameContent(
		QPainter &p,
		const QImage &original,
		bool hasAlpha,
		const AVRational &aspect,
		int rotation,
		const FrameRequest &request) {
	const auto outer = request.outer;
	const auto full = request.outer.isEmpty() ? original.size() : outer;
	const auto deAlpha = hasAlpha && !request.keepAlpha;
	const auto resize = request.blurredBackground
		? DecideVideoFrameResize(
			outer,
			FFmpeg::TransposeSizeByRotation(
				FFmpeg::CorrectByAspect(original.size(), aspect), rotation))
		: ExpandDecision{ request.resize.isEmpty()
			? original.size()
			: request.resize };
	const auto size = resize.result;
	const auto target = QRect(
		(full.width() - size.width()) / 2,
		(full.height() - size.height()) / 2,
		size.width(),
		size.height());
	if (request.blurredBackground) {
		if (!resize.expanding) {
			FillBlurredBackground(p, full, original);
		}
	} else if (!hasAlpha || !request.keepAlpha) {
		PaintFrameOuter(p, target, full);
	}
	PaintFrameInner(p, target, original, deAlpha, rotation);
}

void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
	if (!request.mask.isNull()) {
		auto p = QPainter(&storage);
		p.setCompositionMode(QPainter::CompositionMode_DestinationIn);
		p.drawImage(
			QRect(QPoint(), storage.size() / storage.devicePixelRatio()),
			request.mask);
	} else if (!request.rounding.empty()) {
		storage = Images::Round(std::move(storage), request.rounding);
	}
}
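
// Chooses between expanding (cover) scaling, allowed when the scaled width
// fits and at least minVisibleNominator / minVisibleDenominator of the scaled
// height stays visible, and fitting (letterbox) scaling.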
ExpandDecision DecideFrameResize(
		QSize outer,
		QSize original,
		int minVisibleNominator,
		int minVisibleDenominator) {
	if (outer.isEmpty()) {
		// Often "expanding" means that we don't need to fill the background.
		return { .result = original, .expanding = true };
	}
	const auto big = original.scaled(outer, Qt::KeepAspectRatioByExpanding);
	if ((big.width() <= outer.width())
		&& (big.height() * minVisibleNominator
			<= outer.height() * minVisibleDenominator)) {
		return { .result = big, .expanding = true };
	}
	return { .result = original.scaled(outer, Qt::KeepAspectRatio) };
}
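
// Checks whether DecideFrameResize() would expand with the minimum visible
// ratio relaxed by 1 / min(side), returning false early if that relaxed
// ratio drops below 1 / 2.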
bool FrameResizeMayExpand(
		QSize outer,
		QSize original,
		int minVisibleNominator,
		int minVisibleDenominator) {
	const auto min = std::min({
		outer.width(),
		outer.height(),
		original.width(),
		original.height(),
	});
	// Count for: (nominator / denominator) - (1 / min).
	// In case the result is less than 1 / 2, just return.
	if (2 * minVisibleNominator * min
		< 2 * minVisibleDenominator + minVisibleDenominator * min) {
		return false;
	}
	return DecideFrameResize(
		outer,
		original,
		minVisibleNominator * min - minVisibleDenominator,
		minVisibleDenominator * min).expanding;
}

ExpandDecision DecideVideoFrameResize(QSize outer, QSize original) {
	return DecideFrameResize(outer, original, 1, 2);
}

QSize CalculateResizeFromOuter(QSize outer, QSize original) {
	return DecideVideoFrameResize(outer, original).result;
}
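
// Full preparation: reuses or allocates the storage, paints the frame content
// and then applies rounding, mask and optional coloring from the request.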
QImage PrepareByRequest(
		const QImage &original,
		bool hasAlpha,
		const AVRational &aspect,
		int rotation,
		const FrameRequest &request,
		QImage storage) {
	Expects(!request.outer.isEmpty() || hasAlpha);

	const auto outer = request.outer.isEmpty()
		? original.size()
		: request.outer;
	if (!FFmpeg::GoodStorageForFrame(storage, outer)) {
		storage = FFmpeg::CreateFrameStorage(outer);
	}
	if (hasAlpha && request.keepAlpha) {
		storage.fill(Qt::transparent);
	}

	QPainter p(&storage);
	PaintFrameContent(p, original, hasAlpha, aspect, rotation, request);
	p.end();

	ApplyFrameRounding(storage, request);
	if (request.colored.alpha() != 0) {
		storage = Images::Colored(std::move(storage), request.colored);
	}
	return storage;
}

} // namespace Streaming
} // namespace Media