ffmpeg_utility.cpp 21 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781
  1. /*
  2. This file is part of Telegram Desktop,
  3. the official desktop application for the Telegram messaging service.
  4. For license and copyright information please follow this link:
  5. https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
  6. */
  7. #include "ffmpeg/ffmpeg_utility.h"
  8. #include "base/algorithm.h"
  9. #include "logs.h"
  10. #if !defined TDESKTOP_USE_PACKAGED && !defined Q_OS_WIN && !defined Q_OS_MAC
  11. #include "base/platform/linux/base_linux_library.h"
  12. #include <deque>
  13. #endif // !TDESKTOP_USE_PACKAGED && !Q_OS_WIN && !Q_OS_MAC
  14. #include <QImage>
  15. #ifdef LIB_FFMPEG_USE_QT_PRIVATE_API
  16. #include <private/qdrawhelper_p.h>
  17. #endif // LIB_FFMPEG_USE_QT_PRIVATE_API
  18. extern "C" {
  19. #include <libavutil/opt.h>
  20. #include <libavutil/display.h>
  21. } // extern "C"
namespace FFmpeg {
namespace {

// Frame pixel buffers are aligned to this boundary (both the data
// address and the stride), which the conversion fast paths rely on.
// See https://github.com/telegramdesktop/tdesktop/issues/7225
constexpr auto kAlignImageBy = 64;

// All frame storages created here use premultiplied ARGB32.
constexpr auto kImageFormat = QImage::Format_ARGB32_Premultiplied;

// An aspect ratio more extreme than 16:1 (either way) is rejected
// as invalid metadata, see IsValidAspectRatio().
constexpr auto kMaxScaleByAspectRatio = 16;

// Buffer size for custom AVIO read/write callbacks.
constexpr auto kAvioBlockSize = 4096;

// Sentinel for "timestamp not available" in PtsToTime()/TimeToPts().
constexpr auto kTimeUnknown = std::numeric_limits<crl::time>::min();

// Durations above this (ms) are treated as bogus in DurationByPacket().
constexpr auto kDurationMax = crl::time(std::numeric_limits<int>::max());

// Signature of an AVCodecContext::get_format callback.
using GetFormatMethod = enum AVPixelFormat(*)(
	struct AVCodecContext *s,
	const enum AVPixelFormat *fmt);

// Pairs a get_format callback with the pixel format it selects.
// NOTE(review): appears unused within this file — verify before removal.
struct HwAccelDescriptor {
	GetFormatMethod getFormat = nullptr;
	AVPixelFormat format = AV_PIX_FMT_NONE;
};
  38. void AlignedImageBufferCleanupHandler(void* data) {
  39. const auto buffer = static_cast<uchar*>(data);
  40. delete[] buffer;
  41. }
  42. [[nodiscard]] bool IsValidAspectRatio(AVRational aspect) {
  43. return (aspect.num > 0)
  44. && (aspect.den > 0)
  45. && (aspect.num <= aspect.den * kMaxScaleByAspectRatio)
  46. && (aspect.den <= aspect.num * kMaxScaleByAspectRatio);
  47. }
  48. [[nodiscard]] bool IsAlignedImage(const QImage &image) {
  49. return !(reinterpret_cast<uintptr_t>(image.bits()) % kAlignImageBy)
  50. && !(image.bytesPerLine() % kAlignImageBy);
  51. }
// Converts one run of premultiplied ARGB32 pixels to straight alpha.
// 'intsCount' counts 32-bit pixels, not bytes. 'dst' and 'src' may
// alias (the callers pass the same pointer for in-place conversion).
void UnPremultiplyLine(uchar *dst, const uchar *src, int intsCount) {
	[[maybe_unused]] const auto udst = reinterpret_cast<uint*>(dst);
	const auto usrc = reinterpret_cast<const uint*>(src);

#ifndef LIB_FFMPEG_USE_QT_PRIVATE_API
	// Portable path: per-pixel qUnpremultiply().
	for (auto i = 0; i != intsCount; ++i) {
		udst[i] = qUnpremultiply(usrc[i]);
	}
#else // !LIB_FFMPEG_USE_QT_PRIVATE_API
	// Qt private API path: bulk converter from the ARGB32 pixel layout
	// (may use optimized implementations internally).
	static const auto layout = &qPixelLayouts[QImage::Format_ARGB32];
	layout->storeFromARGB32PM(dst, usrc, 0, intsCount, nullptr, nullptr);
#endif // LIB_FFMPEG_USE_QT_PRIVATE_API
}
// Converts one run of straight-alpha ARGB32 pixels to premultiplied.
// 'intsCount' counts 32-bit pixels, not bytes. 'dst' and 'src' may
// alias (PremultiplyInplace() passes the same pointer).
void PremultiplyLine(uchar *dst, const uchar *src, int intsCount) {
	const auto udst = reinterpret_cast<uint*>(dst);
	[[maybe_unused]] const auto usrc = reinterpret_cast<const uint*>(src);

#ifndef LIB_FFMPEG_USE_QT_PRIVATE_API
	// Portable path: per-pixel qPremultiply().
	for (auto i = 0; i != intsCount; ++i) {
		udst[i] = qPremultiply(usrc[i]);
	}
#else // !LIB_FFMPEG_USE_QT_PRIVATE_API
	// Qt private API path: bulk converter to the premultiplied layout.
	static const auto layout = &qPixelLayouts[QImage::Format_ARGB32];
	layout->fetchToARGB32PM(udst, src, 0, intsCount, nullptr, nullptr);
#endif // LIB_FFMPEG_USE_QT_PRIVATE_API
}
#if !defined TDESKTOP_USE_PACKAGED && !defined Q_OS_WIN && !defined Q_OS_MAC
// Builds the priority-ordered list of hardware pixel formats whose
// runtime libraries could actually be loaded on this system
// (non-packaged Linux build). CUDA is always kept as the last resort;
// VDPAU and then VAAPI are pushed to the front when available.
[[nodiscard]] auto CheckHwLibs() {
	auto list = std::deque{
		AV_PIX_FMT_CUDA,
	};
	// VDPAU only needs libvdpau itself.
	if (base::Platform::LoadLibrary("libvdpau.so.1")) {
		list.push_front(AV_PIX_FMT_VDPAU);
	}
	// VAAPI needs the whole libva / libdrm stack; require all of it.
	if ([&] {
		const auto list = std::array{
			"libva-drm.so.2",
			"libva-x11.so.2",
			"libva.so.2",
			"libdrm.so.2",
		};
		for (const auto lib : list) {
			if (!base::Platform::LoadLibrary(lib)) {
				return false;
			}
		}
		return true;
	}()) {
		list.push_front(AV_PIX_FMT_VAAPI);
	}
	return list;
}
#endif // !TDESKTOP_USE_PACKAGED && !Q_OS_WIN && !Q_OS_MAC
// Creates a hardware device context of the given type and attaches it
// to the codec context. Returns false (after logging) on failure.
//
// 'context->opaque' points at the parent codec context (set up in
// MakeCodecPointer); the parent keeps the owning hw_device_ctx
// reference while the child shares the same pointer.
[[nodiscard]] bool InitHw(AVCodecContext *context, AVHWDeviceType type) {
	AVCodecContext *parent = static_cast<AVCodecContext*>(context->opaque);
	auto hwDeviceContext = (AVBufferRef*)nullptr;
	AvErrorWrap error = av_hwdevice_ctx_create(
		&hwDeviceContext,
		type,
		nullptr,
		nullptr,
		0);
	if (error || !hwDeviceContext) {
		LogError(u"av_hwdevice_ctx_create"_q, error);
		return false;
	}
	DEBUG_LOG(("Video Info: "
		"Trying \"%1\" hardware acceleration for \"%2\" decoder."
		).arg(
			av_hwdevice_get_type_name(type),
			context->codec->name));
	// Drop any previously attached device before storing the new one.
	if (parent->hw_device_ctx) {
		av_buffer_unref(&parent->hw_device_ctx);
	}
	// The parent owns a fresh reference; our local reference is
	// released right after.
	parent->hw_device_ctx = av_buffer_ref(hwDeviceContext);
	av_buffer_unref(&hwDeviceContext);
	context->hw_device_ctx = parent->hw_device_ctx;
	return true;
}
// AVCodecContext::get_format callback used for hardware decoding.
// Walks our platform-specific priority list, picks the first format
// that the decoder also offers and for which a hardware device can be
// initialized. Falls back to the last format the decoder offered when
// no hardware path works.
[[nodiscard]] enum AVPixelFormat GetHwFormat(
		AVCodecContext *context,
		const enum AVPixelFormat *formats) {
	// True if 'format' appears in the decoder-provided list.
	const auto has = [&](enum AVPixelFormat format) {
		const enum AVPixelFormat *p = nullptr;
		for (p = formats; *p != AV_PIX_FMT_NONE; p++) {
			if (*p == format) {
				return true;
			}
		}
		return false;
	};
#if !defined TDESKTOP_USE_PACKAGED && !defined Q_OS_WIN && !defined Q_OS_MAC
	// Probe the runtime hw libraries only once per process.
	static const auto list = CheckHwLibs();
#else // !TDESKTOP_USE_PACKAGED && !Q_OS_WIN && !Q_OS_MAC
	// Packaged / Windows / macOS builds: fixed priority list.
	const auto list = std::array{
#ifdef Q_OS_WIN
		AV_PIX_FMT_D3D11,
		AV_PIX_FMT_DXVA2_VLD,
		AV_PIX_FMT_CUDA,
#elif defined Q_OS_MAC // Q_OS_WIN
		AV_PIX_FMT_VIDEOTOOLBOX,
#else // Q_OS_WIN || Q_OS_MAC
		AV_PIX_FMT_VAAPI,
		AV_PIX_FMT_VDPAU,
		AV_PIX_FMT_CUDA,
#endif // Q_OS_WIN || Q_OS_MAC
	};
#endif // TDESKTOP_USE_PACKAGED || Q_OS_WIN || Q_OS_MAC
	for (const auto format : list) {
		if (!has(format)) {
			continue;
		}
		// Map the pixel format to its hardware device type.
		const auto type = [&] {
			switch (format) {
#ifdef Q_OS_WIN
			case AV_PIX_FMT_D3D11: return AV_HWDEVICE_TYPE_D3D11VA;
			case AV_PIX_FMT_DXVA2_VLD: return AV_HWDEVICE_TYPE_DXVA2;
			case AV_PIX_FMT_CUDA: return AV_HWDEVICE_TYPE_CUDA;
#elif defined Q_OS_MAC // Q_OS_WIN
			case AV_PIX_FMT_VIDEOTOOLBOX:
				return AV_HWDEVICE_TYPE_VIDEOTOOLBOX;
#else // Q_OS_WIN || Q_OS_MAC
			case AV_PIX_FMT_VAAPI: return AV_HWDEVICE_TYPE_VAAPI;
			case AV_PIX_FMT_VDPAU: return AV_HWDEVICE_TYPE_VDPAU;
			case AV_PIX_FMT_CUDA: return AV_HWDEVICE_TYPE_CUDA;
#endif // Q_OS_WIN || Q_OS_MAC
			}
			return AV_HWDEVICE_TYPE_NONE;
		}();
		if (type == AV_HWDEVICE_TYPE_NONE && context->hw_device_ctx) {
			// Format needs no device: drop any stale device context.
			av_buffer_unref(&context->hw_device_ctx);
		} else if (type != AV_HWDEVICE_TYPE_NONE && !InitHw(context, type)) {
			// Device creation failed: try the next format.
			continue;
		}
		return format;
	}
	// No hardware path available: use the decoder's last offered format.
	enum AVPixelFormat result = AV_PIX_FMT_NONE;
	for (const enum AVPixelFormat *p = formats; *p != AV_PIX_FMT_NONE; p++) {
		result = *p;
	}
	return result;
}
  192. template <AVPixelFormat Required>
  193. enum AVPixelFormat GetFormatImplementation(
  194. AVCodecContext *ctx,
  195. const enum AVPixelFormat *pix_fmts) {
  196. const enum AVPixelFormat *p = nullptr;
  197. for (p = pix_fmts; *p != -1; p++) {
  198. if (*p == Required) {
  199. return *p;
  200. }
  201. }
  202. return AV_PIX_FMT_NONE;
  203. }
  204. } // namespace
// Creates a custom AVIO context over the provided callbacks.
// The returned IOPointer owns both the context and its internal
// buffer (see IODeleter). Returns an empty pointer after logging on
// allocation failure.
IOPointer MakeIOPointer(
	void *opaque,
	int(*read)(void *opaque, uint8_t *buffer, int bufferSize),
#if DA_FFMPEG_CONST_WRITE_CALLBACK
	int(*write)(void *opaque, const uint8_t *buffer, int bufferSize),
#else
	int(*write)(void *opaque, uint8_t *buffer, int bufferSize),
#endif
	int64_t(*seek)(void *opaque, int64_t offset, int whence)) {
	auto buffer = reinterpret_cast<uchar*>(av_malloc(kAvioBlockSize));
	if (!buffer) {
		LogError(u"av_malloc"_q);
		return {};
	}
	// write_flag is set only when a write callback was supplied.
	auto result = IOPointer(avio_alloc_context(
		buffer,
		kAvioBlockSize,
		write ? 1 : 0,
		opaque,
		read,
		write,
		seek));
	if (!result) {
		// The context did not take ownership — free the buffer here.
		av_freep(&buffer);
		LogError(u"avio_alloc_context"_q);
		return {};
	}
	return result;
}
  234. void IODeleter::operator()(AVIOContext *value) {
  235. if (value) {
  236. av_freep(&value->buffer);
  237. avio_context_free(&value);
  238. }
  239. }
// Opens an input format context over the custom AVIO callbacks.
// On success the returned FormatPointer owns the IO context (freed by
// FormatDeleter); on failure an empty pointer is returned and all
// intermediate resources are released.
FormatPointer MakeFormatPointer(
	void *opaque,
	int(*read)(void *opaque, uint8_t *buffer, int bufferSize),
#if DA_FFMPEG_CONST_WRITE_CALLBACK
	int(*write)(void *opaque, const uint8_t *buffer, int bufferSize),
#else
	int(*write)(void *opaque, uint8_t *buffer, int bufferSize),
#endif
	int64_t(*seek)(void *opaque, int64_t offset, int whence)) {
	auto io = MakeIOPointer(opaque, read, write, seek);
	if (!io) {
		return {};
	}
	io->seekable = (seek != nullptr);
	auto result = avformat_alloc_context();
	if (!result) {
		LogError(u"avformat_alloc_context"_q);
		return {};
	}
	result->pb = io.get();
	result->flags |= AVFMT_FLAG_CUSTOM_IO;

	// "usetoc" asks demuxers (e.g. mp3) to use the table of contents
	// for seeking. The dictionary is freed whichever way we exit.
	auto options = (AVDictionary*)nullptr;
	const auto guard = gsl::finally([&] { av_dict_free(&options); });
	av_dict_set(&options, "usetoc", "1", 0);
	const auto error = AvErrorWrap(avformat_open_input(
		&result,
		nullptr,
		nullptr,
		&options));
	if (error) {
		// avformat_open_input freed 'result' in case an error happened.
		LogError(u"avformat_open_input"_q, error);
		return {};
	}
	if (seek) {
		result->flags |= AVFMT_FLAG_FAST_SEEK;
	}

	// Now FormatPointer will own and free the IO context.
	io.release();
	return FormatPointer(result);
}
// Creates an output (muxing) format context for the muxer named by
// 'format', writing through the custom AVIO callbacks. Returns an
// empty pointer after logging when the muxer is unknown or allocation
// fails.
FormatPointer MakeWriteFormatPointer(
	void *opaque,
	int(*read)(void *opaque, uint8_t *buffer, int bufferSize),
#if DA_FFMPEG_CONST_WRITE_CALLBACK
	int(*write)(void *opaque, const uint8_t *buffer, int bufferSize),
#else
	int(*write)(void *opaque, uint8_t *buffer, int bufferSize),
#endif
	int64_t(*seek)(void *opaque, int64_t offset, int whence),
	const QByteArray &format) {
	// Look the muxer up by exact name.
	const AVOutputFormat *found = nullptr;
	void *i = nullptr;
	while ((found = av_muxer_iterate(&i))) {
		if (found->name == format) {
			break;
		}
	}
	if (!found) {
		LogError(
			"av_muxer_iterate",
			u"Format %1 not found"_q.arg(QString::fromUtf8(format)));
		return {};
	}
	auto io = MakeIOPointer(opaque, read, write, seek);
	if (!io) {
		return {};
	}
	io->seekable = (seek != nullptr);
	auto result = (AVFormatContext*)nullptr;
	auto error = AvErrorWrap(avformat_alloc_output_context2(
		&result,
		(AVOutputFormat*)found,
		nullptr,
		nullptr));
	if (!result || error) {
		// 'io' cleans itself up on this path.
		LogError("avformat_alloc_output_context2", error);
		return {};
	}
	result->pb = io.get();
	result->flags |= AVFMT_FLAG_CUSTOM_IO;

	// Now FormatPointer will own and free the IO context.
	io.release();
	return FormatPointer(result);
}
void FormatDeleter::operator()(AVFormatContext *value) {
	if (value) {
		// Reclaim ownership of the custom AVIO context first: it must
		// outlive avformat_close_input() and be freed (with its buffer)
		// by IODeleter when 'deleter' goes out of scope.
		const auto deleter = IOPointer(value->pb);
		avformat_close_input(&value);
	}
}
  331. const AVCodec *FindDecoder(not_null<AVCodecContext*> context) {
  332. // Force libvpx-vp9, because we need alpha channel support.
  333. return (context->codec_id == AV_CODEC_ID_VP9)
  334. ? avcodec_find_decoder_by_name("libvpx-vp9")
  335. : avcodec_find_decoder(context->codec_id);
  336. }
  337. CodecPointer MakeCodecPointer(CodecDescriptor descriptor) {
  338. auto error = AvErrorWrap();
  339. auto result = CodecPointer(avcodec_alloc_context3(nullptr));
  340. const auto context = result.get();
  341. if (!context) {
  342. LogError(u"avcodec_alloc_context3"_q);
  343. return {};
  344. }
  345. const auto stream = descriptor.stream;
  346. error = avcodec_parameters_to_context(context, stream->codecpar);
  347. if (error) {
  348. LogError(u"avcodec_parameters_to_context"_q, error);
  349. return {};
  350. }
  351. context->pkt_timebase = stream->time_base;
  352. av_opt_set(context, "threads", "auto", 0);
  353. av_opt_set_int(context, "refcounted_frames", 1, 0);
  354. const auto codec = FindDecoder(context);
  355. if (!codec) {
  356. LogError(u"avcodec_find_decoder"_q, context->codec_id);
  357. return {};
  358. }
  359. if (descriptor.hwAllowed) {
  360. context->get_format = GetHwFormat;
  361. context->opaque = context;
  362. } else {
  363. DEBUG_LOG(("Video Info: Using software \"%2\" decoder."
  364. ).arg(codec->name));
  365. }
  366. if ((error = avcodec_open2(context, codec, nullptr))) {
  367. LogError(u"avcodec_open2"_q, error);
  368. return {};
  369. }
  370. return result;
  371. }
  372. void CodecDeleter::operator()(AVCodecContext *value) {
  373. if (value) {
  374. avcodec_free_context(&value);
  375. }
  376. }
  377. FramePointer MakeFramePointer() {
  378. return FramePointer(av_frame_alloc());
  379. }
  380. FramePointer DuplicateFramePointer(AVFrame *frame) {
  381. return frame
  382. ? FramePointer(av_frame_clone(frame))
  383. : FramePointer();
  384. }
  385. bool FrameHasData(AVFrame *frame) {
  386. return (frame && frame->data[0] != nullptr);
  387. }
  388. void ClearFrameMemory(AVFrame *frame) {
  389. if (FrameHasData(frame)) {
  390. av_frame_unref(frame);
  391. }
  392. }
void FrameDeleter::operator()(AVFrame *value) {
	// av_frame_free() unrefs any buffers, frees the frame and accepts
	// a pointer to nullptr, so no guard is needed.
	av_frame_free(&value);
}
// Returns a scaling/conversion context for the given source and
// destination size/format. When 'existing' matches all four cached
// parameters it is reused (moved out); otherwise a new context is
// created and the parameters are remembered in its deleter.
SwscalePointer MakeSwscalePointer(
	QSize srcSize,
	int srcFormat,
	QSize dstSize,
	int dstFormat,
	SwscalePointer *existing) {
	// We have to use custom caching for SwsContext, because
	// sws_getCachedContext checks passed flags with existing context flags,
	// and re-creates context if they're different, but in the process of
	// context creation the passed flags are modified before being written
	// to the resulting context, so the caching doesn't work.
	if (existing && (*existing) != nullptr) {
		const auto &deleter = existing->get_deleter();
		if (deleter.srcSize == srcSize
			&& deleter.srcFormat == srcFormat
			&& deleter.dstSize == dstSize
			&& deleter.dstFormat == dstFormat) {
			return std::move(*existing);
		}
	}
	// Reject formats outside the valid AVPixelFormat range.
	if (srcFormat <= AV_PIX_FMT_NONE || srcFormat >= AV_PIX_FMT_NB) {
		LogError(u"frame->format"_q);
		return SwscalePointer();
	}
	// Note: 'existing' releases ownership here — the old context is
	// either reused or freed by sws_getCachedContext itself.
	const auto result = sws_getCachedContext(
		existing ? existing->release() : nullptr,
		srcSize.width(),
		srcSize.height(),
		AVPixelFormat(srcFormat),
		dstSize.width(),
		dstSize.height(),
		AVPixelFormat(dstFormat),
		0,
		nullptr,
		nullptr,
		nullptr);
	if (!result) {
		LogError(u"sws_getCachedContext"_q);
	}
	return SwscalePointer(
		result,
		{ srcSize, srcFormat, dstSize, dstFormat });
}
// Convenience overload: context converting 'frame' to BGRA at the
// requested 'resize' dimensions.
SwscalePointer MakeSwscalePointer(
	not_null<AVFrame*> frame,
	QSize resize,
	SwscalePointer *existing) {
	return MakeSwscalePointer(
		QSize(frame->width, frame->height),
		frame->format,
		resize,
		AV_PIX_FMT_BGRA,
		existing);
}
  450. void SwresampleDeleter::operator()(SwrContext *value) {
  451. if (value) {
  452. swr_free(&value);
  453. }
  454. }
  455. SwresamplePointer MakeSwresamplePointer(
  456. #if DA_FFMPEG_NEW_CHANNEL_LAYOUT
  457. AVChannelLayout *srcLayout,
  458. #else // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  459. uint64_t srcLayout,
  460. #endif // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  461. AVSampleFormat srcFormat,
  462. int srcRate,
  463. #if DA_FFMPEG_NEW_CHANNEL_LAYOUT
  464. AVChannelLayout *dstLayout,
  465. #else // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  466. uint64_t dstLayout,
  467. #endif // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  468. AVSampleFormat dstFormat,
  469. int dstRate,
  470. SwresamplePointer *existing) {
  471. // We have to use custom caching for SwsContext, because
  472. // sws_getCachedContext checks passed flags with existing context flags,
  473. // and re-creates context if they're different, but in the process of
  474. // context creation the passed flags are modified before being written
  475. // to the resulting context, so the caching doesn't work.
  476. if (existing && (*existing) != nullptr) {
  477. const auto &deleter = existing->get_deleter();
  478. if (true
  479. #if DA_FFMPEG_NEW_CHANNEL_LAYOUT
  480. && srcLayout->nb_channels == deleter.srcChannels
  481. && dstLayout->nb_channels == deleter.dstChannels
  482. #else // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  483. && (av_get_channel_layout_nb_channels(srcLayout)
  484. == deleter.srcChannels)
  485. && (av_get_channel_layout_nb_channels(dstLayout)
  486. == deleter.dstChannels)
  487. #endif // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  488. && srcFormat == deleter.srcFormat
  489. && dstFormat == deleter.dstFormat
  490. && srcRate == deleter.srcRate
  491. && dstRate == deleter.dstRate) {
  492. return std::move(*existing);
  493. }
  494. }
  495. // Initialize audio resampler
  496. AvErrorWrap error;
  497. #if DA_FFMPEG_NEW_CHANNEL_LAYOUT
  498. auto result = (SwrContext*)nullptr;
  499. error = AvErrorWrap(swr_alloc_set_opts2(
  500. &result,
  501. dstLayout,
  502. dstFormat,
  503. dstRate,
  504. srcLayout,
  505. srcFormat,
  506. srcRate,
  507. 0,
  508. nullptr));
  509. if (error || !result) {
  510. LogError(u"swr_alloc_set_opts2"_q, error);
  511. return SwresamplePointer();
  512. }
  513. #else // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  514. auto result = swr_alloc_set_opts(
  515. existing ? existing->get() : nullptr,
  516. dstLayout,
  517. dstFormat,
  518. dstRate,
  519. srcLayout,
  520. srcFormat,
  521. srcRate,
  522. 0,
  523. nullptr);
  524. if (!result) {
  525. LogError(u"swr_alloc_set_opts"_q);
  526. }
  527. #endif // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  528. error = AvErrorWrap(swr_init(result));
  529. if (error) {
  530. LogError(u"swr_init"_q, error);
  531. swr_free(&result);
  532. return SwresamplePointer();
  533. }
  534. return SwresamplePointer(
  535. result,
  536. {
  537. srcFormat,
  538. srcRate,
  539. #if DA_FFMPEG_NEW_CHANNEL_LAYOUT
  540. srcLayout->nb_channels,
  541. #else // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  542. av_get_channel_layout_nb_channels(srcLayout),
  543. #endif // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  544. dstFormat,
  545. dstRate,
  546. #if DA_FFMPEG_NEW_CHANNEL_LAYOUT
  547. dstLayout->nb_channels,
  548. #else // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  549. av_get_channel_layout_nb_channels(dstLayout),
  550. #endif // DA_FFMPEG_NEW_CHANNEL_LAYOUT
  551. });
  552. }
  553. void SwscaleDeleter::operator()(SwsContext *value) {
  554. if (value) {
  555. sws_freeContext(value);
  556. }
  557. }
// Logs "Streaming Error: Error in <method>[ - <details>].".
void LogError(const QString &method, const QString &details) {
	LOG(("Streaming Error: Error in %1%2."
		).arg(method
		).arg(details.isEmpty() ? QString() : " - " + details));
}
// Logs a streaming error with the libav error code and its text,
// plus optional free-form details.
void LogError(
	const QString &method,
	AvErrorWrap error,
	const QString &details) {
	LOG(("Streaming Error: Error in %1 (code: %2, text: %3)%4."
		).arg(method
		).arg(error.code()
		).arg(error.text()
		).arg(details.isEmpty() ? QString() : " - " + details));
}
  573. crl::time PtsToTime(int64_t pts, AVRational timeBase) {
  574. return (pts == AV_NOPTS_VALUE || !timeBase.den)
  575. ? kTimeUnknown
  576. : ((pts * 1000LL * timeBase.num) / timeBase.den);
  577. }
  578. crl::time PtsToTimeCeil(int64_t pts, AVRational timeBase) {
  579. return (pts == AV_NOPTS_VALUE || !timeBase.den)
  580. ? kTimeUnknown
  581. : ((pts * 1000LL * timeBase.num + timeBase.den - 1) / timeBase.den);
  582. }
  583. int64_t TimeToPts(crl::time time, AVRational timeBase) {
  584. return (time == kTimeUnknown || !timeBase.num)
  585. ? AV_NOPTS_VALUE
  586. : (time * timeBase.den) / (1000LL * timeBase.num);
  587. }
// Returns the packet position in milliseconds, preferring pts and
// falling back to dts when pts is unset.
crl::time PacketPosition(const Packet &packet, AVRational timeBase) {
	const auto &native = packet.fields();
	return PtsToTime(
		(native.pts == AV_NOPTS_VALUE) ? native.dts : native.pts,
		timeBase);
}
// Returns the packet duration in milliseconds.
crl::time PacketDuration(const Packet &packet, AVRational timeBase) {
	return PtsToTime(packet.fields().duration, timeBase);
}
// Estimates total media duration (ms) from a (typically last) packet:
// its position plus its duration (clamped to at least 1ms) plus one.
// Returns -1 after logging when the values are out of sane range.
int DurationByPacket(const Packet &packet, AVRational timeBase) {
	const auto position = PacketPosition(packet, timeBase);
	const auto duration = std::max(
		PacketDuration(packet, timeBase),
		crl::time(1));
	// Negative or > kDurationMax values are treated as bogus.
	const auto bad = [](crl::time time) {
		return (time < 0) || (time > kDurationMax);
	};
	if (bad(position) || bad(duration) || bad(position + duration + 1)) {
		LOG(("Streaming Error: Wrong duration by packet: %1 + %2"
			).arg(position
			).arg(duration));
		return -1;
	}
	return int(position + duration + 1);
}
  613. int ReadRotationFromMetadata(not_null<AVStream*> stream) {
  614. const auto displaymatrix = av_stream_get_side_data(
  615. stream,
  616. AV_PKT_DATA_DISPLAYMATRIX,
  617. nullptr);
  618. auto theta = 0;
  619. if (displaymatrix) {
  620. theta = -round(av_display_rotation_get((int32_t*)displaymatrix));
  621. }
  622. theta -= 360 * floor(theta / 360 + 0.9 / 360);
  623. const auto result = int(base::SafeRound(theta));
  624. return (result == 90 || result == 180 || result == 270) ? result : 0;
  625. }
  626. AVRational ValidateAspectRatio(AVRational aspect) {
  627. return IsValidAspectRatio(aspect) ? aspect : kNormalAspect;
  628. }
// Applies the sample aspect ratio by stretching the width only
// (height is kept). Expects a pre-validated ratio.
QSize CorrectByAspect(QSize size, AVRational aspect) {
	Expects(IsValidAspectRatio(aspect));

	return QSize(size.width() * av_q2d(aspect), size.height());
}
  633. bool RotationSwapWidthHeight(int rotation) {
  634. return (rotation == 90 || rotation == 270);
  635. }
  636. QSize TransposeSizeByRotation(QSize size, int rotation) {
  637. return RotationSwapWidthHeight(rotation) ? size.transposed() : size;
  638. }
  639. bool GoodStorageForFrame(const QImage &storage, QSize size) {
  640. return !storage.isNull()
  641. && (storage.format() == kImageFormat)
  642. && (storage.size() == size)
  643. && storage.isDetached()
  644. && IsAlignedImage(storage);
  645. }
// Create a QImage of desired size where all the data is properly aligned.
QImage CreateFrameStorage(QSize size) {
	const auto width = size.width();
	const auto height = size.height();
	// Round the stride up so each scanline is a multiple of
	// kAlignImageBy bytes.
	const auto widthAlign = kAlignImageBy / kPixelBytesSize;
	const auto neededWidth = width + ((width % widthAlign)
		? (widthAlign - (width % widthAlign))
		: 0);
	const auto perLine = neededWidth * kPixelBytesSize;
	// Over-allocate so the start address can be rounded up to the
	// alignment boundary as well.
	const auto buffer = new uchar[perLine * height + kAlignImageBy];
	const auto cleanupData = static_cast<void *>(buffer);
	const auto address = reinterpret_cast<uintptr_t>(buffer);
	const auto alignedBuffer = buffer + ((address % kAlignImageBy)
		? (kAlignImageBy - (address % kAlignImageBy))
		: 0);
	// QImage hands the original (unaligned) pointer back to
	// AlignedImageBufferCleanupHandler when the image is destroyed.
	return QImage(
		alignedBuffer,
		width,
		height,
		perLine,
		kImageFormat,
		AlignedImageBufferCleanupHandler,
		cleanupData);
}
// Converts 'src' (premultiplied ARGB32) into 'dst' with straight
// alpha, reusing 'dst' storage when possible.
void UnPremultiply(QImage &dst, const QImage &src) {
	// This creates QImage::Format_ARGB32_Premultiplied, but we use it
	// as an image in QImage::Format_ARGB32 format.
	if (!GoodStorageForFrame(dst, src.size())) {
		dst = CreateFrameStorage(src.size());
	}
	const auto srcPerLine = src.bytesPerLine();
	const auto dstPerLine = dst.bytesPerLine();
	const auto width = src.width();
	const auto height = src.height();
	auto srcBytes = src.bits();
	auto dstBytes = dst.bits();
	if (srcPerLine != width * 4 || dstPerLine != width * 4) {
		// Padded strides: convert line by line, stepping each stride.
		for (auto i = 0; i != height; ++i) {
			UnPremultiplyLine(dstBytes, srcBytes, width);
			srcBytes += srcPerLine;
			dstBytes += dstPerLine;
		}
	} else {
		// Tightly packed on both sides: one pass over all pixels.
		UnPremultiplyLine(dstBytes, srcBytes, width * height);
	}
}
// Converts the image from straight alpha to premultiplied in place.
void PremultiplyInplace(QImage &image) {
	const auto perLine = image.bytesPerLine();
	const auto width = image.width();
	const auto height = image.height();
	auto bytes = image.bits();
	if (perLine != width * 4) {
		// Padded stride: convert line by line.
		for (auto i = 0; i != height; ++i) {
			PremultiplyLine(bytes, bytes, width);
			bytes += perLine;
		}
	} else {
		// Tightly packed: one pass over all pixels.
		PremultiplyLine(bytes, bytes, width * height);
	}
}
  706. } // namespace FFmpeg