// Returns the stream's duration converted to microseconds using the
// stream's time_base; falls back to the "DURATION" metadata tag when
// stream.duration is not usable.
// NOTE(review): extraction dropped interior lines here (original
// numbering jumps 31 -> 39 -> 42); the warning-log head and the
// metadata-parsing tail are missing — verify against the upstream file.
26static std::optional<qint64> streamDuration(
const AVStream &
stream)
// time_base is the num/den rational used to scale stream.duration.
28 const auto &factor =
stream.time_base;
// Fast path: duration and time_base are both valid -> scale to usec.
30 if (
stream.duration > 0 && factor.num > 0 && factor.den > 0) {
// 1'000'000 converts seconds (duration * num / den) to microseconds.
31 return qint64(1000000) *
stream.duration * factor.num / factor.den;
// Tail of a qCWarning/qCDebug chain whose head was lost in extraction.
39 <<
"is invalid. Taking it from the metadata";
// Fallback: read the container-level "DURATION" tag (common in MKV).
42 if (
const auto duration = av_dict_get(
stream.metadata,
"DURATION",
nullptr, 0)) {
// Derives the display rotation (degrees) for a video stream from its
// AV_PKT_DATA_DISPLAYMATRIX side data; the result is normalized into
// [0, 360) with multiples of 90 expected.
// NOTE(review): lines 51-53, 58-59, 62 and 64 of the original are
// missing (likely the opening brace and the early "return 0" paths) —
// confirm against upstream before editing.
50static int streamOrientation(
const AVStream *
stream)
54 using SideDataSize =
decltype(AVPacketSideData::size);
// A display matrix is 9 packed int32 values (3x3, FFmpeg convention).
55 constexpr SideDataSize displayMatrixSize =
sizeof(int32_t) * 9;
56 const auto *sideData = streamSideData(
stream, AV_PKT_DATA_DISPLAYMATRIX);
// Bail out (body missing from this view) when side data is absent/short.
57 if (!sideData || sideData->size < displayMatrixSize)
60 auto displayMatrix =
reinterpret_cast<const int32_t *
>(sideData->data);
// av_display_rotation_get returns counter-clockwise degrees as double.
61 auto rotation =
static_cast<int>(std::round(av_display_rotation_get(displayMatrix)));
// Non-right-angle rotations are rejected (fallback body missing here).
63 if (rotation % 90 != 0)
// Negate (FFmpeg is CCW, caller expects CW) and normalize into [0, 360).
65 return rotation < 0 ? -rotation % 360 : -rotation % 360 + 360;
// Reports whether the stream's color transfer characteristic indicates
// HDR content. Only the signature and the codecpar fetch survived
// extraction; the actual transfer-function check (original lines
// 70-73, 75+) is missing — verify against upstream.
69static bool colorTransferSupportsHdr(
const AVStream *
stream)
74 const AVCodecParameters *codecPar =
stream->codecpar;
// Accessor: exposes the owned AVFormatContext as a non-owning raw
// pointer (ownership stays with m_context). Braces were lost in
// extraction (original lines 94/96).
93AVFormatContext *MediaDataHolder::avContext()
95 return m_context.get();
100 return m_currentAVStreamIndex[trackType];
// Fragments of a metadata-insertion helper (original lines 107-139):
// it reads the stream's codec parameters and records codec ids, then a
// switch dispatches per media type. The QMediaMetaData::insert calls
// that consumed codecPar->codec_id were dropped by extraction.
107 const auto *codecPar =
stream->codecpar;
114 codecPar->codec_id)));
125 codecPar->codec_id)));
// Per-media-type dispatch; each case body is missing from this view.
135 case AVMEDIA_TYPE_AUDIO:
137 case AVMEDIA_TYPE_VIDEO:
139 case AVMEDIA_TYPE_SUBTITLE:
// Opens a media source (URL or QIODevice) into an AVFormatContext.
// Returns either the context or a ContextError with a user-facing
// message. NOTE(review): many interior lines were dropped by
// extraction (e.g. 153-156, 158-160, 167-169, 173-174, 182-187,
// 190-194, 196-199, 201-202, 204, 206, 208-211) — the allocation-failure
// branch, the avio error handling and the error-code mapping bodies are
// only partially visible. Verify against upstream before changing.
147QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
148loadMedia(
const QUrl &mediaUrl,
QIODevice *
stream,
const std::shared_ptr<ICancelToken> &cancelToken)
// RAII owner for the context; released to a raw pointer only for the
// avformat_open_input call below, which takes ownership on failure.
152 AVFormatContextUPtr
context{ avformat_alloc_context() };
157 return MediaDataHolder::ContextError{
// Custom I/O path: only usable when the device supports seeking
// (condition body lost; presumably an early error return).
161 if (!
stream->isSequential())
// 32 KiB read buffer handed to FFmpeg; avio_alloc_context takes it over.
164 constexpr int bufferSize = 32768;
165 unsigned char *
buffer = (
unsigned char *)av_malloc(bufferSize);
// write_flag=false: read-only AVIO; readQIODevice adapts QIODevice reads.
// The seek-callback argument (original line 167+) is missing here.
166 context->pb = avio_alloc_context(
buffer, bufferSize,
false,
stream, &readQIODevice,
nullptr,
// Network open options: 5 s timeout in microseconds, as a string per
// the AVDictionary API.
170 AVDictionaryHolder dict;
171 constexpr auto NetworkTimeoutUs =
"5000000";
172 av_dict_set(dict,
"timeout", NetworkTimeoutUs, 0);
// protocolWhitelist is computed on a dropped line (173-174) — presumably
// from the URL scheme; verify.
175 if (!protocolWhitelist.isNull())
176 av_dict_set(dict,
"protocol_whitelist", protocolWhitelist.data(), 0);
// Cooperative cancellation: FFmpeg polls this callback during blocking
// I/O; a non-zero return aborts the operation (return lines dropped).
178 context->interrupt_callback.opaque = cancelToken.get();
179 context->interrupt_callback.callback = [](
void *opaque) {
180 const auto *cancelToken =
static_cast<const ICancelToken *
>(opaque);
181 if (cancelToken && cancelToken->isCancelled())
// avformat_open_input frees the context itself on failure, so release
// ownership first and (on a dropped line) re-wrap it on success.
188 AVFormatContext *contextRaw =
context.release();
189 ret = avformat_open_input(&contextRaw,
url.constData(),
nullptr, dict);
// Map FFmpeg errors to QMediaPlayer error codes; the assignments for
// EACCES/EINVAL (lines 196-199) were dropped — likely AccessDenied /
// FormatError. TODO(review): confirm the mapping upstream.
195 if (
ret == AVERROR(EACCES))
197 else if (
ret == AVERROR(EINVAL))
200 return MediaDataHolder::ContextError{ code, QMediaPlayer::tr(
"Could not open file") };
// Probe streams so codec parameters and durations are populated.
203 ret = avformat_find_stream_info(
context.get(),
nullptr);
205 return MediaDataHolder::ContextError{
207 QMediaPlayer::tr(
"Could not find stream information for media file")
// Debug dump of the demuxed format (is_output = 0).
212 av_dump_format(
context.get(), 0,
url.constData(), 0);
// Tail of a factory (original lines 220-225): on success wraps the
// loaded context in a heap-allocated MediaDataHolder under a
// QSharedPointer. The loadMedia call and error propagation (lines
// 221-224) are missing from this view.
220 const std::shared_ptr<ICancelToken> &cancelToken)
225 return QSharedPointer<MediaDataHolder>{
new MediaDataHolder{ std::move(
context.value()), cancelToken } };
// Constructor: takes ownership of the opened AVFormatContext, scans all
// streams to build per-track-type stream maps, picks default streams,
// and computes the overall duration.
// NOTE(review): several interior lines were dropped by extraction
// (233-235, 238, 240, 243-246, 248-250, 252-253, 255, 258-259, 262-264,
// 266-269, 272-273, 275, 278, 280-281) — e.g. the metaData declaration,
// the skip-continue bodies and the loop over track types are partial.
230MediaDataHolder::MediaDataHolder(AVFormatContextUPtr
context,
231 const std::shared_ptr<ICancelToken> &cancelToken)
232 : m_cancelToken{ cancelToken }
236 m_context = std::move(
context);
// Seekability is reported by the demuxer via ctx_flags.
237 m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
239 for (
unsigned int i = 0;
i < m_context->nb_streams; ++
i) {
241 const auto *
stream = m_context->streams[
i];
242 const auto trackType = trackTypeFromMediaType(
stream->codecpar->codec_type);
// Attached pictures (cover art) are not playable tracks — skipped
// (the continue on the dropped line 248-ish).
247 if (
stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
251 const bool isDefault =
stream->disposition & AV_DISPOSITION_DEFAULT;
254 insertMediaData(metaData, trackType,
stream);
// First default-flagged stream of each type becomes the requested one;
// index is the position the entry will get in m_streamMap below.
256 if (isDefault && m_requestedStreams[trackType] < 0)
257 m_requestedStreams[trackType] = m_streamMap[trackType].size();
// Overall duration = max over all streams that report one.
260 if (
auto duration = streamDuration(*
stream)) {
261 m_duration =
qMax(m_duration, *duration);
265 m_streamMap[trackType].append({ (int)
i, isDefault, metaData });
// Fallback: container-level duration (AV_TIME_BASE units — presumably
// already microseconds here; confirm the units match m_duration's).
270 if (m_duration == 0 && m_context->duration > 0ll) {
271 m_duration = m_context->duration;
// For each track type without a default stream, fall back to the first
// entry (assignment body on dropped line 280), then resolve the actual
// AVStream index.
274 for (
auto trackType :
276 auto &requestedStream = m_requestedStreams[trackType];
277 auto &streamMap = m_streamMap[trackType];
279 if (requestedStream < 0 && !streamMap.empty())
282 if (requestedStream >= 0)
283 m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
// Loop fragment (original lines 303-309) scanning streams for an
// attached picture (cover art) and validating its packet data. The
// enclosing function's signature, the stream lookup (line 304) and the
// decode/return tail are missing from this view.
303 for (
unsigned int i = 0;
i <
context->nb_streams; ++
i) {
305 if (!
stream || !(
stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
// attached_pic holds the compressed cover image as an AVPacket.
308 const AVPacket *compressedImage = &
stream->attached_pic;
309 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
// Rebuilds m_metaData from the format context: container-level tags
// (partially visible at line 334), a lazily-cached attached-picture
// thumbnail, and per-active-stream metadata. Interior lines 325-333,
// 335-336, 339, 341-342, 344 were dropped by extraction.
324void MediaDataHolder::updateMetaData()
334 m_context->iformat)));
// Thumbnail extraction is done once and cached.
337 if (!m_cachedThumbnail.has_value())
338 m_cachedThumbnail = getAttachedPicture(m_context.get());
340 if (!m_cachedThumbnail->isNull())
// Merge metadata of each currently active stream per track type.
343 for (
auto trackType :
345 const auto streamIndex = m_currentAVStreamIndex[trackType];
346 if (streamIndex >= 0)
347 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
// Fragment of a track-switch member function (original lines 356-368):
// validates streamNumber, no-ops if already selected, records the
// request, resolves the AVStream index and updates the current index.
// The signature and the early-return bodies were dropped by extraction.
356 if (streamNumber < 0 || streamNumber >= m_streamMap[
type].
size())
// Already the selected stream -> nothing to do (return body missing).
358 if (m_requestedStreams[
type] == streamNumber)
360 m_requestedStreams[
type] = streamNumber;
361 const int avStreamIndex = m_streamMap[
type].value(streamNumber).avStreamIndex;
363 const int oldIndex = m_currentAVStreamIndex[
type];
// Trace the transition for debugging; the tail of the chain (line 365+)
// is missing here.
364 qCDebug(qLcMediaDataHolder) <<
">>>>> change track" <<
type <<
"from" << oldIndex <<
"to"
368 m_currentAVStreamIndex[
type] = avStreamIndex;
// Accessor: returns the stream list for a given track type by const
// reference (no copy). Parameter line and braces (original 381-384)
// were dropped by extraction.
380const QList<MediaDataHolder::StreamInfo> &MediaDataHolder::streamInfo(
385 return m_streamMap[trackType];