2019-02-17 15:08:29 +04:00
|
|
|
/*
|
|
|
|
This file is part of Telegram Desktop,
|
|
|
|
the official desktop application for the Telegram messaging service.
|
|
|
|
|
|
|
|
For license and copyright information please follow this link:
|
|
|
|
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|
|
|
*/
|
|
|
|
#include "media/streaming/media_streaming_video_track.h"
|
|
|
|
|
2021-06-04 13:50:41 +04:00
|
|
|
#include "ffmpeg/ffmpeg_utility.h"
|
2019-02-21 15:15:44 +04:00
|
|
|
#include "media/audio/media_audio.h"
|
2019-02-17 15:08:29 +04:00
|
|
|
#include "base/concurrent_timer.h"
|
2021-06-24 10:43:53 +04:00
|
|
|
#include "core/crash_reports.h"
|
2019-02-17 15:08:29 +04:00
|
|
|
|
2021-08-23 12:17:40 +03:00
|
|
|
#include "zlib.h"
|
2021-08-23 20:29:40 +03:00
|
|
|
#include <cfenv>
|
2021-08-23 12:17:40 +03:00
|
|
|
|
2021-08-23 17:58:59 +03:00
|
|
|
extern "C" {
|
|
|
|
extern int __isa_available;
|
|
|
|
}
|
|
|
|
|
2021-08-23 12:17:40 +03:00
|
|
|
#define TO_LOG(x) debugLog(QString x)
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
namespace Media {
|
|
|
|
namespace Streaming {
|
|
|
|
namespace {
|
|
|
|
|
2020-03-23 16:59:46 +04:00
|
|
|
constexpr auto kMaxFrameArea = 3840 * 2160; // usual 4K
|
2019-02-17 15:08:29 +04:00
|
|
|
constexpr auto kDisplaySkipped = crl::time(-1);
|
2019-03-06 17:00:03 +04:00
|
|
|
constexpr auto kFinishedPosition = std::numeric_limits<crl::time>::max();
|
2019-02-17 15:08:29 +04:00
|
|
|
static_assert(kDisplaySkipped != kTimeUnknown);
|
|
|
|
|
2021-06-04 13:50:41 +04:00
|
|
|
// Converts one decoded YUV420 frame to a premade ARGB32 QImage using
// libswscale. Returns a null QImage if the swscale context can't be made.
[[nodiscard]] QImage ConvertToARGB32(const FrameYUV420 &data) {
	Expects(data.y.data != nullptr);
	Expects(data.u.data != nullptr);
	Expects(data.v.data != nullptr);
	Expects(!data.size.isEmpty());

	//if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
	//	resize.transpose();
	//}

	auto result = FFmpeg::CreateFrameStorage(data.size);
	// Same source and destination size: pure pixel format conversion,
	// YUV420P -> BGRA (Qt's ARGB32 layout on little-endian).
	const auto swscale = FFmpeg::MakeSwscalePointer(
		data.size,
		AV_PIX_FMT_YUV420P,
		data.size,
		AV_PIX_FMT_BGRA);
	if (!swscale) {
		return QImage();
	}

	// AV_NUM_DATA_POINTERS defined in AVFrame struct.
	// Planes: Y, U, V; the remaining slots stay null / zero.
	const uint8_t *srcData[AV_NUM_DATA_POINTERS] = {
		static_cast<const uint8_t*>(data.y.data),
		static_cast<const uint8_t*>(data.u.data),
		static_cast<const uint8_t*>(data.v.data),
		nullptr,
	};
	int srcLinesize[AV_NUM_DATA_POINTERS] = {
		data.y.stride,
		data.u.stride,
		data.v.stride,
		0,
	};
	// Single interleaved destination plane inside the QImage buffer.
	uint8_t *dstData[AV_NUM_DATA_POINTERS] = { result.bits(), nullptr };
	int dstLinesize[AV_NUM_DATA_POINTERS] = { result.bytesPerLine(), 0 };

	sws_scale(
		swscale.get(),
		srcData,
		srcLinesize,
		0,
		data.size.height(),
		dstData,
		dstLinesize);

	return result;
}
|
|
|
|
|
2021-08-24 12:42:27 +03:00
|
|
|
// std::round() wrapper working around a rare environment where rounding
// a non-NAN value still yields NAN (observed broken FP environment /
// x87 state in the wild). Retries up to three times, clearing the FP
// exception flags in between, and crashes with annotations if it never
// recovers.
[[nodiscard]] float64 SafeRound(float64 value) {
	Expects(!std::isnan(value));

	// Fast path: rounding worked as it should.
	if (const auto result = std::round(value); !std::isnan(result)) {
		return result;
	}
	// Remember which FP exception flags were raised at the failure.
	const auto errors = std::fetestexcept(FE_ALL_EXCEPT);
	LOG(("Streaming Error: Got NAN in std::round(%1), fe: %2."
		).arg(value
		).arg(errors));
	// Second attempt without touching the FP environment.
	if (const auto result = std::round(value); !std::isnan(result)) {
		return result;
	}
	// Third attempt after clearing all FP exception flags.
	std::feclearexcept(FE_ALL_EXCEPT);
	if (const auto result = std::round(value); !std::isnan(result)) {
		return result;
	}
	// Still broken - record everything we know and abort.
	CrashReports::SetAnnotation("FE-Error-Value", QString::number(value));
	CrashReports::SetAnnotation("FE-Errors-Were", QString::number(errors));
	CrashReports::SetAnnotation(
		"FE-Errors-Now",
		QString::number(std::fetestexcept(FE_ALL_EXCEPT)));
	Unexpected("NAN after third std::round.");
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
} // namespace
|
|
|
|
|
|
|
|
// Worker-side implementation of a video track. Lives on a crl queue,
// decodes packets into frames and presents them through the Shared
// state that is accessed from both the worker and the main thread.
class VideoTrackObject final {
public:
	using Frame = VideoTrack::Frame;
	using Shared = VideoTrack::Shared;

	VideoTrackObject(
		crl::weak_on_queue<VideoTrackObject> weak,
		const PlaybackOptions &options,
		not_null<Shared*> shared,
		Stream &&stream,
		const AudioMsgId &audioId,
		FnMut<void(const Information &)> ready,
		Fn<void(Error)> error);

	// Accepts a batch of demuxed packets; an empty first packet marks
	// the end of the stream.
	void process(std::vector<FFmpeg::Packet> &&packets);

	[[nodiscard]] rpl::producer<> checkNextFrame() const;
	[[nodiscard]] rpl::producer<> waitingForData() const;

	void pause(crl::time time);
	void resume(crl::time time);
	void setSpeed(float64 speed);
	void setWaitForMarkAsShown(bool wait);
	void interrupt();
	void frameShown();
	void addTimelineDelay(crl::time delayed);
	void updateFrameRequest(
		const Instance *instance,
		const FrameRequest &request);
	void removeFrameRequest(const Instance *instance);

	// Converts a decoded frame to the formats requested by consumers.
	void rasterizeFrame(not_null<Frame*> frame);
	[[nodiscard]] bool requireARGB32() const;

private:
	enum class FrameResult {
		Done,
		Error,
		Waiting,
		Looped,
		Finished,
	};
	// Either nothing happened (null), a terminal/step result, or a
	// delay after which the next check should run.
	using ReadEnoughState = std::variant<
		v::null_t,
		FrameResult,
		Shared::PrepareNextCheck>;

	void fail(Error error);
	[[nodiscard]] bool interrupted() const;
	[[nodiscard]] bool tryReadFirstFrame(FFmpeg::Packet &&packet);
	[[nodiscard]] bool fillStateFromFrame();
	[[nodiscard]] bool processFirstFrame();
	void queueReadFrames(crl::time delay = 0);
	void readFrames();
	[[nodiscard]] ReadEnoughState readEnoughFrames(crl::time trackTime);
	[[nodiscard]] FrameResult readFrame(not_null<Frame*> frame);
	void fillRequests(not_null<Frame*> frame) const;
	[[nodiscard]] QSize chooseOriginalResize() const;
	void presentFrameIfNeeded();
	void callReady();
	[[nodiscard]] bool loopAround();
	[[nodiscard]] crl::time computeDuration() const;
	[[nodiscard]] int durationByPacket(const FFmpeg::Packet &packet);

	// Force frame position to be clamped to [0, duration] and monotonic.
	[[nodiscard]] crl::time currentFramePosition() const;

	[[nodiscard]] TimePoint trackTime() const;

	// Crash-debugging helpers for a "bad time value" investigation.
	void debugAssertKnownTime(int step, crl::time time) const;
	void debugLog(const QString &entry) const;

	const crl::weak_on_queue<VideoTrackObject> _weak;
	PlaybackOptions _options;

	// Main thread wrapper destructor will set _shared back to nullptr.
	// All queued method calls after that should be discarded.
	Shared *_shared = nullptr;

	Stream _stream;
	AudioMsgId _audioId;
	bool _readTillEnd = false;
	FnMut<void(const Information &)> _ready;
	Fn<void(Error)> _error;
	crl::time _pausedTime = kTimeUnknown;
	crl::time _resumedTime = kTimeUnknown;
	// Stream duration computed from packets when the header lacks one.
	int _durationByLastPacket = 0;
	mutable TimePoint _syncTimePoint;
	// Accumulated duration of already-finished loop iterations.
	crl::time _loopingShift = 0;
	rpl::event_stream<> _checkNextFrame;
	rpl::event_stream<> _waitingForData;
	// Per-consumer frame requests, keyed by consumer instance.
	base::flat_map<const Instance*, FrameRequest> _requests;

	bool _queued = false;
	base::ConcurrentTimer _readFramesTimer;

	// For initial frame skipping for an exact seek.
	FFmpeg::FramePointer _initialSkippingFrame;

	// In-memory log attached to crash reports by debugAssertKnownTime.
	mutable QStringList _debugLog;

};
|
|
|
|
|
|
|
|
// Runs on the worker queue. Takes ownership of the stream; `shared`
// stays owned by the main-thread wrapper which resets it on destruction.
VideoTrackObject::VideoTrackObject(
	crl::weak_on_queue<VideoTrackObject> weak,
	const PlaybackOptions &options,
	not_null<Shared*> shared,
	Stream &&stream,
	const AudioMsgId &audioId,
	FnMut<void(const Information &)> ready,
	Fn<void(Error)> error)
: _weak(std::move(weak))
, _options(options)
, _shared(shared)
, _stream(std::move(stream))
, _audioId(audioId)
, _ready(std::move(ready))
, _error(std::move(error))
, _readFramesTimer(_weak, [=] { readFrames(); }) {
	Expects(_stream.duration > 1);
	Expects(_ready != nullptr);
	Expects(_error != nullptr);

	// Record the full starting configuration in the debug log.
	TO_LOG(("created,speed:%1,mode:%2,position:%3,sync:%4,"
		"loop:%5,wait:%6,duration:%7,initialized:%8,isa:%9"
		).arg(options.speed
		).arg(int(options.mode)
		).arg(options.position
		).arg(options.syncVideoByAudio ? "true" : "false"
		).arg(options.loop ? "true" : "false"
		).arg(options.waitForMarkAsShown ? "true" : "false"
		).arg(_stream.duration
		).arg(_shared->initialized() ? "true" : "false"
		).arg(__isa_available));
}
|
|
|
|
|
2019-03-07 17:23:19 +04:00
|
|
|
// Producer that fires whenever a new frame may be ready for display.
// Completes immediately after interruption; before the first present
// it waits for a real event, afterwards it also fires once on start.
rpl::producer<> VideoTrackObject::checkNextFrame() const {
	if (interrupted()) {
		return rpl::complete<>() | rpl::type_erased();
	} else if (!_shared->firstPresentHappened()) {
		return _checkNextFrame.events() | rpl::type_erased();
	}
	return _checkNextFrame.events_starting_with({});
}
|
|
|
|
|
2019-02-22 18:28:10 +04:00
|
|
|
// Producer that fires when the decoder ran out of queued packets.
// After interruption it never fires.
rpl::producer<> VideoTrackObject::waitingForData() const {
	if (interrupted()) {
		return rpl::never() | rpl::type_erased();
	}
	return _waitingForData.events();
}
|
|
|
|
|
2019-12-19 18:14:05 +03:00
|
|
|
// Consumes a batch of demuxed packets. An empty front packet is the
// end-of-stream marker. Until the first frame was decoded (_shared not
// yet initialized) packets are fed one by one into tryReadFirstFrame();
// afterwards the whole remaining batch is queued for readFrames().
void VideoTrackObject::process(std::vector<FFmpeg::Packet> &&packets) {
	if (interrupted() || packets.empty()) {
		return;
	}
	if (packets.front().empty()) {
		TO_LOG(("process,packets:%1,till_end").arg(packets.size()));
		// The end marker is always sent alone.
		Assert(packets.size() == 1);
		_readTillEnd = true;
	} else if (!_readTillEnd) {
		TO_LOG(("process,packets:%1,till:%2").arg(packets.size()).arg(durationByPacket(packets.back())));
		//for (const auto &packet : packets) {
		//	// Maybe it is enough to count by list.back()?.. hope so.
		//	accumulate_max(
		//		_durationByLastPacket,
		//		durationByPacket(packet));
		//	if (interrupted()) {
		//		return;
		//	}
		//}
		// Track the maximum observed stream duration using only the
		// last packet of the batch (packets come in decode order).
		accumulate_max(
			_durationByLastPacket,
			durationByPacket(packets.back()));
		// durationByPacket() may fail() and interrupt the track.
		if (interrupted()) {
			return;
		}
	}
	for (auto i = begin(packets), e = end(packets); i != e; ++i) {
		if (_shared->initialized()) {
			// First frame found: queue the rest in bulk and schedule
			// decoding.
			TO_LOG(("queueing_packets,count:%1").arg(e - i));
			_stream.queue.insert(
				end(_stream.queue),
				std::make_move_iterator(i),
				std::make_move_iterator(e));
			queueReadFrames();
			break;
		} else if (!tryReadFirstFrame(std::move(*i))) {
			TO_LOG(("fail_read_first_frame"));
			fail(Error::InvalidData);
			break;
		}
	}
}
|
|
|
|
|
2019-06-26 17:04:38 +02:00
|
|
|
// Derives a stream duration estimate from a packet's pts + duration.
// Returns 0 when the value is not needed (already known) or on failure
// (after reporting Error::InvalidData).
int VideoTrackObject::durationByPacket(const FFmpeg::Packet &packet) {
	// We've set this value on the first cycle, or the header already
	// carried a real duration - nothing to compute.
	const auto alreadyKnown = _loopingShift
		|| (_stream.duration != kDurationUnavailable);
	if (alreadyKnown) {
		return 0;
	}
	const auto computed = FFmpeg::DurationByPacket(packet, _stream.timeBase);
	if (computed < 0) {
		fail(Error::InvalidData);
		return 0;
	}

	Ensures(computed > 0);
	return computed;
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
// Schedules readFrames(): with a positive delay through the concurrent
// timer, otherwise asynchronously on the worker queue. The _queued flag
// collapses multiple immediate requests into a single queued call.
void VideoTrackObject::queueReadFrames(crl::time delay) {
	if (delay > 0) {
		TO_LOG(("queue_with_delay:%1").arg(delay));
		_readFramesTimer.callOnce(delay);
	} else if (!_queued) {
		TO_LOG(("queue_without_delay"));
		_queued = true;
		_weak.with([](VideoTrackObject &that) {
			that.TO_LOG(("unqueued_without_delay"));
			that._queued = false;
			that.readFrames();
		});
	} else {
		// An immediate call is already pending - nothing to do.
		TO_LOG(("queued_already"));
	}
}
|
|
|
|
|
2021-08-21 09:33:20 +03:00
|
|
|
void VideoTrackObject::debugAssertKnownTime(int step, crl::time time) const {
|
|
|
|
if (time < kTimeUnknown / 2) {
|
|
|
|
CrashReports::SetAnnotation("DebugStep", QString::number(step));
|
|
|
|
CrashReports::SetAnnotation("CheckedValue", QString::number(time));
|
|
|
|
CrashReports::SetAnnotation(
|
|
|
|
"_syncTimePoint.trackTime",
|
|
|
|
QString::number(_syncTimePoint.trackTime));
|
|
|
|
CrashReports::SetAnnotation(
|
|
|
|
"_syncTimePoint.worldTime",
|
|
|
|
QString::number(_syncTimePoint.worldTime));
|
|
|
|
CrashReports::SetAnnotation(
|
|
|
|
"_pausedTime",
|
|
|
|
QString::number(_pausedTime));
|
|
|
|
CrashReports::SetAnnotation(
|
|
|
|
"_resumedTime",
|
|
|
|
QString::number(_resumedTime));
|
|
|
|
if (!_shared) {
|
|
|
|
CrashReports::SetAnnotation("_shared", "NULL");
|
|
|
|
} else {
|
|
|
|
CrashReports::SetAnnotation(
|
|
|
|
"_shared->initialized",
|
|
|
|
_shared->initialized() ? "true" : "false");
|
|
|
|
}
|
|
|
|
CrashReports::SetAnnotation("Now", QString::number(crl::now()));
|
2021-08-23 12:17:40 +03:00
|
|
|
|
|
|
|
const auto log = _debugLog.join('\n').toUtf8();
|
|
|
|
const auto compressed = [&] {
|
|
|
|
auto result = QByteArray(log.size() + 1024, char(0));
|
|
|
|
z_stream stream;
|
|
|
|
stream.zalloc = nullptr;
|
|
|
|
stream.zfree = nullptr;
|
|
|
|
stream.opaque = nullptr;
|
|
|
|
stream.avail_in = log.size();
|
|
|
|
stream.next_in = reinterpret_cast<Bytef*>(const_cast<char*>(log.data()));
|
|
|
|
stream.avail_out = result.size();
|
|
|
|
stream.next_out = reinterpret_cast<Bytef*>(result.data());
|
|
|
|
int res = deflateInit2(&stream, Z_BEST_COMPRESSION, Z_DEFLATED, -MAX_WBITS, MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY);
|
|
|
|
if (res != Z_OK) {
|
|
|
|
CrashReports::SetAnnotation("Log", "deflatInit2:" + QString::number(res));
|
|
|
|
return QByteArray();
|
|
|
|
}
|
|
|
|
{
|
|
|
|
const auto guard = gsl::finally([&] { deflateEnd(&stream); });
|
|
|
|
int res = deflate(&stream, Z_FINISH);
|
|
|
|
if (res != Z_OK && res != Z_STREAM_END) {
|
|
|
|
CrashReports::SetAnnotation("Log", "deflate:" + QString::number(res));
|
|
|
|
return QByteArray();
|
|
|
|
} else if (!stream.avail_out) {
|
|
|
|
CrashReports::SetAnnotation("Log", "deflate:no_avail_out");
|
|
|
|
return QByteArray();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
result.resize(result.size() - stream.avail_out);
|
|
|
|
return result;
|
|
|
|
}();
|
|
|
|
if (!compressed.isEmpty()) {
|
|
|
|
CrashReports::SetAnnotation("Log", QString::fromUtf8(compressed.toBase64()));
|
|
|
|
}
|
|
|
|
|
2021-08-21 09:33:20 +03:00
|
|
|
Unexpected("Bad time value.");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-23 12:17:40 +03:00
|
|
|
void VideoTrackObject::debugLog(const QString &entry) const {
|
|
|
|
constexpr auto kMaxEntries = 2048;
|
|
|
|
if (_debugLog.size() >= kMaxEntries) {
|
|
|
|
return;
|
|
|
|
} else if (_debugLog.size() == kMaxEntries - 1) {
|
|
|
|
_debugLog.push_back("...");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
_debugLog.push_back("stp.worldTime:"
|
|
|
|
+ QString::number(_syncTimePoint.worldTime)
|
|
|
|
+ ";stp.trackTime:" + QString::number(_syncTimePoint.trackTime)
|
2021-08-24 12:42:27 +03:00
|
|
|
+ ";fe:" + QString::number(std::fetestexcept(FE_ALL_EXCEPT))
|
2021-08-23 12:17:40 +03:00
|
|
|
+ ";" + entry);
|
|
|
|
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
// Main decode step: keeps decoding frames until the shared state says
// enough are buffered (or a terminal result happens), presenting and
// rescheduling along the way.
void VideoTrackObject::readFrames() {
	if (interrupted()) {
		return;
	}
	auto time = trackTime().trackTime;
	TO_LOG(("reading_frames,time:%1").arg(time));
	debugAssertKnownTime(1, time);
	while (true) {
		const auto result = readEnoughFrames(time);
		v::match(result, [&](FrameResult result) {
			if (result == FrameResult::Done
				|| result == FrameResult::Finished) {
				presentFrameIfNeeded();
			} else if (result == FrameResult::Looped) {
				// The stream restarted: shift the local track time
				// back by one full duration so it stays stream-relative.
				const auto duration = computeDuration();
				Assert(duration != kDurationUnavailable);
				time -= duration;
				debugAssertKnownTime(2, time);
			}
		}, [&](Shared::PrepareNextCheck delay) {
			Expects(delay == kTimeUnknown || delay > 0);

			if (delay != kTimeUnknown) {
				queueReadFrames(delay);
			}
		}, [](v::null_t) {
		});
		// v::null means "keep looping"; anything else ends this pass.
		if (!v::is_null(result)) {
			break;
		}
	}
}
|
|
|
|
|
|
|
|
// Asks the shared state whether a frame slot needs filling; if so,
// decodes into it, skipping stale frames when allowed. Returns v::null
// to continue the outer loop, a FrameResult to stop, or a delay for the
// next check.
auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
-> ReadEnoughState {
	TO_LOG(("reading_enough_frames,time:%1").arg(trackTime));
	const auto dropStaleFrames = !_options.waitForMarkAsShown;
	const auto state = _shared->prepareState(trackTime, dropStaleFrames);
	return v::match(state, [&](Shared::PrepareFrame frame)
	-> ReadEnoughState {
		while (true) {
			const auto result = readFrame(frame);
			if (result != FrameResult::Done) {
				TO_LOG(("read_enough_state,frame_result:%1").arg(int(result)));
				return result;
			} else if (!dropStaleFrames
				|| !VideoTrack::IsStale(frame, trackTime)) {
				// Got a usable (non-stale) frame into the slot.
				TO_LOG(("read_enough_state,null"));
				return v::null;
			}
			// Frame was stale - decode the next one into the same slot.
		}
	}, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState {
		TO_LOG(("read_enough_state,delay:%1").arg(delay));
		Expects(delay == kTimeUnknown || delay > 0); // Debugging crash.

		return delay;
	}, [&](v::null_t) -> ReadEnoughState {
		// Buffers are full - nothing left to read this pass.
		return FrameResult::Done;
	});
}
|
|
|
|
|
2019-03-13 18:58:50 +04:00
|
|
|
bool VideoTrackObject::loopAround() {
|
|
|
|
const auto duration = computeDuration();
|
|
|
|
if (duration == kDurationUnavailable) {
|
2021-08-23 12:17:40 +03:00
|
|
|
TO_LOG(("loop_around,error_duration_unavailable"));
|
2019-03-13 18:58:50 +04:00
|
|
|
LOG(("Streaming Error: "
|
|
|
|
"Couldn't find out the real video stream duration."));
|
|
|
|
return false;
|
|
|
|
}
|
2019-03-05 11:40:25 +04:00
|
|
|
avcodec_flush_buffers(_stream.codec.get());
|
2019-03-13 18:58:50 +04:00
|
|
|
_loopingShift += duration;
|
|
|
|
_readTillEnd = false;
|
2021-08-23 12:17:40 +03:00
|
|
|
TO_LOG(("loop_around,duration:%1,shift:%2").arg(duration).arg(_loopingShift));
|
2019-03-13 18:58:50 +04:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Best known full stream duration: the container value when available,
// otherwise the packet-derived value once a full pass was completed.
crl::time VideoTrackObject::computeDuration() const {
	if (_stream.duration != kDurationUnavailable) {
		return _stream.duration;
	}
	// After looping (or reading till the end) _durationByLastPacket
	// already covers the whole stream.
	const auto fullPassDone = _loopingShift || _readTillEnd;
	if (fullPassDone && _durationByLastPacket) {
		return _durationByLastPacket;
	}
	return kDurationUnavailable;
}
|
|
|
|
|
|
|
|
// Decodes the next frame into the given slot. Maps FFmpeg error codes
// to FrameResult: EOF -> Finished / Looped depending on _options.loop,
// EAGAIN with packets pending -> Waiting, anything else -> Error.
auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
	if (const auto error = ReadNextFrame(_stream)) {
		if (error.code() == AVERROR_EOF) {
			if (!_options.loop) {
				TO_LOG(("read_frame,error_eof"));
				// Mark the slot as the terminal "finished" frame.
				frame->position = kFinishedPosition;
				frame->displayed = kTimeUnknown;
				return FrameResult::Finished;
			} else if (loopAround()) {
				TO_LOG(("read_frame,error_looped"));
				return FrameResult::Looped;
			} else {
				TO_LOG(("read_frame,error_bad_data"));
				fail(Error::InvalidData);
				return FrameResult::Error;
			}
		} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
			// EAGAIN after the stream ended is also invalid data.
			TO_LOG(("read_frame,error_bad_data:%1,till_end:%2").arg(error.code()).arg(_readTillEnd ? "true" : "false"));
			fail(Error::InvalidData);
			return FrameResult::Error;
		}
		// EAGAIN: decoder wants more packets - ask the demuxer.
		TO_LOG(("read_frame,error_again"));
		Assert(_stream.queue.empty());
		_waitingForData.fire({});
		return FrameResult::Waiting;
	}
	const auto position = currentFramePosition();
	if (position == kTimeUnknown) {
		TO_LOG(("read_frame,error_current_position:%1").arg(position));
		fail(Error::InvalidData);
		return FrameResult::Error;
	}
	// Hand the decoded AVFrame to the slot, recycling its old one.
	std::swap(frame->decoded, _stream.frame);
	frame->position = position;
	frame->displayed = kTimeUnknown;
	TO_LOG(("read_frame,current_position:%1").arg(position));
	return FrameResult::Done;
}
|
|
|
|
|
2019-12-12 09:56:08 +03:00
|
|
|
// Synchronizes frame->prepared with the current _requests: a single
// merge pass over both sorted maps that removes entries for consumers
// that went away and inserts entries for new ones, keeping existing
// prepared images where the consumer is unchanged.
void VideoTrackObject::fillRequests(not_null<Frame*> frame) const {
	auto i = frame->prepared.begin();
	for (const auto &[instance, request] : _requests) {
		// Drop prepared entries for consumers no longer present.
		while (i != frame->prepared.end() && i->first < instance) {
			i = frame->prepared.erase(i);
		}
		// Insert an entry for a consumer that has no prepared slot yet.
		if (i == frame->prepared.end() || i->first > instance) {
			i = frame->prepared.emplace(instance, request).first;
		}
		++i;
	}
	// Drop the tail of prepared entries past the last known consumer.
	while (i != frame->prepared.end()) {
		i = frame->prepared.erase(i);
	}
}
|
|
|
|
|
|
|
|
// Picks a single resize target covering all consumer requests, or a
// null QSize when any request wants the original size / the requested
// sizes are incomparable (one wider, another taller).
QSize VideoTrackObject::chooseOriginalResize() const {
	auto best = QSize();
	for (const auto &[consumer, request] : _requests) {
		const auto &resize = request.resize;
		if (resize.isEmpty()) {
			return QSize();
		}
		const auto widerOrEqual = (resize.width() >= best.width());
		const auto tallerOrEqual = (resize.height() >= best.height());
		if (widerOrEqual && tallerOrEqual) {
			best = resize;
		} else if (widerOrEqual || tallerOrEqual) {
			// Incomparable with the current best - no single winner.
			return QSize();
		}
	}
	return best;
}
|
|
|
|
|
2021-06-03 16:57:48 +04:00
|
|
|
bool VideoTrackObject::requireARGB32() const {
|
|
|
|
for (const auto &[_, request] : _requests) {
|
|
|
|
if (!request.requireARGB32) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2020-07-17 22:59:25 +04:00
|
|
|
// Turns the decoded AVFrame into displayable data: keeps the YUV420
// planes when no consumer needs ARGB32, otherwise converts to an
// ARGB32 QImage, then prepares the per-consumer variants.
void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
	Expects(frame->position != kFinishedPosition);

	fillRequests(frame);
	frame->format = FrameFormat::None;
	if (frame->decoded->format == AV_PIX_FMT_YUV420P && !requireARGB32()) {
		// Fast path: hand out the YUV planes directly.
		frame->alpha = false;
		frame->yuv420 = ExtractYUV420(_stream, frame->decoded.get());
		if (frame->yuv420.size.isEmpty()
			|| frame->yuv420.chromaSize.isEmpty()
			|| !frame->yuv420.y.data
			|| !frame->yuv420.u.data
			|| !frame->yuv420.v.data) {
			frame->prepared.clear();
			fail(Error::InvalidData);
			return;
		}
		// Drop stale ARGB32 images left over from a previous pass.
		if (!frame->original.isNull()) {
			frame->original = QImage();
			for (auto &[_, prepared] : frame->prepared) {
				prepared.image = QImage();
			}
		}
		frame->format = FrameFormat::YUV420;
	} else {
		// Conversion path: produce an ARGB32 original image.
		frame->alpha = (frame->decoded->format == AV_PIX_FMT_BGRA);
		frame->yuv420.size = {
			frame->decoded->width,
			frame->decoded->height
		};
		frame->original = ConvertFrame(
			_stream,
			frame->decoded.get(),
			chooseOriginalResize(),
			std::move(frame->original));
		if (frame->original.isNull()) {
			frame->prepared.clear();
			fail(Error::InvalidData);
			return;
		}
		frame->format = FrameFormat::ARGB32;
	}

	VideoTrack::PrepareFrameByRequests(frame, _stream.rotation);

	Ensures(VideoTrack::IsRasterized(frame));
}
|
|
|
|
|
|
|
|
// Tries to move a rasterized frame into the "being displayed" slot.
// Skipped while paused or before the first resume. On the finished
// position the track interrupts itself.
void VideoTrackObject::presentFrameIfNeeded() {
	if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
		return;
	}
	const auto dropStaleFrames = !_options.waitForMarkAsShown;
	const auto time = trackTime();
	TO_LOG(("present_frame_check,world:%1,track:%2").arg(time.worldTime).arg(time.trackTime));
	const auto presented = _shared->presentFrame(
		this,
		time,
		_options.speed,
		dropStaleFrames);
	TO_LOG(("present_frame_check,add:%1,position:%2,next:%3").arg(presented.addedWorldTimeDelay).arg(presented.displayPosition).arg(presented.nextCheckDelay));
	// Presenting may have waited - shift the timeline accordingly.
	addTimelineDelay(presented.addedWorldTimeDelay);
	if (presented.displayPosition == kFinishedPosition) {
		interrupt();
		// Reset so subscribers see a completed stream.
		_checkNextFrame = rpl::event_stream<>();
		return;
	} else if (presented.displayPosition != kTimeUnknown) {
		_checkNextFrame.fire({});
	}
	if (presented.nextCheckDelay != kTimeUnknown) {
		Assert(presented.nextCheckDelay >= 0);
		queueReadFrames(presented.nextCheckDelay);
	}
}
|
|
|
|
|
2019-02-21 18:57:00 +04:00
|
|
|
// Records the world time at which playback was paused. Only the first
// pause time matters until the next resume().
void VideoTrackObject::pause(crl::time time) {
	Expects(_syncTimePoint.valid());

	TO_LOG(("pause,time:%1,paused:%2").arg(time).arg(_pausedTime));
	if (interrupted()) {
		return;
	}
	const auto alreadyPaused = (_pausedTime != kTimeUnknown);
	if (!alreadyPaused) {
		_pausedTime = time;
	}
}
|
|
|
|
|
|
|
|
// Resumes playback at the given world time: shifts the sync point by
// the paused interval (or re-anchors it on first resume) and restarts
// frame reading.
void VideoTrackObject::resume(crl::time time) {
	Expects(_syncTimePoint.trackTime != kTimeUnknown);

	TO_LOG(("resume,time:%1,paused:%2,resumed:%3").arg(time).arg(_pausedTime).arg(_resumedTime));

	if (interrupted()) {
		return;
	}

	// Resumed time used to validate sync to audio.
	_resumedTime = time;
	if (_pausedTime != kTimeUnknown) {
		Assert(_pausedTime <= time);
		// Exclude the paused interval from the playback timeline.
		_syncTimePoint.worldTime += (time - _pausedTime);
		_pausedTime = kTimeUnknown;
	} else {
		// First resume: anchor the timeline at this world time.
		_syncTimePoint.worldTime = time;
	}
	queueReadFrames();

	Ensures(_syncTimePoint.valid());
	Ensures(_pausedTime == kTimeUnknown);
}
|
|
|
|
|
|
|
|
// Changes the playback speed. The sync point is re-anchored at the
// current moment first, so already-elapsed time keeps the old speed.
void VideoTrackObject::setSpeed(float64 speed) {
	TO_LOG(("set_speed,speed:%1").arg(speed));
	if (interrupted()) {
		return;
	}
	if (_syncTimePoint.valid()) {
		const auto time = trackTime();
		TO_LOG(("set_speed_changing_time,world:%1,track:%2").arg(time.worldTime).arg(time.trackTime));
		_syncTimePoint = time;
		debugAssertKnownTime(3, _syncTimePoint.trackTime);
	}
	_options.speed = speed;
}
|
|
|
|
|
2019-12-19 13:50:33 +03:00
|
|
|
// Toggles whether the decoder waits for the main thread to mark a frame as
// shown before advancing to the next one.
void VideoTrackObject::setWaitForMarkAsShown(bool wait) {
	if (interrupted()) {
		return;
	}
	TO_LOG(("set_wait_for_mark,wait:%1").arg(wait ? "true" : "false"));
	_options.waitForMarkAsShown = wait;
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
bool VideoTrackObject::interrupted() const {
|
2021-08-23 12:17:40 +03:00
|
|
|
if (_shared == nullptr) {
|
|
|
|
TO_LOG(("interruped_true"));
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
2019-02-17 15:08:29 +04:00
|
|
|
}
|
|
|
|
|
2019-12-11 17:01:11 +03:00
|
|
|
// Called (via the wrapper) after the main thread consumed a frame - frees a
// slot, so kick the read-frames loop again.
void VideoTrackObject::frameShown() {
	if (interrupted()) {
		return;
	}
	TO_LOG(("frame_shown"));
	queueReadFrames();
}
|
|
|
|
|
2019-12-11 17:01:11 +03:00
|
|
|
// Accounts for an externally observed display delay by pushing the world
// anchor forward, so subsequent trackTime() extrapolation stays in sync.
void VideoTrackObject::addTimelineDelay(crl::time delayed) {
	Expects(_syncTimePoint.valid());

	if (!delayed) {
		return;
	}
	TO_LOG(("adding_delay:%1").arg(delayed));
	_syncTimePoint.worldTime += delayed;
}
|
|
|
|
|
2019-12-12 09:56:08 +03:00
|
|
|
// Stores (or replaces) the frame preparation request for one consumer
// instance; used later when rasterizing frames for that instance.
void VideoTrackObject::updateFrameRequest(
		const Instance *instance,
		const FrameRequest &request) {
	_requests[instance] = request;
}
|
|
|
|
|
|
|
|
// Drops the stored frame request of a consumer that no longer paints.
void VideoTrackObject::removeFrameRequest(const Instance *instance) {
	_requests.remove(instance);
}
|
|
|
|
|
2019-06-26 17:04:38 +02:00
|
|
|
// Feeds one packet to the decoder and tries to produce the first displayable
// frame at/after _options.position. Returns false on fatal error, true when
// either the first frame was processed or more packets are needed.
bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
	if (ProcessPacket(_stream, std::move(packet)).failed()) {
		TO_LOG(("try_read_first_frame_process_failed"));
		return false;
	}
	TO_LOG(("try_read_first_frame"));
	while (true) {
		if (const auto error = ReadNextFrame(_stream)) {
			if (error.code() == AVERROR_EOF) {
				if (!_initialSkippingFrame) {
					// EOF before any decodable frame at all - real failure.
					TO_LOG(("try_read_first_frame_eof_bad"));
					return false;
				}
				// Return the last valid frame if we seek too far.
				_stream.frame = std::move(_initialSkippingFrame);
				TO_LOG(("try_read_first_frame_eof_to_initial"));
				return processFirstFrame();
			} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
				// Any non-EAGAIN error, or EAGAIN with no packets left, fails.
				TO_LOG(("try_read_first_frame_error,end:%1").arg(_readTillEnd ? "true" : "false"));
				return false;
			} else {
				// Waiting for more packets.
				TO_LOG(("try_read_first_frame_waiting"));
				return true;
			}
		} else if (!fillStateFromFrame()) {
			TO_LOG(("try_read_first_frame_bad_state"));
			return false;
		} else if (_syncTimePoint.trackTime >= _options.position) {
			// Reached (or passed) the requested position - use this frame.
			TO_LOG(("try_read_first_frame_process"));
			return processFirstFrame();
		}

		// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
		// Try skipping frames until one is after the requested position.
		std::swap(_initialSkippingFrame, _stream.frame);
		if (!_stream.frame) {
			_stream.frame = FFmpeg::MakeFramePointer();
		}
	}
}
|
|
|
|
|
2019-02-22 16:39:32 +04:00
|
|
|
// Converts the currently decoded frame to an image, initializes the shared
// frame buffer with it, and reports readiness. Returns false when the frame
// is oversized or conversion fails.
bool VideoTrackObject::processFirstFrame() {
	if (_stream.frame->width * _stream.frame->height > kMaxFrameArea) {
		// Refuse frames above ~4K area - see kMaxFrameArea.
		return false;
	}
	auto frame = ConvertFrame(
		_stream,
		_stream.frame.get(),
		QSize(),
		QImage());
	if (frame.isNull()) {
		return false;
	}
	debugAssertKnownTime(4, _syncTimePoint.trackTime);
	TO_LOG(("process_first_frame"));
	_shared->init(std::move(frame), _syncTimePoint.trackTime);
	callReady();
	queueReadFrames();
	return true;
}
|
|
|
|
|
2019-02-20 17:28:48 +04:00
|
|
|
// Computes the looping-adjusted position of the currently decoded frame,
// clamped into [0, duration - 1]. Returns kTimeUnknown for unknown or
// finished positions.
crl::time VideoTrackObject::currentFramePosition() const {
	const auto raw = FramePosition(_stream);
	const auto bad = (raw == kTimeUnknown) || (raw == kFinishedPosition);
	if (bad) {
		return kTimeUnknown;
	}
	const auto limit = computeDuration() - 1;
	const auto clamped = std::clamp(raw, crl::time(0), limit);
	return _loopingShift + clamped;
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
// Refreshes _syncTimePoint.trackTime from the currently decoded frame.
// Returns false if the frame position is unknown/finished.
bool VideoTrackObject::fillStateFromFrame() {
	const auto position = currentFramePosition();
	if (position == kTimeUnknown) {
		TO_LOG(("fill_state_from_frame_bad"));
		return false;
	}
	TO_LOG(("fill_state_from_frame,position:%1").arg(position));
	_syncTimePoint.trackTime = position;
	debugAssertKnownTime(5, _syncTimePoint.trackTime);
	return true;
}
|
|
|
|
|
|
|
|
// Builds the VideoInformation payload (cover image, rotation-corrected size,
// duration/position state) from the first prepared frame and invokes the
// one-shot _ready callback.
void VideoTrackObject::callReady() {
	Expects(_ready != nullptr);

	const auto frame = _shared->frameForPaint();

	auto data = VideoInformation();
	data.size = FFmpeg::CorrectByAspect(
		frame->original.size(),
		_stream.aspect);
	if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
		// 90/270-degree rotation swaps the reported width and height.
		data.size.transpose();
	}
	data.cover = frame->original;
	data.rotation = _stream.rotation;
	data.state.duration = _stream.duration;
	data.state.position = _syncTimePoint.trackTime;
	data.state.receivedTill = _readTillEnd
		? _stream.duration
		: _syncTimePoint.trackTime;
	TO_LOG(("call_ready,till:%1").arg(data.state.receivedTill));
	// base::take clears _ready so it can only fire once.
	base::take(_ready)({ data });
}
|
|
|
|
|
2019-02-21 20:01:55 +04:00
|
|
|
// Computes the current (worldTime, trackTime) pair by extrapolating from the
// sync point at the configured speed, optionally re-syncing to the audio
// mixer's time point. The large diagnostic section below probes a suspected
// FPU/codegen issue where double->integer conversion yields sentinel values
// (0x8000...). Fix in this revision: the "final:%7" log argument previously
// passed result.trackTime *before* it was assigned, so the log always showed
// the default value - it now logs the computed trackTime.
TimePoint VideoTrackObject::trackTime() const {
	debugAssertKnownTime(7, _syncTimePoint.trackTime);

	auto result = TimePoint();
	// While paused, "now" is frozen at the pause moment.
	result.worldTime = (_pausedTime != kTimeUnknown)
		? _pausedTime
		: crl::now();
	if (!_syncTimePoint) {
		// No valid sync point yet - report the raw track time.
		result.trackTime = _syncTimePoint.trackTime;
		TO_LOG(("track_time,paused:%1,result_world:%2").arg(_pausedTime).arg(result.worldTime));
		return result;
	}
	debugAssertKnownTime(8, _syncTimePoint.worldTime);
	debugAssertKnownTime(9, result.worldTime);

	Assert(_resumedTime != kTimeUnknown);
	if (_options.syncVideoByAudio && _audioId.externalPlayId()) {
		// Prefer the audio mixer clock, but only points newer than resume.
		const auto mixer = Media::Player::mixer();
		const auto point = mixer->getExternalSyncTimePoint(_audioId);
		if (point && point.worldTime > _resumedTime) {
			TO_LOG(("track_time_sync,world:%1,track:%2,resumed:%3").arg(point.worldTime).arg(point.trackTime).arg(_resumedTime));
			_syncTimePoint = point;
			debugAssertKnownTime(6, _syncTimePoint.trackTime);
			debugAssertKnownTime(10, _syncTimePoint.worldTime);
		}
	}
	const auto adjust = (result.worldTime - _syncTimePoint.worldTime);
	const auto adjustSpeed = adjust * _options.speed;
	const auto roundAdjustSpeed = SafeRound(adjustSpeed);
	auto timeRoundAdjustSpeed = crl::time(roundAdjustSpeed);
	// Sentinel values produced by a failed double->integer conversion.
	const auto fpuErrorHappened = [](crl::time value) {
		return uint64(value) == 0x8000'0000'0000'0000ULL
			|| uint64(value) == 0x8000'0000ULL
			|| uint64(value) == 0xFFFF'FFFF'FFFF'FFFFULL
			|| uint64(value) == 0xFFFF'FFFFULL;
	};
	if (roundAdjustSpeed > -1000'000'000.
		&& roundAdjustSpeed < 1000'000'000.
		&& fpuErrorHappened(timeRoundAdjustSpeed)) {
		// Diagnostics: the value is in range but the cast produced a
		// sentinel. Retry the conversion through several paths and log
		// which (if any) recovers a sane value.
		TO_LOG(("BAD1,round:%1").arg(roundAdjustSpeed));
		timeRoundAdjustSpeed = crl::time(roundAdjustSpeed);
		if (!fpuErrorHappened(timeRoundAdjustSpeed)) {
			TO_LOG(("GOOD2,round:%1,result:%2").arg(roundAdjustSpeed).arg(timeRoundAdjustSpeed));
			debugAssertKnownTime(-1, kTimeUnknown);
		} else {
			TO_LOG(("BAD2,round:%1").arg(roundAdjustSpeed));
		}
		const auto floatRoundAdjustSpeed = float(roundAdjustSpeed);
		timeRoundAdjustSpeed = crl::time(floatRoundAdjustSpeed);
		if (!fpuErrorHappened(timeRoundAdjustSpeed)) {
			TO_LOG(("GOOD3,round:%1,result:%2").arg(floatRoundAdjustSpeed).arg(timeRoundAdjustSpeed));
			debugAssertKnownTime(-2, kTimeUnknown);
		} else {
			TO_LOG(("BAD3,round:%1").arg(floatRoundAdjustSpeed));
		}
		const auto intRoundAdjustSpeet = int(roundAdjustSpeed);
		timeRoundAdjustSpeed = crl::time(intRoundAdjustSpeet);
		if (!fpuErrorHappened(timeRoundAdjustSpeed)) {
			TO_LOG(("GOOD4,int:%1,result:%2").arg(intRoundAdjustSpeet).arg(timeRoundAdjustSpeed));
			debugAssertKnownTime(-3, kTimeUnknown);
		} else {
			TO_LOG(("BAD4,int:%1").arg(intRoundAdjustSpeet));
		}
		const auto intFloatRoundAdjustSpeed = int(floatRoundAdjustSpeed);
		timeRoundAdjustSpeed = crl::time(intFloatRoundAdjustSpeed);
		if (!fpuErrorHappened(timeRoundAdjustSpeed)) {
			TO_LOG(("GOOD5,int:%1,result:%2").arg(intFloatRoundAdjustSpeed).arg(timeRoundAdjustSpeed));
			debugAssertKnownTime(-4, kTimeUnknown);
		} else {
			TO_LOG(("BAD5,int:%1").arg(intFloatRoundAdjustSpeed));
		}
		const auto uint64RoundAdjustSpeed = uint64((roundAdjustSpeed >= 0.)
			? roundAdjustSpeed
			: -roundAdjustSpeed);
		if (!fpuErrorHappened(uint64RoundAdjustSpeed)) {
			TO_LOG(("GOOD6,round:%1,uint:%2").arg(roundAdjustSpeed).arg(uint64RoundAdjustSpeed));
			debugAssertKnownTime(-5, kTimeUnknown);
		} else {
			TO_LOG(("BAD6,uint:%1").arg(uint64RoundAdjustSpeed));
		}
		const auto uint64FloatRoundAdjustSpeed = uint64((floatRoundAdjustSpeed >= 0.)
			? floatRoundAdjustSpeed
			: -floatRoundAdjustSpeed);
		if (!fpuErrorHappened(uint64FloatRoundAdjustSpeed)) {
			TO_LOG(("GOOD7,round:%1,uint:%2").arg(floatRoundAdjustSpeed).arg(uint64FloatRoundAdjustSpeed));
			debugAssertKnownTime(-6, kTimeUnknown);
		} else {
			TO_LOG(("BAD7,uint:%1").arg(uint64FloatRoundAdjustSpeed));
		}
		const auto uint32RoundAdjustSpeed = uint32((roundAdjustSpeed >= 0.)
			? roundAdjustSpeed
			: -roundAdjustSpeed);
		if (!fpuErrorHappened(uint32RoundAdjustSpeed)) {
			TO_LOG(("GOOD8,round:%1,uint:%2").arg(roundAdjustSpeed).arg(uint32RoundAdjustSpeed));
			debugAssertKnownTime(-7, kTimeUnknown);
		} else {
			TO_LOG(("BAD8,uint:%1").arg(uint32RoundAdjustSpeed));
		}
		const auto uint32FloatRoundAdjustSpeed = uint32((floatRoundAdjustSpeed >= 0.)
			? floatRoundAdjustSpeed
			: -floatRoundAdjustSpeed);
		if (!fpuErrorHappened(uint32FloatRoundAdjustSpeed)) {
			TO_LOG(("GOOD9,round:%1,uint:%2").arg(floatRoundAdjustSpeed).arg(uint32FloatRoundAdjustSpeed));
			debugAssertKnownTime(-8, kTimeUnknown);
		} else {
			TO_LOG(("BAD9,uint:%1").arg(uint32FloatRoundAdjustSpeed));
		}
		debugAssertKnownTime(-9, kTimeUnknown);
	}
	const auto trackTime = _syncTimePoint.trackTime + timeRoundAdjustSpeed;
	TO_LOG(("track_time_adjusted,world:%1,adjust:%2,speed:%3,delta:%4,rounded:%5,casted:%6,final:%7"
	).arg(result.worldTime
	).arg(adjust
	).arg(_options.speed
	).arg(adjustSpeed
	).arg(roundAdjustSpeed
	).arg(timeRoundAdjustSpeed
	).arg(trackTime)); // Was result.trackTime - unassigned at this point.
	result.trackTime = trackTime;
	debugAssertKnownTime(11, result.trackTime);
	return result;
}
|
|
|
|
|
|
|
|
// Detaches from the shared frame buffer; interrupted() becomes true and all
// further public calls are no-ops.
void VideoTrackObject::interrupt() {
	_shared = nullptr;
}
|
|
|
|
|
2019-03-13 18:58:50 +04:00
|
|
|
// Stops the track and reports the error to the owner.
void VideoTrackObject::fail(Error error) {
	interrupt();
	_error(error);
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
// Seeds frame slot 0 with the cover image and publishes the initialized
// state by storing 0 into _counter (release).
void VideoTrack::Shared::init(QImage &&cover, crl::time position) {
	Expects(!initialized());

	_frames[0].original = std::move(cover);
	_frames[0].position = position;
	_frames[0].format = FrameFormat::ARGB32;

	// Usually main thread sets displayed time before _counter increment.
	// But in this case we update _counter, so we set a fake displayed time.
	_frames[0].displayed = kDisplaySkipped;

	_delay = 0;
	_counter.store(0, std::memory_order_release);
}
|
|
|
|
|
|
|
|
// Acquire-loads the 8-state frame counter shared between threads.
int VideoTrack::Shared::counter() const {
	return _counter.load(std::memory_order_acquire);
}
|
|
|
|
|
|
|
|
// True once init() has stored a real counter value.
bool VideoTrack::Shared::initialized() const {
	return (counter() != kCounterUninitialized);
}
|
|
|
|
|
|
|
|
// Mutable access to one of the kFramesCount frame slots.
not_null<VideoTrack::Frame*> VideoTrack::Shared::getFrame(int index) {
	Expects(index >= 0 && index < kFramesCount);

	return &_frames[index];
}
|
|
|
|
|
2019-03-07 17:23:19 +04:00
|
|
|
// Read-only access to one of the kFramesCount frame slots.
not_null<const VideoTrack::Frame*> VideoTrack::Shared::getFrame(
		int index) const {
	Expects(index >= 0 && index < kFramesCount);

	return &_frames[index];
}
|
|
|
|
|
2019-02-27 15:36:19 +04:00
|
|
|
// Decides what the decoder thread should do next, based on the 8-state
// counter (two states per frame slot). Returns either a frame slot to fill,
// a delay until the next check, or an empty state (nothing to do).
// NOTE(review): statement order and the swap logic here implement a
// lock-free handoff with the render thread - kept byte-exact.
auto VideoTrack::Shared::prepareState(
	crl::time trackTime,
	bool dropStaleFrames)
-> PrepareState {
	// Fill `index`; when it and its successor are both decoded, keep them
	// ordered by position and either drop a stale frame or report how long
	// to wait before the current frame becomes due.
	const auto prepareNext = [&](int index) -> PrepareState {
		const auto frame = getFrame(index);
		const auto next = getFrame((index + 1) % kFramesCount);
		if (!IsDecoded(frame)) {
			return frame;
		} else if (!IsDecoded(next)) {
			return next;
		} else if (next->position < frame->position) {
			std::swap(*frame, *next);
		}
		if (next->position == kFinishedPosition || !dropStaleFrames) {
			return PrepareNextCheck(kTimeUnknown);
		} else if (IsStale(frame, trackTime)) {
			std::swap(*frame, *next);
			next->displayed = kDisplaySkipped;
			return next;
		} else {
			if (frame->position - trackTime + 1 <= 0) { // Debugging crash.
				CrashReports::SetAnnotation(
					"DelayValues",
					(QString::number(frame->position)
						+ " + 1 <= "
						+ QString::number(trackTime)));
			}
			Assert(frame->position >= trackTime);
			Assert(frame->position - trackTime + 1 > 0);

			return PrepareNextCheck(frame->position - trackTime + 1);
		}
	};
	const auto finishPrepare = [&](int index) -> PrepareState {
		// If player already awaits next frame - we ignore if it's stale.
		dropStaleFrames = false;
		const auto result = prepareNext(index);
		return v::is<PrepareNextCheck>(result) ? PrepareState() : result;
	};

	// Even counter: render thread owns the current frame; odd: handed over.
	switch (counter()) {
	case 0: return finishPrepare(1);
	case 1: return prepareNext(2);
	case 2: return finishPrepare(2);
	case 3: return prepareNext(3);
	case 4: return finishPrepare(3);
	case 5: return prepareNext(0);
	case 6: return finishPrepare(0);
	case 7: return prepareNext(1);
	}
	Unexpected("Counter value in VideoTrack::Shared::prepareState.");
}
|
|
|
|
|
2019-03-07 17:23:19 +04:00
|
|
|
// Sometimes main thread subscribes to check frame requests before
|
|
|
|
// the first frame is ready and presented and sometimes after.
|
|
|
|
// Sometimes main thread subscribes to check frame requests before
// the first frame is ready and presented and sometimes after.
// Only counter values 0/1 are expected here - this is queried right around
// the first present, before the counter can advance further.
bool VideoTrack::Shared::firstPresentHappened() const {
	switch (counter()) {
	case 0: return false;
	case 1: return true;
	}
	Unexpected("Counter value in VideoTrack::Shared::firstPresentHappened.");
}
|
|
|
|
|
2019-02-27 15:36:19 +04:00
|
|
|
auto VideoTrack::Shared::presentFrame(
|
2020-07-17 22:59:25 +04:00
|
|
|
not_null<VideoTrackObject*> object,
|
2019-03-07 17:23:19 +04:00
|
|
|
TimePoint time,
|
|
|
|
float64 playbackSpeed,
|
2020-07-17 22:59:25 +04:00
|
|
|
bool dropStaleFrames)
|
2019-02-27 15:36:19 +04:00
|
|
|
-> PresentFrame {
|
2019-02-17 15:08:29 +04:00
|
|
|
const auto present = [&](int counter, int index) -> PresentFrame {
|
|
|
|
const auto frame = getFrame(index);
|
2019-02-25 14:17:25 +04:00
|
|
|
const auto position = frame->position;
|
2019-12-11 17:01:11 +03:00
|
|
|
const auto addedWorldTimeDelay = base::take(_delay);
|
2019-03-06 17:00:03 +04:00
|
|
|
if (position == kFinishedPosition) {
|
2019-12-11 17:01:11 +03:00
|
|
|
return { kFinishedPosition, kTimeUnknown, addedWorldTimeDelay };
|
2019-03-06 17:00:03 +04:00
|
|
|
}
|
2020-07-17 22:59:25 +04:00
|
|
|
object->rasterizeFrame(frame);
|
2019-03-06 17:00:03 +04:00
|
|
|
if (!IsRasterized(frame)) {
|
|
|
|
// Error happened during frame prepare.
|
2019-12-11 17:01:11 +03:00
|
|
|
return { kTimeUnknown, kTimeUnknown, addedWorldTimeDelay };
|
2019-02-27 15:36:19 +04:00
|
|
|
}
|
2019-03-07 17:23:19 +04:00
|
|
|
const auto trackLeft = position - time.trackTime;
|
2021-08-24 12:42:27 +03:00
|
|
|
const auto adjustedBySpeed = trackLeft / playbackSpeed;
|
|
|
|
const auto roundedAdjustedBySpeed = SafeRound(adjustedBySpeed);
|
2019-03-07 17:23:19 +04:00
|
|
|
frame->display = time.worldTime
|
2019-12-11 17:01:11 +03:00
|
|
|
+ addedWorldTimeDelay
|
2021-08-24 12:42:27 +03:00
|
|
|
+ crl::time(roundedAdjustedBySpeed);
|
2019-02-17 15:08:29 +04:00
|
|
|
|
2019-02-20 17:28:48 +04:00
|
|
|
// Release this frame to the main thread for rendering.
|
2019-02-17 15:08:29 +04:00
|
|
|
_counter.store(
|
2021-06-03 16:57:48 +04:00
|
|
|
counter + 1,
|
2019-02-17 15:08:29 +04:00
|
|
|
std::memory_order_release);
|
2019-12-11 17:01:11 +03:00
|
|
|
return { position, crl::time(0), addedWorldTimeDelay };
|
2019-02-17 15:08:29 +04:00
|
|
|
};
|
|
|
|
const auto nextCheckDelay = [&](int index) -> PresentFrame {
|
|
|
|
const auto frame = getFrame(index);
|
2019-03-06 17:00:03 +04:00
|
|
|
if (frame->position == kFinishedPosition) {
|
|
|
|
return { kFinishedPosition, kTimeUnknown };
|
|
|
|
}
|
2019-02-17 15:08:29 +04:00
|
|
|
const auto next = getFrame((index + 1) % kFramesCount);
|
2019-03-06 17:00:03 +04:00
|
|
|
if (!IsDecoded(frame) || !IsDecoded(next)) {
|
2019-02-17 15:08:29 +04:00
|
|
|
return { kTimeUnknown, crl::time(0) };
|
2019-03-06 17:00:03 +04:00
|
|
|
} else if (next->position == kFinishedPosition
|
|
|
|
|| !dropStaleFrames
|
2019-03-07 17:23:19 +04:00
|
|
|
|| IsStale(frame, time.trackTime)) {
|
2019-03-06 17:00:03 +04:00
|
|
|
return { kTimeUnknown, kTimeUnknown };
|
2019-02-17 15:08:29 +04:00
|
|
|
}
|
2019-03-07 17:23:19 +04:00
|
|
|
return { kTimeUnknown, (frame->position - time.trackTime + 1) };
|
2019-02-17 15:08:29 +04:00
|
|
|
};
|
|
|
|
|
|
|
|
switch (counter()) {
|
|
|
|
case 0: return present(0, 1);
|
|
|
|
case 1: return nextCheckDelay(2);
|
|
|
|
case 2: return present(2, 2);
|
|
|
|
case 3: return nextCheckDelay(3);
|
|
|
|
case 4: return present(4, 3);
|
|
|
|
case 5: return nextCheckDelay(0);
|
|
|
|
case 6: return present(6, 0);
|
|
|
|
case 7: return nextCheckDelay(1);
|
|
|
|
}
|
|
|
|
Unexpected("Counter value in VideoTrack::Shared::prepareState.");
|
|
|
|
}
|
|
|
|
|
2019-03-07 17:23:19 +04:00
|
|
|
// Returns the scheduled display time of the next frame handed to the render
// thread (odd counter states), kTimeUnknown while the decoder still owns it,
// or kFrameDisplayTimeAlreadyDone if it was displayed but not yet shown.
crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
	const auto frameDisplayTime = [&](int counter) {
		const auto next = (counter + 1) % (2 * kFramesCount);
		const auto index = next / 2;
		const auto frame = getFrame(index);
		if (frame->displayed != kTimeUnknown) {
			// Frame already displayed, but not yet shown.
			return kFrameDisplayTimeAlreadyDone;
		}
		Assert(IsRasterized(frame));
		Assert(frame->display != kTimeUnknown);

		return frame->display;
	};

	switch (counter()) {
	case 0: return kTimeUnknown;
	case 1: return frameDisplayTime(1);
	case 2: return kTimeUnknown;
	case 3: return frameDisplayTime(3);
	case 4: return kTimeUnknown;
	case 5: return frameDisplayTime(5);
	case 6: return kTimeUnknown;
	case 7: return frameDisplayTime(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::nextFrameDisplayTime.");
}
|
|
|
|
|
2019-02-20 17:28:48 +04:00
|
|
|
// Records `now` as the displayed time of the frame in the current odd
// counter state (first display only) and returns that frame's position.
// Even states are invalid callers - the frame is still with the decoder.
crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
	const auto mark = [&](int counter) {
		const auto next = (counter + 1) % (2 * kFramesCount);
		const auto index = next / 2;
		const auto frame = getFrame(index);
		Assert(frame->position != kTimeUnknown);
		if (frame->displayed == kTimeUnknown) {
			frame->displayed = now;
		}
		return frame->position;
	};

	switch (counter()) {
	case 0: Unexpected("Value 0 in VideoTrack::Shared::markFrameDisplayed.");
	case 1: return mark(1);
	case 2: Unexpected("Value 2 in VideoTrack::Shared::markFrameDisplayed.");
	case 3: return mark(3);
	case 4: Unexpected("Value 4 in VideoTrack::Shared::markFrameDisplayed.");
	case 5: return mark(5);
	case 6: Unexpected("Value 6 in VideoTrack::Shared::markFrameDisplayed.");
	case 7: return mark(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::markFrameDisplayed.");
}
|
|
|
|
|
2019-12-11 17:01:11 +03:00
|
|
|
// Accumulates a display delay into _delay, to be consumed by the next
// presentFrame(). Valid only in odd counter states. The commented-out code
// is a retained earlier approach that recomputed the current frame's display
// time directly - kept for reference.
void VideoTrack::Shared::addTimelineDelay(crl::time delayed) {
	if (!delayed) {
		return;
	}
	const auto recountCurrentFrame = [&](int counter) {
		_delay += delayed;
		//const auto next = (counter + 1) % (2 * kFramesCount);
		//const auto index = next / 2;
		//const auto frame = getFrame(index);
		//if (frame->displayed != kTimeUnknown) {
		//	// Frame already displayed.
		//	return;
		//}
		//Assert(IsRasterized(frame));
		//Assert(frame->display != kTimeUnknown);
		//frame->display = countFrameDisplayTime(frame->index);
	};

	switch (counter()) {
	case 0: Unexpected("Value 0 in VideoTrack::Shared::addTimelineDelay.");
	case 1: return recountCurrentFrame(1);
	case 2: Unexpected("Value 2 in VideoTrack::Shared::addTimelineDelay.");
	case 3: return recountCurrentFrame(3);
	case 4: Unexpected("Value 4 in VideoTrack::Shared::addTimelineDelay.");
	case 5: return recountCurrentFrame(5);
	case 6: Unexpected("Value 6 in VideoTrack::Shared::addTimelineDelay.");
	case 7: return recountCurrentFrame(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::addTimelineDelay.");
}
|
|
|
|
|
|
|
|
// Advances the counter from an odd state to the next even state once the
// displayed frame has actually been shown, returning ownership of the slot
// to the decoder. Returns false if nothing was pending. Wrapping past the
// last state bumps _counterCycle, which keeps frame indices monotonic in
// frameForPaintWithIndex().
bool VideoTrack::Shared::markFrameShown() {
	const auto jump = [&](int counter) {
		const auto next = (counter + 1) % (2 * kFramesCount);
		const auto index = next / 2;
		const auto frame = getFrame(index);
		if (frame->displayed == kTimeUnknown) {
			// Not displayed yet - nothing to advance past.
			return false;
		}
		if (counter == 2 * kFramesCount - 1) {
			++_counterCycle;
		}
		_counter.store(
			next,
			std::memory_order_release);
		return true;
	};

	switch (counter()) {
	case 0: return false;
	case 1: return jump(1);
	case 2: return false;
	case 3: return jump(3);
	case 4: return false;
	case 5: return jump(5);
	case 6: return false;
	case 7: return jump(7);
	}
	Unexpected("Counter value in VideoTrack::Shared::markFrameShown.");
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
// Convenience wrapper: current paintable frame without its cycle index.
not_null<VideoTrack::Frame*> VideoTrack::Shared::frameForPaint() {
	return frameForPaintWithIndex().frame;
}
|
|
|
|
|
|
|
|
// Returns the frame currently owned by the render thread together with a
// monotonically increasing index (_counterCycle counts full counter wraps).
VideoTrack::FrameWithIndex VideoTrack::Shared::frameForPaintWithIndex() {
	const auto index = counter() / 2;
	const auto frame = getFrame(index);
	Assert(frame->format != FrameFormat::None);
	Assert(frame->position != kTimeUnknown);
	Assert(frame->displayed != kTimeUnknown);
	return {
		.frame = frame,
		.index = (_counterCycle * 2 * kFramesCount) + index,
	};
}
|
|
|
|
|
|
|
|
// Caches the stream parameters the main thread needs synchronously
// (index/time base/duration/rotation) before the stream is moved into the
// worker-thread implementation.
// NOTE: stream fields must be read in the initializer list *before*
// std::move(stream) reaches _wrapped - member declaration order guarantees
// this; do not reorder members.
VideoTrack::VideoTrack(
	const PlaybackOptions &options,
	Stream &&stream,
	const AudioMsgId &audioId,
	FnMut<void(const Information &)> ready,
	Fn<void(Error)> error)
: _streamIndex(stream.index)
, _streamTimeBase(stream.timeBase)
, _streamDuration(stream.duration)
, _streamRotation(stream.rotation)
//, _streamAspect(stream.aspect)
, _shared(std::make_unique<Shared>())
, _wrapped(
	options,
	_shared.get(),
	std::move(stream),
	audioId,
	std::move(ready),
	std::move(error)) {
}
|
|
|
|
|
|
|
|
// Cached AVStream index of this video track.
int VideoTrack::streamIndex() const {
	return _streamIndex;
}
|
|
|
|
|
|
|
|
// Cached stream time base (FFmpeg rational).
AVRational VideoTrack::streamTimeBase() const {
	return _streamTimeBase;
}
|
|
|
|
|
2019-03-05 11:40:25 +04:00
|
|
|
// Cached stream duration.
crl::time VideoTrack::streamDuration() const {
	return _streamDuration;
}
|
|
|
|
|
2019-12-19 18:14:05 +03:00
|
|
|
// Forwards a batch of demuxed packets to the worker-thread implementation,
// moving them into the posted lambda to avoid copies.
void VideoTrack::process(std::vector<FFmpeg::Packet> &&packets) {
	auto batch = std::move(packets);
	_wrapped.with([
		packets = std::move(batch)
	](Implementation &impl) mutable {
		impl.process(std::move(packets));
	});
}
|
|
|
|
|
2019-02-21 17:40:09 +04:00
|
|
|
// Intentionally a no-op: video decoding just idles until packets arrive.
void VideoTrack::waitForData() {
}
|
|
|
|
|
2019-02-21 18:57:00 +04:00
|
|
|
// Relays the pause request to the worker-thread implementation.
void VideoTrack::pause(crl::time time) {
	_wrapped.with([=](Implementation &impl) {
		impl.pause(time);
	});
}
|
|
|
|
|
|
|
|
// Relays the resume request to the worker-thread implementation.
void VideoTrack::resume(crl::time time) {
	_wrapped.with([=](Implementation &impl) {
		impl.resume(time);
	});
}
|
|
|
|
|
2019-02-21 20:01:55 +04:00
|
|
|
// Relays the playback-speed change to the worker-thread implementation.
void VideoTrack::setSpeed(float64 speed) {
	_wrapped.with([=](Implementation &impl) {
		impl.setSpeed(speed);
	});
}
|
|
|
|
|
2019-12-19 13:50:33 +03:00
|
|
|
void VideoTrack::setWaitForMarkAsShown(bool wait) {
|
|
|
|
_wrapped.with([=](Implementation &unwrapped) {
|
|
|
|
unwrapped.setWaitForMarkAsShown(wait);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2019-03-07 17:23:19 +04:00
|
|
|
// Main-thread query of the shared state - safe, lock-free.
crl::time VideoTrack::nextFrameDisplayTime() const {
	return _shared->nextFrameDisplayTime();
}
|
|
|
|
|
2019-02-20 17:28:48 +04:00
|
|
|
// Marks the pending frame as displayed at `now`; returns its position.
crl::time VideoTrack::markFrameDisplayed(crl::time now) {
	const auto result = _shared->markFrameDisplayed(now);

	Ensures(result != kTimeUnknown);
	return result;
}
|
|
|
|
|
2019-12-11 17:01:11 +03:00
|
|
|
// Forwards the delay to the shared state only; the commented-out worker
// notification is a retained earlier approach, kept for reference.
void VideoTrack::addTimelineDelay(crl::time delayed) {
	_shared->addTimelineDelay(delayed);
	//if (!delayed) {
	//	return;
	//}
	//_wrapped.with([=](Implementation &unwrapped) mutable {
	//	unwrapped.addTimelineDelay(delayed);
	//});
}
|
|
|
|
|
|
|
|
bool VideoTrack::markFrameShown() {
|
|
|
|
if (!_shared->markFrameShown()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
_wrapped.with([](Implementation &unwrapped) {
|
|
|
|
unwrapped.frameShown();
|
|
|
|
});
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2019-12-12 09:56:08 +03:00
|
|
|
// Returns an image for the current frame, prepared according to `request`
// for the given consumer `instance`. Caches one prepared image per instance
// inside the frame and reuses another instance's image when requests match.
QImage VideoTrack::frame(
		const FrameRequest &request,
		const Instance *instance) {
	const auto frame = _shared->frameForPaint();
	// Look up this instance's cached prepared entry, if any.
	const auto i = frame->prepared.find(instance);
	const auto none = (i == frame->prepared.end());
	// The request this frame was last prepared for: a non-strict one when
	// nothing was prepared yet, otherwise this (or the first) entry's request.
	const auto preparedFor = frame->prepared.empty()
		? FrameRequest::NonStrict()
		: (none ? frame->prepared.begin() : i)->second.request;
	const auto changed = !preparedFor.goodFor(request);
	const auto useRequest = changed ? request : preparedFor;
	if (changed) {
		// Tell the implementation thread about the new request so future
		// frames can be prepared for it up front.
		_wrapped.with([=](Implementation &unwrapped) {
			unwrapped.updateFrameRequest(instance, useRequest);
		});
	}
	// Lazily convert YUV420 planes to an RGB image on first RGB access.
	if (frame->original.isNull()
		&& frame->format == FrameFormat::YUV420) {
		frame->original = ConvertToARGB32(frame->yuv420);
	}
	if (!frame->alpha
		&& GoodForRequest(frame->original, _streamRotation, useRequest)) {
		// The original image already satisfies the request as-is.
		return frame->original;
	} else if (changed || none || i->second.image.isNull()) {
		// Need to (re)prepare: create this instance's entry if missing.
		const auto j = none
			? frame->prepared.emplace(instance, useRequest).first
			: i;
		if (changed && !none) {
			i->second.request = useRequest;
		}
		// Reuse another instance's already-prepared image for an identical
		// request instead of preparing the same image twice.
		if (frame->prepared.size() > 1) {
			for (auto &[alreadyInstance, prepared] : frame->prepared) {
				if (alreadyInstance != instance
					&& prepared.request == useRequest
					&& !prepared.image.isNull()) {
					return prepared.image;
				}
			}
		}
		j->second.image = PrepareByRequest(
			frame->original,
			frame->alpha,
			_streamRotation,
			useRequest,
			std::move(j->second.image));
		return j->second.image;
	}
	// Cached image for this instance is still valid.
	return i->second.image;
}
|
|
|
|
|
2021-06-03 16:57:48 +04:00
|
|
|
// Returns the current frame together with its format and index, without
// forcing an RGB conversion. If this instance has no request registered or
// still requires ARGB32, downgrades its request so future frames are not
// converted unnecessarily.
FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) {
	const auto data = _shared->frameForPaintWithIndex();
	const auto i = data.frame->prepared.find(instance);
	const auto none = (i == data.frame->prepared.end());
	if (none || i->second.request.requireARGB32) {
		_wrapped.with([=](Implementation &unwrapped) {
			unwrapped.updateFrameRequest(
				instance,
				{ .requireARGB32 = false });
		});
	}
	return {
		.original = data.frame->original,
		.yuv420 = &data.frame->yuv420,
		.format = data.frame->format,
		.index = data.index,
	};
}
|
|
|
|
|
2021-06-04 13:50:41 +04:00
|
|
|
// Returns the current frame as an RGB image, converting from YUV420 planes
// on demand when no RGB image is cached yet.
QImage VideoTrack::currentFrameImage() {
	const auto current = _shared->frameForPaint();
	const auto needsConvert = current->original.isNull()
		&& (current->format == FrameFormat::YUV420);
	if (needsConvert) {
		current->original = ConvertToARGB32(current->yuv420);
	}
	return current->original;
}
|
|
|
|
|
2019-12-12 09:56:08 +03:00
|
|
|
// Removes this consumer's frame request on the implementation thread.
void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
	_wrapped.with([instance](Implementation &unwrapped) {
		unwrapped.removeFrameRequest(instance);
	});
}
|
|
|
|
|
2019-12-18 20:15:42 +03:00
|
|
|
// Pre-renders the frame's per-instance images for all registered requests.
// Only ARGB32 frames are prepared here; other formats are left untouched.
// When several instances share an identical request, only the first one
// keeps a prepared image — the duplicates are cleared so they fall back to
// that shared image later.
void VideoTrack::PrepareFrameByRequests(
		not_null<Frame*> frame,
		int rotation) {
	Expects(frame->format != FrameFormat::ARGB32
		|| !frame->original.isNull());

	if (frame->format != FrameFormat::ARGB32) {
		return;
	}

	const auto begin = frame->prepared.begin();
	const auto end = frame->prepared.end();
	for (auto i = begin; i != end; ++i) {
		auto &prepared = i->second;
		// Prepare only when the original cannot be used directly:
		// either it has alpha or it does not satisfy this request.
		if (frame->alpha
			|| !GoodForRequest(frame->original, rotation, prepared.request)) {
			// Look for an earlier entry with the exact same request.
			auto j = begin;
			for (; j != i; ++j) {
				if (j->second.request == prepared.request) {
					// Duplicate request: drop this image, reuse the
					// earlier entry's image at paint time.
					prepared.image = QImage();
					break;
				}
			}
			if (j == i) {
				// First entry with this request: actually render it.
				prepared.image = PrepareByRequest(
					frame->original,
					frame->alpha,
					rotation,
					prepared.request,
					std::move(prepared.image));
			}
		}
	}
}
|
|
|
|
|
2019-03-13 15:11:54 +04:00
|
|
|
// A frame counts as decoded when its position is known but it has not
// been displayed yet.
bool VideoTrack::IsDecoded(not_null<const Frame*> frame) {
	if (frame->position == kTimeUnknown) {
		return false;
	}
	return (frame->displayed == kTimeUnknown);
}
|
|
|
|
|
2019-03-13 15:11:54 +04:00
|
|
|
// A frame is rasterized when it is decoded and has pixel data available,
// either as an RGB image or as YUV420 planes.
bool VideoTrack::IsRasterized(not_null<const Frame*> frame) {
	if (!IsDecoded(frame)) {
		return false;
	}
	return (frame->format == FrameFormat::YUV420)
		|| !frame->original.isNull();
}
|
|
|
|
|
2019-03-13 15:11:54 +04:00
|
|
|
// A decoded frame is stale once the track time has moved past its position.
bool VideoTrack::IsStale(not_null<const Frame*> frame, crl::time trackTime) {
	Expects(IsDecoded(frame));

	const auto stale = (frame->position < trackTime);
	return stale;
}
|
|
|
|
|
2019-03-07 17:23:19 +04:00
|
|
|
// Exposes the implementation's next-frame-check signal on the main thread.
rpl::producer<> VideoTrack::checkNextFrame() const {
	return _wrapped.producer_on_main([](const Implementation &impl) {
		return impl.checkNextFrame();
	});
}
|
|
|
|
|
2019-02-22 18:28:10 +04:00
|
|
|
// Exposes the implementation's waiting-for-data signal on the main thread.
rpl::producer<> VideoTrack::waitingForData() const {
	return _wrapped.producer_on_main([](const Implementation &impl) {
		return impl.waitingForData();
	});
}
|
|
|
|
|
2019-02-17 15:08:29 +04:00
|
|
|
VideoTrack::~VideoTrack() {
	// Move the shared state into the lambda so it stays alive until the
	// implementation has been interrupted on its own thread.
	_wrapped.with([keepAlive = std::move(_shared)](Implementation &unwrapped) {
		unwrapped.interrupt();
	});
}
|
|
|
|
|
|
|
|
} // namespace Streaming
|
|
|
|
} // namespace Media
|