LibMedia: Prefix AK::Duration with AK Namespace

This commit is contained in:
Andrew Kaster 2024-07-16 23:44:30 -06:00 committed by Sam Atkins
parent 28ab1941b9
commit c1681931ba
Notes: sideshowbarker 2024-07-18 23:46:50 +09:00
18 changed files with 80 additions and 80 deletions

View file

@@ -28,7 +28,7 @@ static inline void decode_video(StringView path, size_t expected_frame_count, T
size_t frame_count = 0;
NonnullOwnPtr<Media::VideoDecoder> decoder = create_decoder(iterator);
auto last_timestamp = Duration::min();
auto last_timestamp = AK::Duration::min();
while (frame_count <= expected_frame_count) {
auto block_result = iterator.next_block();

View file

@@ -20,6 +20,6 @@ TEST_CASE(master_elements_containing_crc32)
auto iterator = MUST(matroska_reader.create_sample_iterator(video_track));
MUST(iterator.next_block());
MUST(matroska_reader.seek_to_random_access_point(iterator, Duration::from_seconds(7)));
MUST(matroska_reader.seek_to_random_access_point(iterator, AK::Duration::from_seconds(7)));
MUST(iterator.next_block());
}

View file

@@ -33,11 +33,11 @@ public:
void set_writing_app(ByteString writing_app) { m_writing_app = move(writing_app); }
Optional<double> duration_unscaled() const { return m_duration_unscaled; }
void set_duration_unscaled(double duration) { m_duration_unscaled.emplace(duration); }
Optional<Duration> duration() const
Optional<AK::Duration> duration() const
{
if (!duration_unscaled().has_value())
return {};
return Duration::from_nanoseconds(static_cast<i64>(static_cast<double>(timestamp_scale()) * duration_unscaled().value()));
return AK::Duration::from_nanoseconds(static_cast<i64>(static_cast<double>(timestamp_scale()) * duration_unscaled().value()));
}
private:
@@ -175,8 +175,8 @@ public:
u64 track_number() const { return m_track_number; }
void set_track_number(u64 track_number) { m_track_number = track_number; }
Duration timestamp() const { return m_timestamp; }
void set_timestamp(Duration timestamp) { m_timestamp = timestamp; }
AK::Duration timestamp() const { return m_timestamp; }
void set_timestamp(AK::Duration timestamp) { m_timestamp = timestamp; }
bool only_keyframes() const { return m_only_keyframes; }
void set_only_keyframes(bool only_keyframes) { m_only_keyframes = only_keyframes; }
bool invisible() const { return m_invisible; }
@@ -193,7 +193,7 @@ public:
private:
u64 m_track_number { 0 };
Duration m_timestamp { Duration::zero() };
AK::Duration m_timestamp { AK::Duration::zero() };
bool m_only_keyframes { false };
bool m_invisible { false };
Lacing m_lacing { None };
@@ -203,11 +203,11 @@ private:
class Cluster {
public:
Duration timestamp() const { return m_timestamp; }
void set_timestamp(Duration timestamp) { m_timestamp = timestamp; }
AK::Duration timestamp() const { return m_timestamp; }
void set_timestamp(AK::Duration timestamp) { m_timestamp = timestamp; }
private:
Duration m_timestamp { Duration::zero() };
AK::Duration m_timestamp { AK::Duration::zero() };
};
class CueTrackPosition {
@@ -227,14 +227,14 @@ private:
class CuePoint {
public:
Duration timestamp() const { return m_timestamp; }
void set_timestamp(Duration timestamp) { m_timestamp = timestamp; }
AK::Duration timestamp() const { return m_timestamp; }
void set_timestamp(AK::Duration timestamp) { m_timestamp = timestamp; }
OrderedHashMap<u64, CueTrackPosition>& track_positions() { return m_track_positions; }
OrderedHashMap<u64, CueTrackPosition> const& track_positions() const { return m_track_positions; }
Optional<CueTrackPosition const&> position_for_track(u64 track_number) const { return m_track_positions.get(track_number); }
private:
Duration m_timestamp = Duration::min();
AK::Duration m_timestamp = AK::Duration::min();
OrderedHashMap<u64, CueTrackPosition> m_track_positions;
};

View file

@@ -109,7 +109,7 @@ DecoderErrorOr<ReadonlyBytes> MatroskaDemuxer::get_codec_initialization_data_for
return TRY(m_reader.track_for_track_number(track.identifier()))->codec_private_data();
}
DecoderErrorOr<Optional<Duration>> MatroskaDemuxer::seek_to_most_recent_keyframe(Track track, Duration timestamp, Optional<Duration> earliest_available_sample)
DecoderErrorOr<Optional<AK::Duration>> MatroskaDemuxer::seek_to_most_recent_keyframe(Track track, AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample)
{
// Removing the track status will cause us to start from the beginning.
if (timestamp.is_zero()) {
@@ -151,10 +151,10 @@ DecoderErrorOr<Sample> MatroskaDemuxer::get_next_sample_for_track(Track track)
return Sample(status.block->timestamp(), status.block->frame(status.frame_index++), VideoSampleData(cicp));
}
DecoderErrorOr<Duration> MatroskaDemuxer::duration()
DecoderErrorOr<AK::Duration> MatroskaDemuxer::duration()
{
auto duration = TRY(m_reader.segment_information()).duration();
return duration.value_or(Duration::zero());
return duration.value_or(AK::Duration::zero());
}
}

View file

@@ -29,9 +29,9 @@ public:
DecoderErrorOr<Vector<Track>> get_tracks_for_type(TrackType type) override;
DecoderErrorOr<Optional<Duration>> seek_to_most_recent_keyframe(Track track, Duration timestamp, Optional<Duration> earliest_available_sample = OptionalNone()) override;
DecoderErrorOr<Optional<AK::Duration>> seek_to_most_recent_keyframe(Track track, AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample = OptionalNone()) override;
DecoderErrorOr<Duration> duration() override;
DecoderErrorOr<AK::Duration> duration() override;
DecoderErrorOr<CodecID> get_codec_id_for_track(Track track) override;

View file

@@ -593,11 +593,11 @@ static DecoderErrorOr<Cluster> parse_cluster(Streamer& streamer, u64 timestamp_s
TRY_READ(streamer.seek_to_position(first_element_position));
Cluster cluster;
cluster.set_timestamp(Duration::from_nanoseconds(timestamp.release_value() * timestamp_scale));
cluster.set_timestamp(AK::Duration::from_nanoseconds(timestamp.release_value() * timestamp_scale));
return cluster;
}
static DecoderErrorOr<Block> parse_simple_block(Streamer& streamer, Duration cluster_timestamp, u64 segment_timestamp_scale, TrackEntry const& track)
static DecoderErrorOr<Block> parse_simple_block(Streamer& streamer, AK::Duration cluster_timestamp, u64 segment_timestamp_scale, TrackEntry const& track)
{
Block block;
@@ -623,7 +623,7 @@ static DecoderErrorOr<Block> parse_simple_block(Streamer& streamer, Duration clu
// This is only mentioned in the elements specification under TrackOffset.
// https://www.matroska.org/technical/elements.html
timestamp_offset_ns.saturating_add(AK::clamp_to<i64>(track.timestamp_offset()));
Duration timestamp_offset = Duration::from_nanoseconds(timestamp_offset_ns.value());
AK::Duration timestamp_offset = AK::Duration::from_nanoseconds(timestamp_offset_ns.value());
block.set_timestamp(cluster_timestamp + timestamp_offset);
auto flags = TRY_READ(streamer.read_octet());
@@ -756,7 +756,7 @@ static DecoderErrorOr<CuePoint> parse_cue_point(Streamer& streamer, u64 timestam
// https://github.com/mozilla/nestegg/tree/ec6adfbbf979678e3058cc4695257366f39e290b/src/nestegg.c#L2411-L2416
// https://github.com/mozilla/nestegg/tree/ec6adfbbf979678e3058cc4695257366f39e290b/src/nestegg.c#L1383-L1392
// Other fields that specify Matroska Ticks may also use Segment Ticks instead, who knows :^(
auto timestamp = Duration::from_nanoseconds(static_cast<i64>(TRY_READ(streamer.read_u64()) * timestamp_scale));
auto timestamp = AK::Duration::from_nanoseconds(static_cast<i64>(TRY_READ(streamer.read_u64()) * timestamp_scale));
cue_point.set_timestamp(timestamp);
dbgln_if(MATROSKA_DEBUG, "Read CuePoint timestamp {}ms", cue_point.timestamp().to_milliseconds());
break;
@@ -827,7 +827,7 @@ DecoderErrorOr<void> Reader::ensure_cues_are_parsed()
return {};
}
DecoderErrorOr<void> Reader::seek_to_cue_for_timestamp(SampleIterator& iterator, Duration const& timestamp)
DecoderErrorOr<void> Reader::seek_to_cue_for_timestamp(SampleIterator& iterator, AK::Duration const& timestamp)
{
auto const& cue_points = MUST(cue_points_for_track(iterator.m_track->track_number())).release_value();
@@ -866,7 +866,7 @@ DecoderErrorOr<void> Reader::seek_to_cue_for_timestamp(SampleIterator& iterator,
return {};
}
static DecoderErrorOr<void> search_clusters_for_keyframe_before_timestamp(SampleIterator& iterator, Duration const& timestamp)
static DecoderErrorOr<void> search_clusters_for_keyframe_before_timestamp(SampleIterator& iterator, AK::Duration const& timestamp)
{
#if MATROSKA_DEBUG
size_t inter_frames_count;
@@ -908,7 +908,7 @@ DecoderErrorOr<bool> Reader::has_cues_for_track(u64 track_number)
return m_cues.contains(track_number);
}
DecoderErrorOr<SampleIterator> Reader::seek_to_random_access_point(SampleIterator iterator, Duration timestamp)
DecoderErrorOr<SampleIterator> Reader::seek_to_random_access_point(SampleIterator iterator, AK::Duration timestamp)
{
if (TRY(has_cues_for_track(iterator.m_track->track_number()))) {
TRY(seek_to_cue_for_timestamp(iterator, timestamp));

View file

@@ -39,7 +39,7 @@ public:
DecoderErrorOr<size_t> track_count();
DecoderErrorOr<SampleIterator> create_sample_iterator(u64 track_number);
DecoderErrorOr<SampleIterator> seek_to_random_access_point(SampleIterator, Duration);
DecoderErrorOr<SampleIterator> seek_to_random_access_point(SampleIterator, AK::Duration);
DecoderErrorOr<Optional<Vector<CuePoint> const&>> cue_points_for_track(u64 track_number);
DecoderErrorOr<bool> has_cues_for_track(u64 track_number);
@@ -58,7 +58,7 @@ private:
DecoderErrorOr<void> parse_cues(Streamer&);
DecoderErrorOr<void> ensure_cues_are_parsed();
DecoderErrorOr<void> seek_to_cue_for_timestamp(SampleIterator&, Duration const&);
DecoderErrorOr<void> seek_to_cue_for_timestamp(SampleIterator&, AK::Duration const&);
RefPtr<Core::SharedMappedFile> m_mapped_file;
ReadonlyBytes m_data;
@@ -84,7 +84,7 @@ class SampleIterator {
public:
DecoderErrorOr<Block> next_block();
Cluster const& current_cluster() const { return *m_current_cluster; }
Optional<Duration> const& last_timestamp() const { return m_last_timestamp; }
Optional<AK::Duration> const& last_timestamp() const { return m_last_timestamp; }
TrackEntry const& track() const { return *m_track; }
private:
@@ -109,7 +109,7 @@ private:
// Must always point to an element ID or the end of the stream.
size_t m_position { 0 };
Optional<Duration> m_last_timestamp;
Optional<AK::Duration> m_last_timestamp;
Optional<Cluster> m_current_cluster;
};

View file

@@ -31,9 +31,9 @@ public:
// Returns the timestamp of the keyframe that was seeked to.
// The value is `Optional` to allow the demuxer to decide not to seek so that it can keep its position
// in the case that the timestamp is closer to the current time than the nearest keyframe.
virtual DecoderErrorOr<Optional<Duration>> seek_to_most_recent_keyframe(Track track, Duration timestamp, Optional<Duration> earliest_available_sample = OptionalNone()) = 0;
virtual DecoderErrorOr<Optional<AK::Duration>> seek_to_most_recent_keyframe(Track track, AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample = OptionalNone()) = 0;
virtual DecoderErrorOr<Duration> duration() = 0;
virtual DecoderErrorOr<AK::Duration> duration() = 0;
};
}

View file

@@ -101,7 +101,7 @@ FFmpegVideoDecoder::~FFmpegVideoDecoder()
avcodec_free_context(&m_codec_context);
}
DecoderErrorOr<void> FFmpegVideoDecoder::receive_sample(Duration timestamp, ReadonlyBytes sample)
DecoderErrorOr<void> FFmpegVideoDecoder::receive_sample(AK::Duration timestamp, ReadonlyBytes sample)
{
VERIFY(sample.size() < NumericLimits<int>::max());
@@ -189,7 +189,7 @@ DecoderErrorOr<NonnullOwnPtr<VideoFrame>> FFmpegVideoDecoder::get_decoded_frame(
auto size = Gfx::Size<u32> { m_frame->width, m_frame->height };
auto timestamp = Duration::from_microseconds(m_frame->pts);
auto timestamp = AK::Duration::from_microseconds(m_frame->pts);
auto frame = DECODER_TRY_ALLOC(SubsampledYUVFrame::try_create(timestamp, size, bit_depth, cicp, subsampling));
for (u32 plane = 0; plane < 3; plane++) {

View file

@@ -19,13 +19,13 @@ public:
FFmpegVideoDecoder(AVCodecContext* codec_context, AVPacket* packet, AVFrame* frame);
~FFmpegVideoDecoder();
DecoderErrorOr<void> receive_sample(Duration timestamp, ReadonlyBytes sample) override;
DecoderErrorOr<void> receive_sample(AK::Duration timestamp, ReadonlyBytes sample) override;
DecoderErrorOr<NonnullOwnPtr<VideoFrame>> get_decoded_frame() override;
void flush() override;
private:
DecoderErrorOr<void> decode_single_sample(Duration timestamp, u8* data, int size);
DecoderErrorOr<void> decode_single_sample(AK::Duration timestamp, u8* data, int size);
AVCodecContext* m_codec_context;
AVPacket* m_packet;

View file

@@ -30,7 +30,7 @@ FFmpegVideoDecoder::~FFmpegVideoDecoder()
{
}
DecoderErrorOr<void> FFmpegVideoDecoder::receive_sample(Duration timestamp, ReadonlyBytes sample)
DecoderErrorOr<void> FFmpegVideoDecoder::receive_sample(AK::Duration timestamp, ReadonlyBytes sample)
{
(void)timestamp;
(void)sample;

View file

@@ -84,12 +84,12 @@ void PlaybackManager::terminate_playback()
}
}
Duration PlaybackManager::current_playback_time()
AK::Duration PlaybackManager::current_playback_time()
{
return m_playback_handler->current_time();
}
Duration PlaybackManager::duration()
AK::Duration PlaybackManager::duration()
{
auto duration_result = ({
auto demuxer_locker = Threading::MutexLocker(m_decoder_mutex);
@@ -100,7 +100,7 @@ Duration PlaybackManager::duration()
// FIXME: We should determine the last sample that the demuxer knows is available and
// use that as the current duration. The duration may change if the demuxer doesn't
// know there is a fixed duration.
return Duration::zero();
return AK::Duration::zero();
}
return duration_result.release_value();
}
@@ -161,12 +161,12 @@ void PlaybackManager::timer_callback()
TRY_OR_FATAL_ERROR(m_playback_handler->do_timed_state_update());
}
void PlaybackManager::seek_to_timestamp(Duration target_timestamp, SeekMode seek_mode)
void PlaybackManager::seek_to_timestamp(AK::Duration target_timestamp, SeekMode seek_mode)
{
TRY_OR_FATAL_ERROR(m_playback_handler->seek(target_timestamp, seek_mode));
}
DecoderErrorOr<Optional<Duration>> PlaybackManager::seek_demuxer_to_most_recent_keyframe(Duration timestamp, Optional<Duration> earliest_available_sample)
DecoderErrorOr<Optional<AK::Duration>> PlaybackManager::seek_demuxer_to_most_recent_keyframe(AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample)
{
auto seeked_timestamp = TRY(m_demuxer->seek_to_most_recent_keyframe(m_selected_video_track, timestamp, move(earliest_available_sample)));
if (seeked_timestamp.has_value())
@@ -193,7 +193,7 @@ void PlaybackManager::set_state_update_timer(int delay_ms)
void PlaybackManager::restart_playback()
{
seek_to_timestamp(Duration::zero());
seek_to_timestamp(AK::Duration::zero());
}
void PlaybackManager::decode_and_queue_one_sample()
@@ -313,12 +313,12 @@ void PlaybackManager::decode_and_queue_one_sample()
m_buffer_is_full.exchange(false);
}
Duration PlaybackManager::PlaybackStateHandler::current_time() const
AK::Duration PlaybackManager::PlaybackStateHandler::current_time() const
{
return m_manager.m_last_present_in_media_time;
}
ErrorOr<void> PlaybackManager::PlaybackStateHandler::seek(Duration target_timestamp, SeekMode seek_mode)
ErrorOr<void> PlaybackManager::PlaybackStateHandler::seek(AK::Duration target_timestamp, SeekMode seek_mode)
{
return replace_handler_and_delete_this<SeekingStateHandler>(is_playing(), target_timestamp, seek_mode);
}
@@ -414,7 +414,7 @@ private:
return replace_handler_and_delete_this<BufferingStateHandler>(true);
}
Duration current_time() const override
AK::Duration current_time() const override
{
return manager().m_last_present_in_media_time + (MonotonicTime::now() - m_last_present_in_real_time);
}
@@ -561,7 +561,7 @@ class PlaybackManager::BufferingStateHandler : public PlaybackManager::ResumingS
class PlaybackManager::SeekingStateHandler : public PlaybackManager::ResumingStateHandler {
public:
SeekingStateHandler(PlaybackManager& manager, bool playing, Duration target_timestamp, SeekMode seek_mode)
SeekingStateHandler(PlaybackManager& manager, bool playing, AK::Duration target_timestamp, SeekMode seek_mode)
: ResumingStateHandler(manager, playing)
, m_target_timestamp(target_timestamp)
, m_seek_mode(seek_mode)
@@ -648,14 +648,14 @@ private:
StringView name() override { return "Seeking"sv; }
ErrorOr<void> seek(Duration target_timestamp, SeekMode seek_mode) override
ErrorOr<void> seek(AK::Duration target_timestamp, SeekMode seek_mode) override
{
m_target_timestamp = target_timestamp;
m_seek_mode = seek_mode;
return on_enter();
}
Duration current_time() const override
AK::Duration current_time() const override
{
return m_target_timestamp;
}
@@ -669,7 +669,7 @@ private:
PlaybackState get_state() const override { return PlaybackState::Seeking; }
Duration m_target_timestamp { Duration::zero() };
AK::Duration m_target_timestamp { AK::Duration::zero() };
SeekMode m_seek_mode { SeekMode::Accurate };
};
@@ -693,7 +693,7 @@ private:
{
// When Stopped, the decoder thread will be waiting for a signal to start its loop going again.
manager().m_decode_wait_condition.broadcast();
return replace_handler_and_delete_this<SeekingStateHandler>(true, Duration::zero(), SeekMode::Fast);
return replace_handler_and_delete_this<SeekingStateHandler>(true, AK::Duration::zero(), SeekMode::Fast);
}
bool is_playing() const override { return false; }
PlaybackState get_state() const override { return PlaybackState::Stopped; }
@@ -725,7 +725,7 @@ DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> PlaybackManager::create(NonnullOw
},
"Media Decoder"sv));
playback_manager->m_playback_handler = make<SeekingStateHandler>(*playback_manager, false, Duration::zero(), SeekMode::Fast);
playback_manager->m_playback_handler = make<SeekingStateHandler>(*playback_manager, false, AK::Duration::zero(), SeekMode::Fast);
DECODER_TRY_ALLOC(playback_manager->m_playback_handler->on_enter());
playback_manager->m_decode_thread->start();

View file

@@ -27,30 +27,30 @@ class FrameQueueItem {
public:
FrameQueueItem()
: m_data(Empty())
, m_timestamp(Duration::zero())
, m_timestamp(AK::Duration::zero())
{
}
static constexpr Duration no_timestamp = Duration::min();
static constexpr AK::Duration no_timestamp = AK::Duration::min();
enum class Type {
Frame,
Error,
};
static FrameQueueItem frame(RefPtr<Gfx::Bitmap> bitmap, Duration timestamp)
static FrameQueueItem frame(RefPtr<Gfx::Bitmap> bitmap, AK::Duration timestamp)
{
return FrameQueueItem(move(bitmap), timestamp);
}
static FrameQueueItem error_marker(DecoderError&& error, Duration timestamp)
static FrameQueueItem error_marker(DecoderError&& error, AK::Duration timestamp)
{
return FrameQueueItem(move(error), timestamp);
}
bool is_frame() const { return m_data.has<RefPtr<Gfx::Bitmap>>(); }
RefPtr<Gfx::Bitmap> bitmap() const { return m_data.get<RefPtr<Gfx::Bitmap>>(); }
Duration timestamp() const { return m_timestamp; }
AK::Duration timestamp() const { return m_timestamp; }
bool is_error() const { return m_data.has<DecoderError>(); }
DecoderError const& error() const { return m_data.get<DecoderError>(); }
@@ -71,21 +71,21 @@ public:
}
private:
FrameQueueItem(RefPtr<Gfx::Bitmap> bitmap, Duration timestamp)
FrameQueueItem(RefPtr<Gfx::Bitmap> bitmap, AK::Duration timestamp)
: m_data(move(bitmap))
, m_timestamp(timestamp)
{
VERIFY(m_timestamp != no_timestamp);
}
FrameQueueItem(DecoderError&& error, Duration timestamp)
FrameQueueItem(DecoderError&& error, AK::Duration timestamp)
: m_data(move(error))
, m_timestamp(timestamp)
{
}
Variant<Empty, RefPtr<Gfx::Bitmap>, DecoderError> m_data { Empty() };
Duration m_timestamp { no_timestamp };
AK::Duration m_timestamp { no_timestamp };
};
static constexpr size_t frame_buffer_count = 4;
@@ -123,7 +123,7 @@ public:
void pause_playback();
void restart_playback();
void terminate_playback();
void seek_to_timestamp(Duration, SeekMode = DEFAULT_SEEK_MODE);
void seek_to_timestamp(AK::Duration, SeekMode = DEFAULT_SEEK_MODE);
bool is_playing() const
{
return m_playback_handler->is_playing();
@@ -135,8 +135,8 @@ public:
u64 number_of_skipped_frames() const { return m_skipped_frames; }
Duration current_playback_time();
Duration duration();
AK::Duration current_playback_time();
AK::Duration duration();
Function<void(RefPtr<Gfx::Bitmap>)> on_video_frame;
Function<void()> on_playback_state_change;
@@ -159,7 +159,7 @@ private:
void timer_callback();
// This must be called with m_demuxer_mutex locked!
DecoderErrorOr<Optional<Duration>> seek_demuxer_to_most_recent_keyframe(Duration timestamp, Optional<Duration> earliest_available_sample = OptionalNone());
DecoderErrorOr<Optional<AK::Duration>> seek_demuxer_to_most_recent_keyframe(AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample = OptionalNone());
Optional<FrameQueueItem> dequeue_one_frame();
void set_state_update_timer(int delay_ms);
@@ -173,7 +173,7 @@ private:
void dispatch_state_change();
void dispatch_fatal_error(Error);
Duration m_last_present_in_media_time = Duration::zero();
AK::Duration m_last_present_in_media_time = AK::Duration::zero();
NonnullOwnPtr<Demuxer> m_demuxer;
Threading::Mutex m_decoder_mutex;
@@ -213,10 +213,10 @@ private:
virtual PlaybackState get_state() const = 0;
virtual ErrorOr<void> pause() { return {}; }
virtual ErrorOr<void> buffer() { return {}; }
virtual ErrorOr<void> seek(Duration target_timestamp, SeekMode);
virtual ErrorOr<void> seek(AK::Duration target_timestamp, SeekMode);
virtual ErrorOr<void> stop();
virtual Duration current_time() const;
virtual AK::Duration current_time() const;
virtual ErrorOr<void> do_timed_state_update() { return {}; }

View file

@@ -17,19 +17,19 @@ class Sample final {
public:
using AuxiliaryData = Variant<VideoSampleData>;
Sample(Duration timestamp, ReadonlyBytes data, AuxiliaryData auxiliary_data)
Sample(AK::Duration timestamp, ReadonlyBytes data, AuxiliaryData auxiliary_data)
: m_timestamp(timestamp)
, m_data(data)
, m_auxiliary_data(auxiliary_data)
{
}
Duration timestamp() const { return m_timestamp; }
AK::Duration timestamp() const { return m_timestamp; }
ReadonlyBytes const& data() const { return m_data; }
AuxiliaryData const& auxiliary_data() const { return m_auxiliary_data; }
private:
Duration m_timestamp;
AK::Duration m_timestamp;
ReadonlyBytes m_data;
AuxiliaryData m_auxiliary_data;
};

View file

@@ -22,7 +22,7 @@ enum class TrackType : u32 {
class Track {
struct VideoData {
Duration duration {};
AK::Duration duration {};
u64 pixel_width { 0 };
u64 pixel_height { 0 };
};

View file

@@ -18,8 +18,8 @@ class VideoDecoder {
public:
virtual ~VideoDecoder() {};
virtual DecoderErrorOr<void> receive_sample(Duration timestamp, ReadonlyBytes sample) = 0;
DecoderErrorOr<void> receive_sample(Duration timestamp, ByteBuffer const& sample) { return receive_sample(timestamp, sample.span()); }
virtual DecoderErrorOr<void> receive_sample(AK::Duration timestamp, ReadonlyBytes sample) = 0;
DecoderErrorOr<void> receive_sample(AK::Duration timestamp, ByteBuffer const& sample) { return receive_sample(timestamp, sample.span()); }
virtual DecoderErrorOr<NonnullOwnPtr<VideoFrame>> get_decoded_frame() = 0;
virtual void flush() = 0;

View file

@@ -13,7 +13,7 @@
namespace Media {
ErrorOr<NonnullOwnPtr<SubsampledYUVFrame>> SubsampledYUVFrame::try_create(
Duration timestamp,
AK::Duration timestamp,
Gfx::Size<u32> size,
u8 bit_depth, CodingIndependentCodePoints cicp,
Subsampling subsampling)
@@ -40,7 +40,7 @@ ErrorOr<NonnullOwnPtr<SubsampledYUVFrame>> SubsampledYUVFrame::try_create(
}
ErrorOr<NonnullOwnPtr<SubsampledYUVFrame>> SubsampledYUVFrame::try_create_from_data(
Duration timestamp,
AK::Duration timestamp,
Gfx::Size<u32> size,
u8 bit_depth, CodingIndependentCodePoints cicp,
Subsampling subsampling,

View file

@@ -31,7 +31,7 @@ public:
return bitmap;
}
inline Duration timestamp() const { return m_timestamp; }
inline AK::Duration timestamp() const { return m_timestamp; }
inline Gfx::Size<u32> size() const { return m_size; }
inline u32 width() const { return size().width(); }
@@ -41,7 +41,7 @@ public:
inline CodingIndependentCodePoints& cicp() { return m_cicp; }
protected:
VideoFrame(Duration timestamp,
VideoFrame(AK::Duration timestamp,
Gfx::Size<u32> size,
u8 bit_depth, CodingIndependentCodePoints cicp)
: m_timestamp(timestamp)
@@ -51,7 +51,7 @@ protected:
{
}
Duration m_timestamp;
AK::Duration m_timestamp;
Gfx::Size<u32> m_size;
u8 m_bit_depth;
CodingIndependentCodePoints m_cicp;
@@ -61,20 +61,20 @@ class SubsampledYUVFrame : public VideoFrame {
public:
static ErrorOr<NonnullOwnPtr<SubsampledYUVFrame>> try_create(
Duration timestamp,
AK::Duration timestamp,
Gfx::Size<u32> size,
u8 bit_depth, CodingIndependentCodePoints cicp,
Subsampling subsampling);
static ErrorOr<NonnullOwnPtr<SubsampledYUVFrame>> try_create_from_data(
Duration timestamp,
AK::Duration timestamp,
Gfx::Size<u32> size,
u8 bit_depth, CodingIndependentCodePoints cicp,
Subsampling subsampling,
ReadonlyBytes y_data, ReadonlyBytes u_data, ReadonlyBytes v_data);
SubsampledYUVFrame(
Duration timestamp,
AK::Duration timestamp,
Gfx::Size<u32> size,
u8 bit_depth, CodingIndependentCodePoints cicp,
Subsampling subsampling,