Add a runtime flag to use dts in timeline for mp4
This flag is needed to work around the Chromium bug https://crbug.com/398130, in which the decoding timestamp is used for the buffered range. Closes #113 Change-Id: Ib8f18be7165dd968bdc36c18ce29f694235c0c26
This commit is contained in:
parent
a80995538d
commit
fae7874c4c
|
@ -25,6 +25,12 @@
|
|||
#include "packager/media/file/file.h"
|
||||
#include "packager/mpd/base/mpd_builder.h"
|
||||
|
||||
DEFINE_bool(mp4_use_decoding_timestamp_in_timeline,
|
||||
false,
|
||||
"If set, decoding timestamp instead of presentation timestamp will "
|
||||
"be used when generating media timeline, e.g. timestamps in sidx "
|
||||
"and mpd. This is to workaround a Chromium bug that decoding "
|
||||
"timestamp is used in buffered range, https://crbug.com/398130.");
|
||||
DEFINE_bool(dump_stream_info, false, "Dump demuxed stream info.");
|
||||
|
||||
namespace shaka {
|
||||
|
@ -148,6 +154,16 @@ bool GetMuxerOptions(MuxerOptions* muxer_options) {
|
|||
muxer_options->segment_sap_aligned = FLAGS_segment_sap_aligned;
|
||||
muxer_options->fragment_sap_aligned = FLAGS_fragment_sap_aligned;
|
||||
muxer_options->num_subsegments_per_sidx = FLAGS_num_subsegments_per_sidx;
|
||||
|
||||
if (FLAGS_mp4_use_decoding_timestamp_in_timeline) {
|
||||
LOG(WARNING) << "Flag --mp4_use_decoding_timestamp_in_timeline is set. "
|
||||
"Note that it is a temporary hack to workaround Chromium "
|
||||
"bug https://crbug.com/398130. The flag may be removed "
|
||||
"when the Chromium bug is fixed.";
|
||||
}
|
||||
muxer_options->mp4_use_decoding_timestamp_in_timeline =
|
||||
FLAGS_mp4_use_decoding_timestamp_in_timeline;
|
||||
|
||||
muxer_options->temp_dir = FLAGS_temp_dir;
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ MuxerOptions::MuxerOptions()
|
|||
segment_sap_aligned(false),
|
||||
fragment_sap_aligned(false),
|
||||
num_subsegments_per_sidx(0),
|
||||
mp4_use_decoding_timestamp_in_timeline(false),
|
||||
bandwidth(0) {}
|
||||
MuxerOptions::~MuxerOptions() {}
|
||||
|
||||
|
|
|
@ -49,6 +49,13 @@ struct MuxerOptions {
|
|||
/// segment_duration/N/fragment_duration fragments per subsegment.
|
||||
int num_subsegments_per_sidx;
|
||||
|
||||
/// For ISO BMFF only.
|
||||
/// Set the flag use_decoding_timestamp_in_timeline, which if set to true, use
|
||||
/// decoding timestamp instead of presentation timestamp in media timeline,
|
||||
/// which is needed to workaround a Chromium bug that decoding timestamp is
|
||||
/// used in buffered range, https://crbug.com/398130.
|
||||
bool mp4_use_decoding_timestamp_in_timeline;
|
||||
|
||||
/// Output file name. If segment_template is not specified, the Muxer
|
||||
/// generates this single output file with all segments concatenated;
|
||||
/// Otherwise, it specifies the init segment name.
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
|
||||
#include "packager/media/file/file.h"
|
||||
|
||||
#include <gflags/gflags.h>
|
||||
#include <algorithm>
|
||||
|
||||
#include <gflags/gflags.h>
|
||||
#include "packager/base/logging.h"
|
||||
#include "packager/base/memory/scoped_ptr.h"
|
||||
#include "packager/media/file/local_file.h"
|
||||
|
|
|
@ -21,7 +21,8 @@ namespace {
|
|||
const int64_t kInvalidTime = std::numeric_limits<int64_t>::max();
|
||||
|
||||
uint64_t GetSeekPreroll(const StreamInfo& stream_info) {
|
||||
if (stream_info.stream_type() != kStreamAudio) return 0;
|
||||
if (stream_info.stream_type() != kStreamAudio)
|
||||
return 0;
|
||||
const AudioStreamInfo& audio_stream_info =
|
||||
static_cast<const AudioStreamInfo&>(stream_info);
|
||||
return audio_stream_info.seek_preroll_ns();
|
||||
|
@ -29,7 +30,8 @@ uint64_t GetSeekPreroll(const StreamInfo& stream_info) {
|
|||
} // namespace
|
||||
|
||||
Fragmenter::Fragmenter(scoped_refptr<StreamInfo> info, TrackFragment* traf)
|
||||
: traf_(traf),
|
||||
: use_decoding_timestamp_in_timeline_(false),
|
||||
traf_(traf),
|
||||
seek_preroll_(GetSeekPreroll(*info)),
|
||||
fragment_initialized_(false),
|
||||
fragment_finalized_(false),
|
||||
|
@ -64,15 +66,17 @@ Status Fragmenter::AddSample(scoped_refptr<MediaSample> sample) {
|
|||
data_->AppendArray(sample->data(), sample->data_size());
|
||||
fragment_duration_ += sample->duration();
|
||||
|
||||
int64_t pts = sample->pts();
|
||||
const int64_t pts = sample->pts();
|
||||
const int64_t dts = sample->dts();
|
||||
|
||||
// Set |earliest_presentation_time_| to |pts| if |pts| is smaller or if it is
|
||||
// not yet initialized (kInvalidTime > pts is always true).
|
||||
if (earliest_presentation_time_ > pts)
|
||||
earliest_presentation_time_ = pts;
|
||||
const int64_t timestamp = use_decoding_timestamp_in_timeline_ ? dts : pts;
|
||||
// Set |earliest_presentation_time_| to |timestamp| if |timestamp| is smaller
|
||||
// or if it is not yet initialized (kInvalidTime > timestamp is always true).
|
||||
if (earliest_presentation_time_ > timestamp)
|
||||
earliest_presentation_time_ = timestamp;
|
||||
|
||||
traf_->runs[0].sample_composition_time_offsets.push_back(pts - sample->dts());
|
||||
if (pts != sample->dts())
|
||||
traf_->runs[0].sample_composition_time_offsets.push_back(pts - dts);
|
||||
if (pts != dts)
|
||||
traf_->runs[0].flags |= TrackFragmentRun::kSampleCompTimeOffsetsPresentMask;
|
||||
|
||||
if (sample->is_key_frame()) {
|
||||
|
@ -141,7 +145,8 @@ void Fragmenter::FinalizeFragment() {
|
|||
sample_to_group_entry.group_description_index =
|
||||
SampleToGroupEntry::kTrackGroupDescriptionIndexBase + 1;
|
||||
}
|
||||
for (const auto& sample_group_description : traf_->sample_group_descriptions) {
|
||||
for (const auto& sample_group_description :
|
||||
traf_->sample_group_descriptions) {
|
||||
traf_->sample_to_groups.resize(traf_->sample_to_groups.size() + 1);
|
||||
SampleToGroup& sample_to_group = traf_->sample_to_groups.back();
|
||||
sample_to_group.grouping_type = sample_group_description.grouping_type;
|
||||
|
|
|
@ -62,6 +62,15 @@ class Fragmenter {
|
|||
bool fragment_finalized() const { return fragment_finalized_; }
|
||||
BufferWriter* data() { return data_.get(); }
|
||||
|
||||
/// Set the flag use_decoding_timestamp_in_timeline, which if set to true, use
|
||||
/// decoding timestamp instead of presentation timestamp in media timeline,
|
||||
/// which is needed to workaround a Chromium bug that decoding timestamp is
|
||||
/// used in buffered range, https://crbug.com/398130.
|
||||
void set_use_decoding_timestamp_in_timeline(
|
||||
bool use_decoding_timestamp_in_timeline) {
|
||||
use_decoding_timestamp_in_timeline_ = use_decoding_timestamp_in_timeline;
|
||||
}
|
||||
|
||||
protected:
|
||||
TrackFragment* traf() { return traf_; }
|
||||
|
||||
|
@ -75,6 +84,7 @@ class Fragmenter {
|
|||
// Check if the current fragment starts with SAP.
|
||||
bool StartsWithSAP();
|
||||
|
||||
bool use_decoding_timestamp_in_timeline_;
|
||||
TrackFragment* traf_;
|
||||
uint64_t seek_preroll_;
|
||||
bool fragment_initialized_;
|
||||
|
|
|
@ -264,6 +264,11 @@ Status Segmenter::Initialize(const std::vector<MediaStream*>& streams,
|
|||
muxer_listener_);
|
||||
}
|
||||
|
||||
if (options_.mp4_use_decoding_timestamp_in_timeline) {
|
||||
for (uint32_t i = 0; i < streams.size(); ++i)
|
||||
fragmenters_[i]->set_use_decoding_timestamp_in_timeline(true);
|
||||
}
|
||||
|
||||
// Choose the first stream if there is no VIDEO.
|
||||
if (sidx_->reference_id == 0)
|
||||
sidx_->reference_id = 1;
|
||||
|
|
Loading…
Reference in New Issue