// Copyright 2017 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd

#include "packager/media/chunking/text_chunker.h"

namespace shaka {
namespace media {
namespace {
const size_t kStreamIndex = 0;
}  // namespace
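
// Segments are aligned to multiples of |segment_duration_ms| and all share
// the same duration.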
TextChunker::TextChunker(uint64_t segment_duration_ms)
    : segment_duration_ms_(segment_duration_ms) {}

Status TextChunker::InitializeInternal() {
  return Status::OK;
}
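
// Stream info is forwarded as-is; text samples are buffered into segments by
// OnTextSample. Any other stream data type is an error for this handler.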
Status TextChunker::Process(std::unique_ptr<StreamData> stream_data) {
  switch (stream_data->stream_data_type) {
    case StreamDataType::kStreamInfo:
      return DispatchStreamInfo(kStreamIndex,
                                std::move(stream_data->stream_info));
    case StreamDataType::kTextSample:
      return OnTextSample(stream_data->text_sample);
    default:
      return Status(error::INTERNAL_ERROR,
                    "Invalid stream data type for this handler");
  }
}

Status TextChunker::OnFlushRequest(size_t input_stream_index) {
  // At this point we know that there is a single series of consecutive
  // segments; all we need to do is run through all of them.
  for (const auto& pair : segment_map_) {
    Status status = DispatchSegmentWithSamples(pair.first, pair.second);

    if (!status.ok()) {
      return status;
    }
  }

  segment_map_.clear();

  return FlushAllDownstreams();
}
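
// Adds |sample| to every segment it overlaps, then dispatches all earlier
// segments, which can no longer receive samples, emitting empty segments to
// fill any gaps.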
Status TextChunker::OnTextSample(std::shared_ptr<const TextSample> sample) {
  // Find the first segment that overlaps the sample.
  const uint64_t start_segment = sample->start_time() / segment_duration_ms_;

  // Find the last segment that overlaps the sample. Adjust the sample by one
  // ms (the smallest time unit) in case |EndTime| falls on a segment boundary.
  DCHECK_GT(sample->duration(), 0u);
  const uint64_t ending_segment =
      (sample->EndTime() - 1) / segment_duration_ms_;

  DCHECK_GE(ending_segment, start_segment);

  // Samples must always be advancing. If a sample comes in out of order,
  // skip the sample.
  if (head_segment_ > start_segment) {
    LOG(WARNING) << "New sample has arrived out of order. Skipping sample "
                 << "as segment start is " << start_segment << " and segment "
                 << "head is " << head_segment_ << ".";
    return Status::OK;
  }

  // Add the sample to each segment it spans.
  for (uint64_t segment = start_segment; segment <= ending_segment; segment++) {
    segment_map_[segment].push_back(sample);
  }

  // Move forward segment-by-segment so that we output empty segments to fill
  // any segments with no cues.
  for (uint64_t segment = head_segment_; segment < start_segment; segment++) {
    auto it = segment_map_.find(segment);

    Status status;
    if (it == segment_map_.end()) {
      const SegmentSamples kNoSamples;
      status.Update(DispatchSegmentWithSamples(segment, kNoSamples));
    } else {
      // We found a segment; output all its samples. Remove it from the map as
      // we should never need to write to it again.
      status.Update(DispatchSegmentWithSamples(segment, it->second));
      segment_map_.erase(it);
    }

    // If we fail to output a segment, just stop.
    if (!status.ok()) {
      return status;
    }
  }

  // Jump ahead to the start of this segment as we should never see any samples
  // that start before |start_segment|.
  head_segment_ = start_segment;

  return Status::OK;
}
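
// Sends every sample in |samples| downstream, then closes the segment by
// sending its segment info.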
Status TextChunker::DispatchSegmentWithSamples(uint64_t segment,
                                               const SegmentSamples& samples) {
  Status status;
  for (const auto& sample : samples) {
    status.Update(DispatchTextSample(kStreamIndex, sample));
  }

  // Only send the segment info if all samples were dispatched successfully.
  if (!status.ok()) {
    return status;
  }

  std::shared_ptr<SegmentInfo> info = std::make_shared<SegmentInfo>();
  info->start_timestamp = segment * segment_duration_ms_;
  info->duration = segment_duration_ms_;

  return DispatchSegmentInfo(kStreamIndex, std::move(info));
}

}  // namespace media
}  // namespace shaka