#include "packager/file/io_cache.h"

#include <string.h>

#include <algorithm>

#include "packager/base/logging.h"

namespace shaka {

using base::AutoLock;
using base::AutoUnlock;
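
// IoCache is a thread-safe circular buffer shared between writing and reading
// threads: Read() blocks while the cache is empty, Write() blocks while it is
// full, and Close() unblocks both sides.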
IoCache::IoCache(uint64_t cache_size)
    : cache_size_(cache_size),
      read_event_(base::WaitableEvent::ResetPolicy::AUTOMATIC,
                  base::WaitableEvent::InitialState::NOT_SIGNALED),
      write_event_(base::WaitableEvent::ResetPolicy::AUTOMATIC,
                   base::WaitableEvent::InitialState::NOT_SIGNALED),
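      // Allocate one byte more than cache_size so that a full cache
      // (cache_size bytes stored) never makes w_ptr_ wrap onto r_ptr_,
      // which would look identical to an empty cache.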
      circular_buffer_(cache_size + 1),
      end_ptr_(&circular_buffer_[0] + cache_size + 1),
      r_ptr_(circular_buffer_.data()),
      w_ptr_(circular_buffer_.data()),
      closed_(false) {}

IoCache::~IoCache() {
  Close();
}

uint64_t IoCache::Read(void* buffer, uint64_t size) {
  DCHECK(buffer);

  AutoLock lock(lock_);
  // Block until data is available or the cache is closed.
  while (!closed_ && (BytesCachedInternal() == 0)) {
    AutoUnlock unlock(lock_);
    write_event_.Wait();
  }

  size = std::min(size, BytesCachedInternal());
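  // Copy in up to two chunks: first up to the physical end of the buffer,
  // then, after wrapping r_ptr_ back to the start, whatever remains.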
  uint64_t first_chunk_size(
      std::min(size, static_cast<uint64_t>(end_ptr_ - r_ptr_)));
  memcpy(buffer, r_ptr_, first_chunk_size);
  r_ptr_ += first_chunk_size;
  DCHECK_GE(end_ptr_, r_ptr_);
  if (r_ptr_ == end_ptr_)
    r_ptr_ = &circular_buffer_[0];
  uint64_t second_chunk_size(size - first_chunk_size);
  if (second_chunk_size) {
    memcpy(static_cast<uint8_t*>(buffer) + first_chunk_size, r_ptr_,
           second_chunk_size);
    r_ptr_ += second_chunk_size;
    DCHECK_GT(end_ptr_, r_ptr_);
  }

  // Wake up a writer that may be blocked on a full cache.
  read_event_.Signal();
  return size;
}

uint64_t IoCache::Write(const void* buffer, uint64_t size) {
  DCHECK(buffer);

  const uint8_t* r_ptr(static_cast<const uint8_t*>(buffer));
  uint64_t bytes_left(size);
  while (bytes_left) {
    AutoLock lock(lock_);
    while (!closed_ && (BytesFreeInternal() == 0)) {
      AutoUnlock unlock(lock_);
      VLOG(1) << "Circular buffer is full, which can happen if data arrives "
                 "faster than being consumed by packager. Ignore if it is not "
                 "live packaging. Otherwise, try increasing --io_cache_size.";
      read_event_.Wait();
    }
    if (closed_)
      return 0;

    uint64_t write_size(std::min(bytes_left, BytesFreeInternal()));
    uint64_t first_chunk_size(
        std::min(write_size, static_cast<uint64_t>(end_ptr_ - w_ptr_)));
    memcpy(w_ptr_, r_ptr, first_chunk_size);
    w_ptr_ += first_chunk_size;
    DCHECK_GE(end_ptr_, w_ptr_);
    if (w_ptr_ == end_ptr_)
      w_ptr_ = &circular_buffer_[0];
    r_ptr += first_chunk_size;
    uint64_t second_chunk_size(write_size - first_chunk_size);
    if (second_chunk_size) {
      memcpy(w_ptr_, r_ptr, second_chunk_size);
      w_ptr_ += second_chunk_size;
      DCHECK_GT(end_ptr_, w_ptr_);
      r_ptr += second_chunk_size;
    }
    bytes_left -= write_size;
    // Let a blocked reader know that more data is available.
    write_event_.Signal();
  }
  return size;
}

void IoCache::Clear() {
  AutoLock lock(lock_);
  r_ptr_ = w_ptr_ = circular_buffer_.data();
  // Let any blocked writer know that there is room in the cache again.
  read_event_.Signal();
}

void IoCache::Close() {
  AutoLock lock(lock_);
  closed_ = true;
  // Unblock any reader or writer waiting on the events.
  read_event_.Signal();
  write_event_.Signal();
}

void IoCache::Reopen() {
  AutoLock lock(lock_);
  CHECK(closed_);
  r_ptr_ = w_ptr_ = circular_buffer_.data();
  closed_ = false;
  read_event_.Reset();
  write_event_.Reset();
}

uint64_t IoCache::BytesCached() {
  AutoLock lock(lock_);
  return BytesCachedInternal();
}

uint64_t IoCache::BytesFree() {
  AutoLock lock(lock_);
  return BytesFreeInternal();
}
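
// When w_ptr_ has not wrapped past r_ptr_, the cached data is the contiguous
// range [r_ptr_, w_ptr_); otherwise it spans from r_ptr_ to the physical end
// of the buffer plus the wrapped portion at the front.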
uint64_t IoCache::BytesCachedInternal() {
  return (r_ptr_ <= w_ptr_)
             ? w_ptr_ - r_ptr_
             : (end_ptr_ - r_ptr_) + (w_ptr_ - circular_buffer_.data());
}

uint64_t IoCache::BytesFreeInternal() {
  return cache_size_ - BytesCachedInternal();
}

void IoCache::WaitUntilEmptyOrClosed() {
  AutoLock lock(lock_);
  while (!closed_ && BytesCachedInternal()) {
    AutoUnlock unlock(lock_);
    read_event_.Wait();
  }
}

}  // namespace shaka
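
// Illustrative usage sketch (not part of the original source; names below are
// hypothetical): one thread produces data with Write() while another consumes
// it with Read(); Close() unblocks both sides, and Read() returns 0 once the
// cache is closed and drained.
//
//   shaka::IoCache cache(64 * 1024);
//   std::thread producer([&cache] {
//     std::vector<uint8_t> chunk(4096, 0xab);
//     cache.Write(chunk.data(), chunk.size());
//     cache.Close();
//   });
//   std::vector<uint8_t> out(4096);
//   while (cache.Read(out.data(), out.size()) > 0) {
//     // Consume the bytes that were read.
//   }
//   producer.join();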