test: Retry HTTP file tests on temporary httpbin failure (#1203)

Author: Joey Parrish, 2023-05-01 16:56:46 -07:00 (committed by GitHub)
Parent: 901013c34e
Commit: 2e349845c6
6 changed files with 339 additions and 165 deletions

View File

@@ -176,6 +176,7 @@ HttpFile::HttpFile(HttpMethod method,
       download_cache_(absl::GetFlag(FLAGS_io_cache_size)),
       upload_cache_(absl::GetFlag(FLAGS_io_cache_size)),
       curl_(curl_easy_init()),
+      http_status_code_(0),
       status_(Status::OK),
       user_agent_(absl::GetFlag(FLAGS_user_agent)),
       ca_file_(absl::GetFlag(FLAGS_ca_file)),
@@ -231,6 +232,10 @@ bool HttpFile::Open() {
   return true;
 }
 
+int HttpFile::http_status_code() const {
+  return http_status_code_;
+}
+
 Status HttpFile::CloseWithStatus() {
   VLOG(2) << "Closing " << url_;
   // Close the cache first so the thread will finish uploading. Otherwise it
@@ -353,6 +358,7 @@ void HttpFile::ThreadMain() {
   if (res == CURLE_HTTP_RETURNED_ERROR) {
     long response_code = 0;
     curl_easy_getinfo(curl_.get(), CURLINFO_RESPONSE_CODE, &response_code);
+    http_status_code_ = static_cast<int>(response_code);
    error_message += absl::StrFormat(", response code: %ld.", response_code);
   }

View File

@@ -47,6 +47,7 @@ class HttpFile : public File {
   HttpFile(const HttpFile&) = delete;
   HttpFile& operator=(const HttpFile&) = delete;
 
+  int http_status_code() const;
   Status CloseWithStatus();
 
   /// @name File implementation overrides.
@@ -82,6 +83,7 @@ class HttpFile : public File {
   std::unique_ptr<CURL, CurlDelete> curl_;
   // The headers need to remain alive for the duration of the request.
   std::unique_ptr<curl_slist, CurlDelete> request_headers_;
+  int http_status_code_;
   Status status_;
   std::string user_agent_;
   std::string ca_file_;

View File

@@ -8,7 +8,9 @@
 #include <gtest/gtest.h>
 
+#include <chrono>
 #include <memory>
+#include <thread>
 #include <vector>
 
 #include "absl/strings/str_split.h"
@@ -64,176 +66,249 @@ nlohmann::json HandleResponse(const FilePtr& file) {
   return value;
 }
 
+// Tests using httpbin can sometimes be flaky. We get HTTP 502 errors when it
+// is overloaded. This will retry a test with delays, up to a limit, if the
+// HTTP status code is 502.
+void RetryTest(std::function<HttpFile*()> setup,
+               std::function<void(FilePtr&)> pre_read,
+               std::function<void(FilePtr&, nlohmann::json)> post_read) {
+  nlohmann::json json;
+  FilePtr file;
+
+  for (int i = 0; i < 3; ++i) {
+    file.reset(setup());
+    ASSERT_TRUE(file->Open());
+
+    pre_read(file);
+    if (testing::Test::HasFailure()) return;
+
+    json = HandleResponse(file);
+    if (file->http_status_code() != 502) {
+      // Not a 502 error, so take this result.
+      break;
+    }
+
+    // Delay with exponential increase (1s, 2s, 4s), then loop try again.
+    int delay = 1 << i;
+    LOG(WARNING) << "httpbin failure (" << file->http_status_code() << "): "
+                 << "Delaying " << delay << " seconds and retrying.";
+    std::this_thread::sleep_for(std::chrono::seconds(delay));
+  }
+
+  // Out of retries? Check what we have.
+  post_read(file, json);
+}
+
 }  // namespace
 
 TEST(HttpFileTest, BasicGet) {
-  FilePtr file(new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything"));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
-  auto json = HandleResponse(file);
-  ASSERT_TRUE(json.is_object());
-  ASSERT_TRUE(file.release()->Close());
-
-  ASSERT_JSON_STRING(json, "method", "GET");
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything");
+      },
+      // pre_read
+      [](FilePtr&) -> void {},
+      // post_read
+      [](FilePtr&, nlohmann::json json) -> void {
+        ASSERT_TRUE(json.is_object());
+        ASSERT_JSON_STRING(json, "method", "GET");
+      });
 }
 
 TEST(HttpFileTest, CustomHeaders) {
-  std::vector<std::string> headers{"Host: foo", "X-My-Header: Something"};
-  FilePtr file(new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything",
-                            "", headers, 0));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
-  auto json = HandleResponse(file);
-  ASSERT_TRUE(json.is_object());
-  ASSERT_TRUE(file.release()->Close());
-
-  ASSERT_JSON_STRING(json, "method", "GET");
-  ASSERT_JSON_STRING(json, "headers.Host", "foo");
-  ASSERT_JSON_STRING(json, "headers.X-My-Header", "Something");
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        std::vector<std::string> headers{"Host: foo", "X-My-Header: Something"};
+        return new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything",
+                            "", headers, 0);
+      },
+      // pre_read
+      [](FilePtr&) -> void {},
+      // post_read
+      [](FilePtr&, nlohmann::json json) -> void {
+        ASSERT_TRUE(json.is_object());
+        ASSERT_JSON_STRING(json, "method", "GET");
+        ASSERT_JSON_STRING(json, "headers.Host", "foo");
+        ASSERT_JSON_STRING(json, "headers.X-My-Header", "Something");
+      });
 }
 
 TEST(HttpFileTest, BasicPost) {
-  FilePtr file(new HttpFile(HttpMethod::kPost, "https://httpbin.org/anything"));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
   const std::string data = "abcd";
-  ASSERT_EQ(file->Write(data.data(), data.size()),
-            static_cast<int64_t>(data.size()));
-  ASSERT_TRUE(file->Flush());
-
-  auto json = HandleResponse(file);
-  ASSERT_TRUE(json.is_object());
-  ASSERT_TRUE(file.release()->Close());
-
-  ASSERT_JSON_STRING(json, "method", "POST");
-  ASSERT_JSON_STRING(json, "data", data);
-  ASSERT_JSON_STRING(json, "headers.Content-Type", "application/octet-stream");
-
-  // Curl may choose to send chunked or not based on the data. We request
-  // chunked encoding, but don't control if it is actually used. If we get
-  // chunked transfer, there is no Content-Length header reflected back to us.
-  if (!GetJsonString(json, "headers.Content-Length").empty()) {
-    ASSERT_JSON_STRING(json, "headers.Content-Length",
-                       std::to_string(data.size()));
-  } else {
-    ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
-  }
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kPost, "https://httpbin.org/anything");
+      },
+      // pre_read
+      [&data](FilePtr& file) -> void {
+        ASSERT_EQ(file->Write(data.data(), data.size()),
+                  static_cast<int64_t>(data.size()));
+        ASSERT_TRUE(file->Flush());
+      },
+      // post_read
+      [&data](FilePtr&, nlohmann::json json) -> void {
+        ASSERT_TRUE(json.is_object());
+        ASSERT_JSON_STRING(json, "method", "POST");
+        ASSERT_JSON_STRING(json, "data", data);
+        ASSERT_JSON_STRING(json, "headers.Content-Type",
+                           "application/octet-stream");
+
+        // Curl may choose to send chunked or not based on the data. We request
+        // chunked encoding, but don't control if it is actually used. If we
+        // get chunked transfer, there is no Content-Length header reflected
+        // back to us.
+        if (!GetJsonString(json, "headers.Content-Length").empty()) {
+          ASSERT_JSON_STRING(json, "headers.Content-Length",
+                             std::to_string(data.size()));
+        } else {
+          ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
+        }
+      });
 }
 
 TEST(HttpFileTest, BasicPut) {
-  FilePtr file(new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything"));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
   const std::string data = "abcd";
-  ASSERT_EQ(file->Write(data.data(), data.size()),
-            static_cast<int64_t>(data.size()));
-  ASSERT_TRUE(file->Flush());
-
-  auto json = HandleResponse(file);
-  ASSERT_TRUE(json.is_object());
-  ASSERT_TRUE(file.release()->Close());
-
-  ASSERT_JSON_STRING(json, "method", "PUT");
-  ASSERT_JSON_STRING(json, "data", data);
-  ASSERT_JSON_STRING(json, "headers.Content-Type", "application/octet-stream");
-
-  // Curl may choose to send chunked or not based on the data. We request
-  // chunked encoding, but don't control if it is actually used. If we get
-  // chunked transfer, there is no Content-Length header reflected back to us.
-  if (!GetJsonString(json, "headers.Content-Length").empty()) {
-    ASSERT_JSON_STRING(json, "headers.Content-Length",
-                       std::to_string(data.size()));
-  } else {
-    ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
-  }
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything");
+      },
+      // pre_read
+      [&data](FilePtr& file) -> void {
+        ASSERT_EQ(file->Write(data.data(), data.size()),
+                  static_cast<int64_t>(data.size()));
+        ASSERT_TRUE(file->Flush());
+      },
+      // post_read
+      [&data](FilePtr&, nlohmann::json json) -> void {
+        ASSERT_TRUE(json.is_object());
+        ASSERT_JSON_STRING(json, "method", "PUT");
+        ASSERT_JSON_STRING(json, "data", data);
+        ASSERT_JSON_STRING(json, "headers.Content-Type",
+                           "application/octet-stream");
+
+        // Curl may choose to send chunked or not based on the data. We request
+        // chunked encoding, but don't control if it is actually used. If we
+        // get chunked transfer, there is no Content-Length header reflected
+        // back to us.
+        if (!GetJsonString(json, "headers.Content-Length").empty()) {
+          ASSERT_JSON_STRING(json, "headers.Content-Length",
+                             std::to_string(data.size()));
+        } else {
+          ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
+        }
+      });
 }
 
 TEST(HttpFileTest, MultipleWrites) {
-  FilePtr file(new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything"));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
   const std::string data1 = "abcd";
   const std::string data2 = "efgh";
   const std::string data3 = "ijkl";
   const std::string data4 = "mnop";
-  ASSERT_EQ(file->Write(data1.data(), data1.size()),
-            static_cast<int64_t>(data1.size()));
-  ASSERT_EQ(file->Write(data2.data(), data2.size()),
-            static_cast<int64_t>(data2.size()));
-  ASSERT_EQ(file->Write(data3.data(), data3.size()),
-            static_cast<int64_t>(data3.size()));
-  ASSERT_EQ(file->Write(data4.data(), data4.size()),
-            static_cast<int64_t>(data4.size()));
-  ASSERT_TRUE(file->Flush());
-
-  auto json = HandleResponse(file);
-  ASSERT_TRUE(json.is_object());
-  ASSERT_TRUE(file.release()->Close());
-
-  ASSERT_JSON_STRING(json, "method", "PUT");
-  ASSERT_JSON_STRING(json, "data", data1 + data2 + data3 + data4);
-  ASSERT_JSON_STRING(json, "headers.Content-Type", "application/octet-stream");
-
-  // Curl may choose to send chunked or not based on the data. We request
-  // chunked encoding, but don't control if it is actually used. If we get
-  // chunked transfer, there is no Content-Length header reflected back to us.
-  if (!GetJsonString(json, "headers.Content-Length").empty()) {
-    auto totalSize = data1.size() + data2.size() + data3.size() + data4.size();
-    ASSERT_JSON_STRING(json, "headers.Content-Length",
-                       std::to_string(totalSize));
-  } else {
-    ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
-  }
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything");
+      },
+      // pre_read
+      [&data1, &data2, &data3, &data4](FilePtr& file) -> void {
+        ASSERT_EQ(file->Write(data1.data(), data1.size()),
+                  static_cast<int64_t>(data1.size()));
+        ASSERT_EQ(file->Write(data2.data(), data2.size()),
+                  static_cast<int64_t>(data2.size()));
+        ASSERT_EQ(file->Write(data3.data(), data3.size()),
+                  static_cast<int64_t>(data3.size()));
+        ASSERT_EQ(file->Write(data4.data(), data4.size()),
+                  static_cast<int64_t>(data4.size()));
+        ASSERT_TRUE(file->Flush());
+      },
+      // post_read
+      [&data1, &data2, &data3, &data4](FilePtr&, nlohmann::json json) -> void {
+        ASSERT_TRUE(json.is_object());
+        ASSERT_JSON_STRING(json, "method", "PUT");
+        ASSERT_JSON_STRING(json, "data", data1 + data2 + data3 + data4);
+        ASSERT_JSON_STRING(json, "headers.Content-Type",
+                           "application/octet-stream");
+
+        // Curl may choose to send chunked or not based on the data. We request
+        // chunked encoding, but don't control if it is actually used. If we
+        // get chunked transfer, there is no Content-Length header reflected
+        // back to us.
+        if (!GetJsonString(json, "headers.Content-Length").empty()) {
+          auto totalSize =
+              data1.size() + data2.size() + data3.size() + data4.size();
+          ASSERT_JSON_STRING(json, "headers.Content-Length",
+                             std::to_string(totalSize));
+        } else {
+          ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
+        }
+      });
 }
 
 // TODO: Test chunked uploads explicitly.
 
 TEST(HttpFileTest, Error404) {
-  FilePtr file(
-      new HttpFile(HttpMethod::kGet, "https://httpbin.org/status/404"));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
-  // The site returns an empty response.
-  uint8_t buffer[1];
-  ASSERT_EQ(file->Read(buffer, sizeof(buffer)), 0);
-
-  auto status = file.release()->CloseWithStatus();
-  ASSERT_FALSE(status.ok());
-  ASSERT_EQ(status.error_code(), error::HTTP_FAILURE);
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kGet, "https://httpbin.org/status/404");
+      },
+      // pre_read
+      [](FilePtr&) -> void {},
+      // post_read
+      [](FilePtr& file, nlohmann::json) -> void {
+        // The site returns an empty response, not JSON.
+        auto status = file.release()->CloseWithStatus();
+        ASSERT_FALSE(status.ok());
+        ASSERT_EQ(status.error_code(), error::HTTP_FAILURE);
+      });
 }
 
 TEST(HttpFileTest, TimeoutTriggered) {
-  FilePtr file(
-      new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/8", "", {}, 1));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
-  // Request should timeout; error is reported in Close/CloseWithStatus.
-  uint8_t buffer[1];
-  ASSERT_EQ(file->Read(buffer, sizeof(buffer)), 0);
-
-  auto status = file.release()->CloseWithStatus();
-  ASSERT_FALSE(status.ok());
-  ASSERT_EQ(status.error_code(), error::TIME_OUT);
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/8", "",
+                            {}, 1);
+      },
+      // pre_read
+      [](FilePtr&) -> void {},
+      // post_read
+      [](FilePtr& file, nlohmann::json) -> void {
+        // Request should timeout; error is reported in Close/CloseWithStatus.
+        auto status = file.release()->CloseWithStatus();
+        ASSERT_FALSE(status.ok());
+        ASSERT_EQ(status.error_code(), error::TIME_OUT);
+      });
 }
 
 TEST(HttpFileTest, TimeoutNotTriggered) {
-  FilePtr file(
-      new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/1", "", {}, 5));
-  ASSERT_TRUE(file);
-  ASSERT_TRUE(file->Open());
-
-  auto json = HandleResponse(file);
-  ASSERT_TRUE(json.is_object());
-  ASSERT_TRUE(file.release()->Close());
+  RetryTest(
+      // setup
+      []() -> HttpFile* {
+        return new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/1", "",
+                            {}, 10);
+      },
+      // pre_read
+      [](FilePtr&) -> void {},
+      // post_read
+      [](FilePtr& file, nlohmann::json json) -> void {
+        // The timeout was not triggered. We got back some JSON.
+        auto status = file.release()->CloseWithStatus();
+        ASSERT_TRUE(status.ok());
+        ASSERT_TRUE(json.is_object());
+      });
 }
 
 }  // namespace shaka

View File

@@ -25,10 +25,14 @@ constexpr size_t kBufferSize = 64 * 1024;
 HttpKeyFetcher::HttpKeyFetcher() : timeout_in_seconds_(0) {}
 
 HttpKeyFetcher::HttpKeyFetcher(int32_t timeout_in_seconds)
-    : timeout_in_seconds_(timeout_in_seconds) {}
+    : timeout_in_seconds_(timeout_in_seconds), http_status_code_(0) {}
 
 HttpKeyFetcher::~HttpKeyFetcher() {}
 
+int HttpKeyFetcher::http_status_code() const {
+  return http_status_code_;
+}
+
 Status HttpKeyFetcher::FetchKeys(const std::string& url,
                                  const std::string& request,
                                  std::string* response) {
@@ -74,6 +78,9 @@ Status HttpKeyFetcher::FetchInternal(HttpMethod method,
       break;
     response->append(temp, ret);
   }
+
+  http_status_code_ = file->http_status_code();
+
   return file.release()->CloseWithStatus();
 }

View File

@@ -31,6 +31,8 @@ class HttpKeyFetcher : public KeyFetcher {
   HttpKeyFetcher(int32_t timeout_in_seconds);
   ~HttpKeyFetcher() override;
 
+  int http_status_code() const;
+
   /// @name KeyFetcher implementation overrides.
   Status FetchKeys(const std::string& url,
                    const std::string& request,
@@ -57,6 +59,7 @@ class HttpKeyFetcher : public KeyFetcher {
                        const std::string& data, std::string* response);
 
   const int32_t timeout_in_seconds_;
+  int http_status_code_;
 
   DISALLOW_COPY_AND_ASSIGN(HttpKeyFetcher);
 };

View File

@@ -7,71 +7,152 @@
 #include "packager/media/base/http_key_fetcher.h"
 
 #include <algorithm>
+#include <chrono>
+#include <thread>
 
 #include "glog/logging.h"
 #include "packager/status/status_test_util.h"
 
+namespace shaka {
+namespace media {
+
 namespace {
 const char kTestUrl[] = "https://httpbin.org/anything";
 const char kTestUrl404[] = "https://httpbin.org/status/404";
 const char kTestUrlWithPort[] = "https://httpbin.org:443/anything";
 const char kTestUrlDelayTwoSecs[] = "https://httpbin.org/delay/2";
-}  // namespace
 
-namespace shaka {
-namespace media {
-
-TEST(HttpFetcherTest, HttpGet) {
-  HttpKeyFetcher fetcher;
-  std::string response;
-  ASSERT_OK(fetcher.Get(kTestUrl, &response));
-  EXPECT_NE(std::string::npos, response.find("\"method\": \"GET\""));
-}
-
-TEST(HttpFetcherTest, HttpPost) {
-  HttpKeyFetcher fetcher;
-  std::string response;
-  ASSERT_OK(fetcher.Post(kTestUrl, "", &response));
-  EXPECT_NE(std::string::npos, response.find("\"method\": \"POST\""));
+// Tests using httpbin can sometimes be flaky. We get HTTP 502 errors when it
+// is overloaded. This will retry a test with delays, up to a limit, if the
+// HTTP status code is 502.
+void RetryTest(
+    std::function<Status(HttpKeyFetcher&, std::string*)> make_request,
+    std::function<void(Status, std::string&)> check_response,
+    int32_t timeout_in_seconds = 0) {
+  std::string response;
+  Status status;
+
+  for (int i = 0; i < 3; ++i) {
+    HttpKeyFetcher fetcher(timeout_in_seconds);
+    response.clear();
+    status = make_request(fetcher, &response);
+    if (testing::Test::HasFailure()) return;
+
+    if (fetcher.http_status_code() != 502) {
+      // Not a 502 error, so take this result.
+      break;
+    }
+
+    // Delay with exponential increase (1s, 2s, 4s), then loop try again.
+    int delay = 1 << i;
+    LOG(WARNING) << "httpbin failure (" << fetcher.http_status_code() << "): "
+                 << "Delaying " << delay << " seconds and retrying.";
+    std::this_thread::sleep_for(std::chrono::seconds(delay));
+  }
+
+  // Out of retries? Check what we have.
+  check_response(status, response);
+}
+
+}  // namespace
+
+TEST(HttpKeyFetcherTest, HttpGet) {
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.Get(kTestUrl, response);
+      },
+      // check_response
+      [](Status status, std::string& response) -> void {
+        ASSERT_OK(status);
+        EXPECT_NE(std::string::npos, response.find("\"method\": \"GET\""));
+      });
+}
+
+TEST(HttpKeyFetcherTest, HttpPost) {
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.Post(kTestUrl, "", response);
+      },
+      // check_response
+      [](Status status, std::string& response) -> void {
+        ASSERT_OK(status);
+        EXPECT_NE(std::string::npos, response.find("\"method\": \"POST\""));
+      });
 }
 
 TEST(HttpKeyFetcherTest, HttpFetchKeys) {
-  HttpKeyFetcher fetcher;
-  std::string response;
-  ASSERT_OK(fetcher.FetchKeys(kTestUrl, "foo=62&type=mp4", &response));
-  EXPECT_NE(std::string::npos, response.find("\"foo=62&type=mp4\""));
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.FetchKeys(kTestUrl, "foo=62&type=mp4", response);
+      },
+      // check_response
+      [](Status status, std::string& response) -> void {
+        ASSERT_OK(status);
+        EXPECT_NE(std::string::npos, response.find("\"foo=62&type=mp4\""));
+      });
 }
 
 TEST(HttpKeyFetcherTest, InvalidUrl) {
-  HttpKeyFetcher fetcher;
-  std::string response;
-  Status status = fetcher.FetchKeys(kTestUrl404, "", &response);
-  EXPECT_EQ(error::HTTP_FAILURE, status.error_code());
-  EXPECT_NE(std::string::npos, status.error_message().find("404"));
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.FetchKeys(kTestUrl404, "", response);
+      },
+      // check_response
+      [](Status status, std::string&) -> void {
+        EXPECT_EQ(error::HTTP_FAILURE, status.error_code());
+        EXPECT_NE(std::string::npos, status.error_message().find("404"));
+      });
 }
 
 TEST(HttpKeyFetcherTest, UrlWithPort) {
-  HttpKeyFetcher fetcher;
-  std::string response;
-  ASSERT_OK(fetcher.FetchKeys(kTestUrlWithPort, "", &response));
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.FetchKeys(kTestUrlWithPort, "", response);
+      },
+      // check_response
+      [](Status status, std::string&) -> void {
+        ASSERT_OK(status);
+      });
 }
 
 TEST(HttpKeyFetcherTest, SmallTimeout) {
   const int32_t kTimeoutInSeconds = 1;
-  HttpKeyFetcher fetcher(kTimeoutInSeconds);
-  std::string response;
-  Status status = fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", &response);
-  EXPECT_EQ(error::TIME_OUT, status.error_code());
+
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", response);
+      },
+      // check_response
+      [](Status status, std::string&) -> void {
+        EXPECT_EQ(error::TIME_OUT, status.error_code());
+      },
+      // timeout_in_seconds
+      kTimeoutInSeconds);
 }
 
 TEST(HttpKeyFetcherTest, BigTimeout) {
-  const int32_t kTimeoutInSeconds = 5;
-  HttpKeyFetcher fetcher(kTimeoutInSeconds);
-  std::string response;
-  Status status = fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", &response);
-  EXPECT_OK(status);
+  const int32_t kTimeoutInSeconds = 10;
+
+  RetryTest(
+      // make_request
+      [](HttpKeyFetcher& fetcher, std::string* response) -> Status {
+        return fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", response);
+      },
+      // check_response
+      [](Status status, std::string&) -> void {
+        ASSERT_OK(status);
+      },
+      // timeout_in_seconds
+      kTimeoutInSeconds);
 }
 
 }  // namespace media
 }  // namespace shaka