test: Retry HTTP file tests on temporary httpbin failure (#1203)

Joey Parrish 2023-05-01 16:56:46 -07:00 committed by GitHub
parent 901013c34e
commit 2e349845c6
6 changed files with 339 additions and 165 deletions
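
Both test files below adopt the same pattern: run the request, and if the transport reports HTTP 502 (httpbin overloaded), back off exponentially (1s, 2s, 4s) and try again, up to three attempts. A minimal standalone sketch of that pattern, with a hypothetical DoRequest callback and Result type standing in for the real HttpFile/HttpKeyFetcher calls:

#include <chrono>
#include <functional>
#include <thread>

// Hypothetical stand-ins for illustration only; the commit itself works
// directly against HttpFile and HttpKeyFetcher.
struct Result {
  int http_status_code = 0;
};

Result RetryOn502(const std::function<Result()>& do_request,
                  int max_attempts = 3) {
  Result result;
  for (int i = 0; i < max_attempts; ++i) {
    result = do_request();
    if (result.http_status_code != 502)
      break;  // Success, or a non-transient error: keep this result.
    // Transient overload: wait 1s, 2s, 4s, ... before the next attempt.
    std::this_thread::sleep_for(std::chrono::seconds(1 << i));
  }
  return result;
}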

View File

@@ -176,6 +176,7 @@ HttpFile::HttpFile(HttpMethod method,
download_cache_(absl::GetFlag(FLAGS_io_cache_size)),
upload_cache_(absl::GetFlag(FLAGS_io_cache_size)),
curl_(curl_easy_init()),
http_status_code_(0),
status_(Status::OK),
user_agent_(absl::GetFlag(FLAGS_user_agent)),
ca_file_(absl::GetFlag(FLAGS_ca_file)),
@@ -231,6 +232,10 @@ bool HttpFile::Open() {
return true;
}
int HttpFile::http_status_code() const {
return http_status_code_;
}
Status HttpFile::CloseWithStatus() {
VLOG(2) << "Closing " << url_;
// Close the cache first so the thread will finish uploading. Otherwise it
@@ -353,6 +358,7 @@ void HttpFile::ThreadMain() {
if (res == CURLE_HTTP_RETURNED_ERROR) {
long response_code = 0;
curl_easy_getinfo(curl_.get(), CURLINFO_RESPONSE_CODE, &response_code);
http_status_code_ = static_cast<int>(response_code);
error_message += absl::StrFormat(", response code: %ld.", response_code);
}
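
With the hunks above, the status code reported by libcurl is captured when a transfer fails with an HTTP error and exposed through the new accessor. A hedged caller-side sketch (not part of this commit), assuming the shaka namespace and the HttpFile API exactly as declared in this diff:

// Illustration only: returns true if the request hit a transient 502 and the
// caller should back off and retry (this is what RetryTest() in the unit
// tests below does).
bool RequestHitTransient502(const std::string& url) {
  HttpFile* file = new HttpFile(HttpMethod::kGet, url);
  if (!file->Open())
    return false;
  uint8_t buffer[64 * 1024];
  while (file->Read(buffer, sizeof(buffer)) > 0) {
    // Drain and discard the body; only the status code matters here.
  }
  const int code = file->http_status_code();  // New accessor from this commit.
  // The tests call CloseWithStatus() through file.release(), which suggests it
  // also frees |file|; the returned Status is ignored in this sketch.
  file->CloseWithStatus();
  return code == 502;
}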

View File

@@ -47,6 +47,7 @@ class HttpFile : public File {
HttpFile(const HttpFile&) = delete;
HttpFile& operator=(const HttpFile&) = delete;
int http_status_code() const;
Status CloseWithStatus();
/// @name File implementation overrides.
@@ -82,6 +83,7 @@ class HttpFile : public File {
std::unique_ptr<CURL, CurlDelete> curl_;
// The headers need to remain alive for the duration of the request.
std::unique_ptr<curl_slist, CurlDelete> request_headers_;
int http_status_code_;
Status status_;
std::string user_agent_;
std::string ca_file_;

View File

@@ -8,7 +8,9 @@
#include <gtest/gtest.h>
#include <chrono>
#include <memory>
#include <thread>
#include <vector>
#include "absl/strings/str_split.h"
@@ -64,176 +66,249 @@ nlohmann::json HandleResponse(const FilePtr& file) {
return value;
}
// Tests using httpbin can sometimes be flaky. We get HTTP 502 errors when it
// is overloaded. This will retry a test with delays, up to a limit, if the
// HTTP status code is 502.
void RetryTest(std::function<HttpFile*()> setup,
std::function<void(FilePtr&)> pre_read,
std::function<void(FilePtr&, nlohmann::json)> post_read) {
nlohmann::json json;
FilePtr file;
for (int i = 0; i < 3; ++i) {
file.reset(setup());
ASSERT_TRUE(file->Open());
pre_read(file);
if (testing::Test::HasFailure()) return;
json = HandleResponse(file);
if (file->http_status_code() != 502) {
// Not a 502 error, so take this result.
break;
}
// Delay with exponential increase (1s, 2s, 4s), then loop and try again.
int delay = 1 << i;
LOG(WARNING) << "httpbin failure (" << file->http_status_code() << "): "
<< "Delaying " << delay << " seconds and retrying.";
std::this_thread::sleep_for(std::chrono::seconds(delay));
}
// Out of retries? Check what we have.
post_read(file, json);
}
} // namespace
TEST(HttpFileTest, BasicGet) {
FilePtr file(new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything"));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
auto json = HandleResponse(file);
ASSERT_TRUE(json.is_object());
ASSERT_TRUE(file.release()->Close());
ASSERT_JSON_STRING(json, "method", "GET");
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything");
},
// pre_read
[](FilePtr&) -> void {},
// post_read
[](FilePtr&, nlohmann::json json) -> void {
ASSERT_TRUE(json.is_object());
ASSERT_JSON_STRING(json, "method", "GET");
});
}
TEST(HttpFileTest, CustomHeaders) {
std::vector<std::string> headers{"Host: foo", "X-My-Header: Something"};
FilePtr file(new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything",
"", headers, 0));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
auto json = HandleResponse(file);
ASSERT_TRUE(json.is_object());
ASSERT_TRUE(file.release()->Close());
ASSERT_JSON_STRING(json, "method", "GET");
ASSERT_JSON_STRING(json, "headers.Host", "foo");
ASSERT_JSON_STRING(json, "headers.X-My-Header", "Something");
RetryTest(
// setup
[]() -> HttpFile* {
std::vector<std::string> headers{"Host: foo", "X-My-Header: Something"};
return new HttpFile(HttpMethod::kGet, "https://httpbin.org/anything",
"", headers, 0);
},
// pre_read
[](FilePtr&) -> void {},
// post_read
[](FilePtr&, nlohmann::json json) -> void {
ASSERT_TRUE(json.is_object());
ASSERT_JSON_STRING(json, "method", "GET");
ASSERT_JSON_STRING(json, "headers.Host", "foo");
ASSERT_JSON_STRING(json, "headers.X-My-Header", "Something");
});
}
TEST(HttpFileTest, BasicPost) {
FilePtr file(new HttpFile(HttpMethod::kPost, "https://httpbin.org/anything"));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
const std::string data = "abcd";
ASSERT_EQ(file->Write(data.data(), data.size()),
static_cast<int64_t>(data.size()));
ASSERT_TRUE(file->Flush());
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kPost, "https://httpbin.org/anything");
},
// pre_read
[&data](FilePtr& file) -> void {
ASSERT_EQ(file->Write(data.data(), data.size()),
static_cast<int64_t>(data.size()));
ASSERT_TRUE(file->Flush());
},
// post_read
[&data](FilePtr&, nlohmann::json json) -> void {
ASSERT_TRUE(json.is_object());
auto json = HandleResponse(file);
ASSERT_TRUE(json.is_object());
ASSERT_TRUE(file.release()->Close());
ASSERT_JSON_STRING(json, "method", "POST");
ASSERT_JSON_STRING(json, "data", data);
ASSERT_JSON_STRING(json, "headers.Content-Type",
"application/octet-stream");
ASSERT_JSON_STRING(json, "method", "POST");
ASSERT_JSON_STRING(json, "data", data);
ASSERT_JSON_STRING(json, "headers.Content-Type", "application/octet-stream");
// Curl may choose to send chunked or not based on the data. We request
// chunked encoding, but don't control if it is actually used. If we get
// chunked transfer, there is no Content-Length header reflected back to us.
if (!GetJsonString(json, "headers.Content-Length").empty()) {
ASSERT_JSON_STRING(json, "headers.Content-Length",
std::to_string(data.size()));
} else {
ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
}
// Curl may choose to send chunked or not based on the data. We request
// chunked encoding, but don't control if it is actually used. If we
// get chunked transfer, there is no Content-Length header reflected
// back to us.
if (!GetJsonString(json, "headers.Content-Length").empty()) {
ASSERT_JSON_STRING(json, "headers.Content-Length",
std::to_string(data.size()));
} else {
ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
}
});
}
TEST(HttpFileTest, BasicPut) {
FilePtr file(new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything"));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
const std::string data = "abcd";
ASSERT_EQ(file->Write(data.data(), data.size()),
static_cast<int64_t>(data.size()));
ASSERT_TRUE(file->Flush());
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything");
},
// pre_read
[&data](FilePtr& file) -> void {
ASSERT_EQ(file->Write(data.data(), data.size()),
static_cast<int64_t>(data.size()));
ASSERT_TRUE(file->Flush());
},
// post_read
[&data](FilePtr&, nlohmann::json json) -> void {
ASSERT_TRUE(json.is_object());
auto json = HandleResponse(file);
ASSERT_TRUE(json.is_object());
ASSERT_TRUE(file.release()->Close());
ASSERT_JSON_STRING(json, "method", "PUT");
ASSERT_JSON_STRING(json, "data", data);
ASSERT_JSON_STRING(json, "headers.Content-Type",
"application/octet-stream");
ASSERT_JSON_STRING(json, "method", "PUT");
ASSERT_JSON_STRING(json, "data", data);
ASSERT_JSON_STRING(json, "headers.Content-Type", "application/octet-stream");
// Curl may choose to send chunked or not based on the data. We request
// chunked encoding, but don't control if it is actually used. If we get
// chunked transfer, there is no Content-Length header reflected back to us.
if (!GetJsonString(json, "headers.Content-Length").empty()) {
ASSERT_JSON_STRING(json, "headers.Content-Length",
std::to_string(data.size()));
} else {
ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
}
// Curl may choose to send chunked or not based on the data. We request
// chunked encoding, but don't control if it is actually used. If we
// get chunked transfer, there is no Content-Length header reflected
// back to us.
if (!GetJsonString(json, "headers.Content-Length").empty()) {
ASSERT_JSON_STRING(json, "headers.Content-Length",
std::to_string(data.size()));
} else {
ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
}
});
}
TEST(HttpFileTest, MultipleWrites) {
FilePtr file(new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything"));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
const std::string data1 = "abcd";
const std::string data2 = "efgh";
const std::string data3 = "ijkl";
const std::string data4 = "mnop";
ASSERT_EQ(file->Write(data1.data(), data1.size()),
static_cast<int64_t>(data1.size()));
ASSERT_EQ(file->Write(data2.data(), data2.size()),
static_cast<int64_t>(data2.size()));
ASSERT_EQ(file->Write(data3.data(), data3.size()),
static_cast<int64_t>(data3.size()));
ASSERT_EQ(file->Write(data4.data(), data4.size()),
static_cast<int64_t>(data4.size()));
ASSERT_TRUE(file->Flush());
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kPut, "https://httpbin.org/anything");
},
// pre_read
[&data1, &data2, &data3, &data4](FilePtr& file) -> void {
ASSERT_EQ(file->Write(data1.data(), data1.size()),
static_cast<int64_t>(data1.size()));
ASSERT_EQ(file->Write(data2.data(), data2.size()),
static_cast<int64_t>(data2.size()));
ASSERT_EQ(file->Write(data3.data(), data3.size()),
static_cast<int64_t>(data3.size()));
ASSERT_EQ(file->Write(data4.data(), data4.size()),
static_cast<int64_t>(data4.size()));
ASSERT_TRUE(file->Flush());
},
// post_read
[&data1, &data2, &data3, &data4](FilePtr&, nlohmann::json json) -> void {
ASSERT_TRUE(json.is_object());
auto json = HandleResponse(file);
ASSERT_TRUE(json.is_object());
ASSERT_TRUE(file.release()->Close());
ASSERT_JSON_STRING(json, "method", "PUT");
ASSERT_JSON_STRING(json, "data", data1 + data2 + data3 + data4);
ASSERT_JSON_STRING(json, "headers.Content-Type",
"application/octet-stream");
ASSERT_JSON_STRING(json, "method", "PUT");
ASSERT_JSON_STRING(json, "data", data1 + data2 + data3 + data4);
ASSERT_JSON_STRING(json, "headers.Content-Type", "application/octet-stream");
// Curl may choose to send chunked or not based on the data. We request
// chunked encoding, but don't control if it is actually used. If we get
// chunked transfer, there is no Content-Length header reflected back to us.
if (!GetJsonString(json, "headers.Content-Length").empty()) {
auto totalSize = data1.size() + data2.size() + data3.size() + data4.size();
ASSERT_JSON_STRING(json, "headers.Content-Length",
std::to_string(totalSize));
} else {
ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
}
// Curl may choose to send chunked or not based on the data. We request
// chunked encoding, but don't control if it is actually used. If we
// get chunked transfer, there is no Content-Length header reflected
// back to us.
if (!GetJsonString(json, "headers.Content-Length").empty()) {
auto totalSize =
data1.size() + data2.size() + data3.size() + data4.size();
ASSERT_JSON_STRING(json, "headers.Content-Length",
std::to_string(totalSize));
} else {
ASSERT_JSON_STRING(json, "headers.Transfer-Encoding", "chunked");
}
});
}
// TODO: Test chunked uploads explicitly.
TEST(HttpFileTest, Error404) {
FilePtr file(
new HttpFile(HttpMethod::kGet, "https://httpbin.org/status/404"));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
// The site returns an empty response.
uint8_t buffer[1];
ASSERT_EQ(file->Read(buffer, sizeof(buffer)), 0);
auto status = file.release()->CloseWithStatus();
ASSERT_FALSE(status.ok());
ASSERT_EQ(status.error_code(), error::HTTP_FAILURE);
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kGet, "https://httpbin.org/status/404");
},
// pre_read
[](FilePtr&) -> void {},
// post_read
[](FilePtr& file, nlohmann::json) -> void {
// The site returns an empty response, not JSON.
auto status = file.release()->CloseWithStatus();
ASSERT_FALSE(status.ok());
ASSERT_EQ(status.error_code(), error::HTTP_FAILURE);
});
}
TEST(HttpFileTest, TimeoutTriggered) {
FilePtr file(
new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/8", "", {}, 1));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
// Request should time out; the error is reported in Close/CloseWithStatus.
uint8_t buffer[1];
ASSERT_EQ(file->Read(buffer, sizeof(buffer)), 0);
auto status = file.release()->CloseWithStatus();
ASSERT_FALSE(status.ok());
ASSERT_EQ(status.error_code(), error::TIME_OUT);
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/8", "",
{}, 1);
},
// pre_read
[](FilePtr&) -> void {},
// post_read
[](FilePtr& file, nlohmann::json) -> void {
// Request should time out; the error is reported in Close/CloseWithStatus.
auto status = file.release()->CloseWithStatus();
ASSERT_FALSE(status.ok());
ASSERT_EQ(status.error_code(), error::TIME_OUT);
});
}
TEST(HttpFileTest, TimeoutNotTriggered) {
FilePtr file(
new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/1", "", {}, 5));
ASSERT_TRUE(file);
ASSERT_TRUE(file->Open());
auto json = HandleResponse(file);
ASSERT_TRUE(json.is_object());
ASSERT_TRUE(file.release()->Close());
RetryTest(
// setup
[]() -> HttpFile* {
return new HttpFile(HttpMethod::kGet, "https://httpbin.org/delay/1", "",
{}, 10);
},
// pre_read
[](FilePtr&) -> void {},
// post_read
[](FilePtr& file, nlohmann::json json) -> void {
// The timeout was not triggered. We got back some JSON.
auto status = file.release()->CloseWithStatus();
ASSERT_TRUE(status.ok());
ASSERT_TRUE(json.is_object());
});
}
} // namespace shaka

View File

@@ -25,10 +25,14 @@ constexpr size_t kBufferSize = 64 * 1024;
HttpKeyFetcher::HttpKeyFetcher() : timeout_in_seconds_(0) {}
HttpKeyFetcher::HttpKeyFetcher(int32_t timeout_in_seconds)
: timeout_in_seconds_(timeout_in_seconds) {}
: timeout_in_seconds_(timeout_in_seconds), http_status_code_(0) {}
HttpKeyFetcher::~HttpKeyFetcher() {}
int HttpKeyFetcher::http_status_code() const {
return http_status_code_;
}
Status HttpKeyFetcher::FetchKeys(const std::string& url,
const std::string& request,
std::string* response) {
@@ -74,6 +78,9 @@ Status HttpKeyFetcher::FetchInternal(HttpMethod method,
break;
response->append(temp, ret);
}
http_status_code_ = file->http_status_code();
return file.release()->CloseWithStatus();
}

View File

@@ -31,6 +31,8 @@ class HttpKeyFetcher : public KeyFetcher {
HttpKeyFetcher(int32_t timeout_in_seconds);
~HttpKeyFetcher() override;
int http_status_code() const;
/// @name KeyFetcher implementation overrides.
Status FetchKeys(const std::string& url,
const std::string& request,
@@ -57,6 +59,7 @@ class HttpKeyFetcher : public KeyFetcher {
const std::string& data, std::string* response);
const int32_t timeout_in_seconds_;
int http_status_code_;
DISALLOW_COPY_AND_ASSIGN(HttpKeyFetcher);
};
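
The key-fetcher side mirrors the file-level change: HttpKeyFetcher now records the status code of its last request and exposes it through http_status_code(). A hedged usage sketch (the function name, URL, and request body are made up), assuming only the API shown in these hunks and the shaka::media namespace:

// Illustration only, not part of the commit.
bool KeyFetchHitTransient502(const std::string& hypothetical_license_url) {
  HttpKeyFetcher fetcher;
  std::string response;
  Status status =
      fetcher.FetchKeys(hypothetical_license_url, "key_request_body", &response);
  // A 502 surfaces as a failed Status plus the recorded status code; the
  // caller could back off and retry, as RetryTest() does in the tests below.
  return !status.ok() && fetcher.http_status_code() == 502;
}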

View File

@@ -7,71 +7,152 @@
#include "packager/media/base/http_key_fetcher.h"
#include <algorithm>
#include <chrono>
#include <thread>
#include "glog/logging.h"
#include "packager/status/status_test_util.h"
namespace shaka {
namespace media {
namespace {
const char kTestUrl[] = "https://httpbin.org/anything";
const char kTestUrl404[] = "https://httpbin.org/status/404";
const char kTestUrlWithPort[] = "https://httpbin.org:443/anything";
const char kTestUrlDelayTwoSecs[] = "https://httpbin.org/delay/2";
} // namespace
namespace shaka {
namespace media {
TEST(HttpFetcherTest, HttpGet) {
HttpKeyFetcher fetcher;
// Tests using httpbin can sometimes be flaky. We get HTTP 502 errors when it
// is overloaded. This will retry a test with delays, up to a limit, if the
// HTTP status code is 502.
void RetryTest(
std::function<Status(HttpKeyFetcher&, std::string*)> make_request,
std::function<void(Status, std::string&)> check_response,
int32_t timeout_in_seconds = 0) {
std::string response;
ASSERT_OK(fetcher.Get(kTestUrl, &response));
EXPECT_NE(std::string::npos, response.find("\"method\": \"GET\""));
Status status;
for (int i = 0; i < 3; ++i) {
HttpKeyFetcher fetcher(timeout_in_seconds);
response.clear();
status = make_request(fetcher, &response);
if (testing::Test::HasFailure()) return;
if (fetcher.http_status_code() != 502) {
// Not a 502 error, so take this result.
break;
}
// Delay with exponential increase (1s, 2s, 4s), then loop and try again.
int delay = 1 << i;
LOG(WARNING) << "httpbin failure (" << fetcher.http_status_code() << "): "
<< "Delaying " << delay << " seconds and retrying.";
std::this_thread::sleep_for(std::chrono::seconds(delay));
}
// Out of retries? Check what we have.
check_response(status, response);
}
TEST(HttpFetcherTest, HttpPost) {
HttpKeyFetcher fetcher;
std::string response;
ASSERT_OK(fetcher.Post(kTestUrl, "", &response));
EXPECT_NE(std::string::npos, response.find("\"method\": \"POST\""));
} // namespace
TEST(HttpKeyFetcherTest, HttpGet) {
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.Get(kTestUrl, response);
},
// check_response
[](Status status, std::string& response) -> void {
ASSERT_OK(status);
EXPECT_NE(std::string::npos, response.find("\"method\": \"GET\""));
});
}
TEST(HttpKeyFetcherTest, HttpPost) {
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.Post(kTestUrl, "", response);
},
// check_response
[](Status status, std::string& response) -> void {
ASSERT_OK(status);
EXPECT_NE(std::string::npos, response.find("\"method\": \"POST\""));
});
}
TEST(HttpKeyFetcherTest, HttpFetchKeys) {
HttpKeyFetcher fetcher;
std::string response;
ASSERT_OK(fetcher.FetchKeys(kTestUrl, "foo=62&type=mp4", &response));
EXPECT_NE(std::string::npos, response.find("\"foo=62&type=mp4\""));
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.FetchKeys(kTestUrl, "foo=62&type=mp4", response);
},
// check_response
[](Status status, std::string& response) -> void {
ASSERT_OK(status);
EXPECT_NE(std::string::npos, response.find("\"foo=62&type=mp4\""));
});
}
TEST(HttpKeyFetcherTest, InvalidUrl) {
HttpKeyFetcher fetcher;
std::string response;
Status status = fetcher.FetchKeys(kTestUrl404, "", &response);
EXPECT_EQ(error::HTTP_FAILURE, status.error_code());
EXPECT_NE(std::string::npos, status.error_message().find("404"));
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.FetchKeys(kTestUrl404, "", response);
},
// check_response
[](Status status, std::string&) -> void {
EXPECT_EQ(error::HTTP_FAILURE, status.error_code());
EXPECT_NE(std::string::npos, status.error_message().find("404"));
});
}
TEST(HttpKeyFetcherTest, UrlWithPort) {
HttpKeyFetcher fetcher;
std::string response;
ASSERT_OK(fetcher.FetchKeys(kTestUrlWithPort, "", &response));
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.FetchKeys(kTestUrlWithPort, "", response);
},
// check_response
[](Status status, std::string&) -> void {
ASSERT_OK(status);
});
}
TEST(HttpKeyFetcherTest, SmallTimeout) {
const int32_t kTimeoutInSeconds = 1;
HttpKeyFetcher fetcher(kTimeoutInSeconds);
std::string response;
Status status = fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", &response);
EXPECT_EQ(error::TIME_OUT, status.error_code());
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", response);
},
// check_response
[](Status status, std::string&) -> void {
EXPECT_EQ(error::TIME_OUT, status.error_code());
},
// timeout_in_seconds
kTimeoutInSeconds);
}
TEST(HttpKeyFetcherTest, BigTimeout) {
const int32_t kTimeoutInSeconds = 5;
HttpKeyFetcher fetcher(kTimeoutInSeconds);
std::string response;
Status status = fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", &response);
EXPECT_OK(status);
const int32_t kTimeoutInSeconds = 10;
RetryTest(
// make_request
[](HttpKeyFetcher& fetcher, std::string* response) -> Status {
return fetcher.FetchKeys(kTestUrlDelayTwoSecs, "", response);
},
// check_response
[](Status status, std::string&) -> void {
ASSERT_OK(status);
},
// timeout_in_seconds
kTimeoutInSeconds);
}
} // namespace media
} // namespace shaka