15 changes: 13 additions & 2 deletions be/src/http/action/http_stream.cpp
@@ -197,7 +197,12 @@ Status HttpStreamAction::_on_header(HttpRequest* http_req, std::shared_ptr<Strea
     ctx->body_bytes = 0;
     size_t csv_max_body_bytes = config::streaming_load_max_mb * 1024 * 1024;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH), e.what());
+        }
         // csv max body size
         if (ctx->body_bytes > csv_max_body_bytes) {
             LOG(WARNING) << "body exceed max size." << ctx->brief();
@@ -349,7 +354,13 @@ Status HttpStreamAction::process_put(HttpRequest* http_req,
     // FIXME find a way to avoid chunked stream load write large WALs
     size_t content_length = 0;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH),
+                                           e.what());
+        }
         if (ctx->format == TFileFormatType::FORMAT_CSV_GZ ||
             ctx->format == TFileFormatType::FORMAT_CSV_LZO ||
             ctx->format == TFileFormatType::FORMAT_CSV_BZ2 ||
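Note: the try/catch is needed because std::stol reports failure by throwing rather than by a return code: it raises std::invalid_argument when the header value contains no parsable digits and std::out_of_range when the number does not fit in a long. A minimal standalone sketch of the guarded-parse pattern used in both hunks above (the helper name parse_content_length is illustrative, not part of this patch):

#include <cstdio>
#include <stdexcept>
#include <string>

// Illustrative helper mirroring the patch: convert std::stol's exceptions
// on a malformed Content-Length value into a recoverable error.
bool parse_content_length(const std::string& value, size_t* out) {
    try {
        *out = std::stol(value); // throws std::invalid_argument / std::out_of_range
        return true;
    } catch (const std::exception& e) {
        std::fprintf(stderr, "invalid HTTP header CONTENT_LENGTH=%s: %s\n",
                     value.c_str(), e.what());
        return false;
    }
}

int main() {
    size_t n = 0;
    parse_content_length("1024", &n);                     // ok: n == 1024
    parse_content_length("abc", &n);                      // std::invalid_argument
    parse_content_length("99999999999999999999999", &n);  // std::out_of_range
}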
15 changes: 13 additions & 2 deletions be/src/http/action/stream_load.cpp
@@ -265,7 +265,12 @@ Status StreamLoadAction::_on_header(HttpRequest* http_req, std::shared_ptr<Strea
         }
     }
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH), e.what());
+        }
         // json max body size
         if ((ctx->format == TFileFormatType::FORMAT_JSON) &&
             (ctx->body_bytes > json_max_body_bytes) && !read_json_by_line) {
@@ -663,7 +668,13 @@ Status StreamLoadAction::_process_put(HttpRequest* http_req,
     // FIXME find a way to avoid chunked stream load write large WALs
     size_t content_length = 0;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH),
+                                           e.what());
+        }
         if (ctx->format == TFileFormatType::FORMAT_CSV_GZ ||
             ctx->format == TFileFormatType::FORMAT_CSV_LZO ||
             ctx->format == TFileFormatType::FORMAT_CSV_BZ2 ||
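Behavioral note: with these guards in place, a request whose Content-Length header is non-numeric or overflows a long is rejected during header processing with an InvalidArgument status that echoes the offending value, both in the _on_header admission checks and in the later process_put/_process_put WAL-sizing paths, rather than letting the exception propagate out of the HTTP handler.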