diff --git a/be/src/http/action/http_stream.cpp b/be/src/http/action/http_stream.cpp
index c6176c52815459..4a34605aa336a1 100644
--- a/be/src/http/action/http_stream.cpp
+++ b/be/src/http/action/http_stream.cpp
@@ -197,7 +197,12 @@ Status HttpStreamAction::_on_header(HttpRequest* http_req, std::shared_ptr<StreamLoadContext> ctx) {
     ctx->body_bytes = 0;
     size_t csv_max_body_bytes = config::streaming_load_max_mb * 1024 * 1024;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH), e.what());
+        }
         // csv max body size
         if (ctx->body_bytes > csv_max_body_bytes) {
             LOG(WARNING) << "body exceed max size." << ctx->brief();
@@ -352,7 +357,13 @@ Status HttpStreamAction::process_put(HttpRequest* http_req,
     // FIXME find a way to avoid chunked stream load write large WALs
     size_t content_length = 0;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH),
+                                           e.what());
+        }
         if (ctx->format == TFileFormatType::FORMAT_CSV_GZ ||
             ctx->format == TFileFormatType::FORMAT_CSV_LZO ||
             ctx->format == TFileFormatType::FORMAT_CSV_BZ2 ||
diff --git a/be/src/http/action/stream_load.cpp b/be/src/http/action/stream_load.cpp
index 1a9420dea637db..eef6a27b626539 100644
--- a/be/src/http/action/stream_load.cpp
+++ b/be/src/http/action/stream_load.cpp
@@ -266,7 +266,12 @@ Status StreamLoadAction::_on_header(HttpRequest* http_req,
                                     std::shared_ptr<StreamLoadContext> ctx) {
     size_t json_max_body_bytes = config::streaming_load_json_max_mb * 1024 * 1024;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            ctx->body_bytes = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH), e.what());
+        }
         // json max body size
         if ((ctx->format == TFileFormatType::FORMAT_JSON) &&
             (ctx->body_bytes > json_max_body_bytes) && !read_json_by_line) {
@@ -671,7 +676,13 @@ Status StreamLoadAction::_process_put(HttpRequest* http_req,
     // FIXME find a way to avoid chunked stream load write large WALs
     size_t content_length = 0;
     if (!http_req->header(HttpHeaders::CONTENT_LENGTH).empty()) {
-        content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        try {
+            content_length = std::stol(http_req->header(HttpHeaders::CONTENT_LENGTH));
+        } catch (const std::exception& e) {
+            return Status::InvalidArgument("invalid HTTP header CONTENT_LENGTH={}: {}",
+                                           http_req->header(HttpHeaders::CONTENT_LENGTH),
+                                           e.what());
+        }
         if (ctx->format == TFileFormatType::FORMAT_CSV_GZ ||
             ctx->format == TFileFormatType::FORMAT_CSV_LZO ||
             ctx->format == TFileFormatType::FORMAT_CSV_BZ2 ||