5 changes: 5 additions & 0 deletions .github/workflows/reusable_build.yml
@@ -93,6 +93,11 @@ jobs:
uses: ./.github/actions/common_setup
with:
job_type: build_check
- name: Create source tar
run: |
mkdir -p "$TEMP_PATH/build_check/package_release"
cd .. && tar czf $TEMP_PATH/build_source.src.tar.gz ClickHouse/
cd $TEMP_PATH && tar xvzf $TEMP_PATH/build_source.src.tar.gz
- name: Pre
run: |
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --pre --job-name '${{inputs.build_name}}'
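
The new "Create source tar" step packs the whole ClickHouse checkout from the runner's parent directory into $TEMP_PATH/build_source.src.tar.gz and immediately unpacks a copy under $TEMP_PATH. Below is a minimal Python sketch (not part of the PR) of the same sequence, assuming TEMP_PATH is exported by the common_setup action and the repository lives in a directory named ClickHouse next to the current working directory:

# Sketch of the "Create source tar" shell step; the environment layout is an assumption.
import os
import tarfile
from pathlib import Path

temp_path = Path(os.environ["TEMP_PATH"])
(temp_path / "build_check" / "package_release").mkdir(parents=True, exist_ok=True)

tar_path = temp_path / "build_source.src.tar.gz"
repo_parent = Path.cwd().parent  # the step runs `cd ..` before creating the archive
with tarfile.open(tar_path, "w:gz") as tar:
    tar.add(repo_parent / "ClickHouse", arcname="ClickHouse")  # pack the whole checkout

with tarfile.open(tar_path, "r:gz") as tar:
    tar.extractall(path=temp_path)  # leaves an extracted copy at $TEMP_PATH/ClickHouse/
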
23 changes: 23 additions & 0 deletions tests/ci/build_check.py
Expand Up @@ -14,6 +14,7 @@
from git_helper import Git
from pr_info import PRInfo, EventType
from report import FAILURE, SUCCESS, JobReport, StatusType
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
from version_helper import (
@@ -223,6 +224,28 @@ def main():
f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True
)
logging.info("Build finished as %s, log path %s", build_status, log_path)

s3_helper = S3Helper()
s3_path_prefix = "/".join(
(
get_release_or_pr(pr_info, get_version_from_repo())[0],
pr_info.sha,
build_name,
)
)
src_path = temp_path / "build_source.src.tar.gz"
s3_path = s3_path_prefix + "/clickhouse-" + version.string + ".src.tar.gz"
logging.info("s3_path %s", s3_path)
if src_path.exists():
src_url = s3_helper.upload_build_file_to_s3(
src_path, s3_path
)
logging.info("Source tar %s", src_url)
print(f"::notice ::Source tar URL: {src_url}")
else:
logging.info("Source tar doesn't exist")
print("Source tar doesn't exist")

if build_status != SUCCESS:
# We check if docker works, because if it's down, it's infrastructure
try:
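
For reference, a small sketch of the S3 key that the new upload block in build_check.py constructs. The PR number, commit SHA, build name, and version string below are assumptions for illustration, not values from this PR:

# Hypothetical values; in build_check.py they come from get_release_or_pr(...)[0],
# pr_info.sha, build_name, and version.string respectively.
release_or_pr = "12345"            # assumed PR number
sha = "abcdef0123456789"           # assumed commit SHA
build_name = "package_release"     # assumed build name
version_string = "24.1.1.1"        # assumed ClickHouse version

s3_path_prefix = "/".join((release_or_pr, sha, build_name))
s3_path = s3_path_prefix + "/clickhouse-" + version_string + ".src.tar.gz"
print(s3_path)
# 12345/abcdef0123456789/package_release/clickhouse-24.1.1.1.src.tar.gz

The Python side looks for the tar at temp_path / "build_source.src.tar.gz", which matches the path the workflow step writes to; if the file is missing, the else branch only logs and prints "Source tar doesn't exist" rather than failing the build.
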