Merged
ci/Jenkinsfile_docker_cache (2 changes: 1 addition & 1 deletion)
@@ -38,7 +38,7 @@ core_logic: {
       timeout(time: total_timeout, unit: 'MINUTES') {
         utils.init_git()
         sh "python3 ./ci/docker_cache.py --docker-registry ${env.DOCKER_CACHE_REGISTRY}"
-        sh "cd ci && docker-compose -f docker/docker-compose.yml build --parallel && docker-compose -f docker/docker-compose.yml push "
+        sh "cd ci && python3 ./docker_login.py && docker-compose -f docker/docker-compose.yml build --parallel && docker-compose -f docker/docker-compose.yml push && docker logout"
       }
     }
   }
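
The cache job now logs in to Docker Hub through ci/docker_login.py before building and pushing, and runs docker logout afterwards. As a rough, hedged sketch of what such a login helper typically does (illustrative only, not the actual contents of docker_login.py; the secret-name and endpoint environment variable names and the username/password keys inside the secret are assumptions):

# Illustrative sketch only; the real ci/docker_login.py may differ.
import json
import os
import subprocess

import boto3


def login_dockerhub(secret_name: str, endpoint_url: str, region_name: str) -> None:
    """Fetch Docker Hub credentials from AWS Secrets Manager and run `docker login`."""
    client = boto3.client('secretsmanager', endpoint_url=endpoint_url, region_name=region_name)
    secret = json.loads(client.get_secret_value(SecretId=secret_name)['SecretString'])
    # Pipe the password through stdin so it never appears in the process list or build log.
    subprocess.run(['docker', 'login', '--username', secret['username'], '--password-stdin'],
                   input=secret['password'].encode('utf-8'), check=True)


def logout_dockerhub() -> None:
    subprocess.run(['docker', 'logout'], check=True)


if __name__ == '__main__':
    # Environment variable names below are placeholders for this sketch.
    login_dockerhub(os.environ['DOCKERHUB_SECRET_NAME'],
                    os.environ['DOCKERHUB_SECRET_ENDPOINT_URL'],
                    os.environ['DOCKERHUB_SECRET_ENDPOINT_REGION'])

After a successful docker login, the Docker CLI caches the credentials locally, so the subsequent docker-compose push in the same sh step can reuse them until docker logout runs.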
ci/docker/Dockerfile.publish.test.centos7 (3 changes: 0 additions & 3 deletions)
@@ -31,9 +31,6 @@ FROM $BASE_IMAGE
 
 WORKDIR /work/deps
 
-COPY install/centos7_scala.sh /work/
-RUN /work/centos7_scala.sh
-
 # Install runtime dependencies for publish tests
 # - make is used to run tests ci/publish/scala/test.sh
 # - unzip is used to run org.apache.mxnetexamples.neuralstyle.NeuralStyleSuite
ci/docker_cache.py (24 changes: 10 additions & 14 deletions)
@@ -41,7 +41,7 @@
 DOCKER_CACHE_RETRY_SECONDS = 5
 
 
-def build_save_containers(platforms, registry, load_cache) -> int:
+def build_save_containers(platforms, registry, load_cache, no_publish) -> int:
     """
     Entry point to build and upload all built dockerimages in parallel
     :param platforms: List of platforms
@@ -54,7 +54,7 @@ def build_save_containers(platforms, registry, load_cache) -> int:
         return 0
 
     platform_results = Parallel(n_jobs=PARALLEL_BUILDS, backend="multiprocessing")(
-        delayed(_build_save_container)(platform, registry, load_cache)
+        delayed(_build_save_container)(platform, registry, load_cache, no_publish)
         for platform in platforms)
 
     is_error = False
@@ -66,22 +66,14 @@ def _build_save_container(platform, registry, load_cache) -> Optional[str]:
     return 1 if is_error else 0
 
 
-def _build_save_container(platform, registry, load_cache) -> Optional[str]:
+def _build_save_container(platform, registry, load_cache, no_publish) -> Optional[str]:
     """
     Build image for passed platform and upload the cache to the specified S3 bucket
     :param platform: Platform
     :param registry: Docker registry name
     :param load_cache: Load cache before building
     :return: Platform if failed, None otherwise
     """
-    # Case 1: docker-compose
-    if platform in build_util.DOCKER_COMPOSE_WHITELIST:
-        build_util.build_docker(platform=platform, registry=registry, num_retries=10, no_cache=False)
-        push_cmd = ['docker-compose', 'push', platform]
-        subprocess.check_call(push_cmd)
-        return None
-
-    # Case 2: Deprecated way, will be removed
     docker_tag = build_util.get_docker_tag(platform=platform, registry=registry)
     # Preload cache
     if load_cache:
@@ -95,7 +87,8 @@ def _build_save_container(platform, registry, load_cache) -> Optional[str]:
         logging.info('Built %s as %s', docker_tag, image_id)
 
         # Push cache to registry
-        _upload_image(registry=registry, docker_tag=docker_tag, image_id=image_id)
+        if not no_publish:
+            _upload_image(registry=registry, docker_tag=docker_tag, image_id=image_id)
         return None
     except Exception:
         logging.exception('Unexpected exception during build of %s', docker_tag)
@@ -187,6 +180,8 @@ def script_name() -> str:
                         help="Docker hub registry name",
                         type=str,
                         required=True)
+    parser.add_argument("--no-publish", help="Only build but don't publish. Used for testing.",
+                        action='store_true')
 
     args = parser.parse_args()
 
@@ -197,8 +192,9 @@ def script_name() -> str:
     region_name = os.environ['DOCKERHUB_SECRET_ENDPOINT_REGION']
 
     try:
-        login_dockerhub(secret_name, endpoint_url, region_name)
-        return build_save_containers(platforms=platforms, registry=args.docker_registry, load_cache=True)
+        if not args.no_publish:
+            login_dockerhub(secret_name, endpoint_url, region_name)
+        return build_save_containers(platforms=platforms, registry=args.docker_registry, load_cache=True, no_publish=args.no_publish)
     finally:
         logout_dockerhub()
 
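With the flag wired through, the cache build can be exercised without Docker Hub credentials: --no-publish skips both login_dockerhub() and the _upload_image() call, which is what the new Jenkins step in the next file relies on. A hedged usage sketch of the updated entry point, driven as a library for a local dry run (the platform and registry names are placeholders, not values taken from this change):

# Hypothetical dry run of the cache build; values below are illustrative only.
import docker_cache  # ci/docker_cache.py, assuming the ci directory is on sys.path

# With no_publish=True every image is built (optionally seeded from the cache),
# but the script never logs in to Docker Hub and never uploads the result.
exit_code = docker_cache.build_save_containers(
    platforms=['ubuntu_cpu'],   # assumed platform name
    registry='mxnetci',         # assumed registry name
    load_cache=True,
    no_publish=True)
assert exit_code == 0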
ci/jenkins/Jenkins_steps.groovy (12 changes: 12 additions & 0 deletions)
@@ -1764,4 +1764,16 @@ def test_artifact_repository() {
   }]
 }
 
+def misc_test_docker_cache_build() {
+  return ['Test Docker cache build': {
+    node(NODE_LINUX_CPU) {
+      ws('workspace/docker_cache') {
+        utils.init_git()
+        sh "python3 ./ci/docker_cache.py --docker-registry ${env.DOCKER_CACHE_REGISTRY} --no-publish"
+        sh "cd ci && docker-compose -f docker/docker-compose.yml build --parallel"
+      }
+    }
+  }]
+}
+
 return this
ci/jenkins/Jenkinsfile_miscellaneous (3 changes: 2 additions & 1 deletion)
@@ -44,7 +44,8 @@ core_logic: {
   ])
 
   utils.parallel_stage('Tests', [
-    custom_steps.misc_asan_cpu()
+    custom_steps.misc_asan_cpu(),
+    custom_steps.misc_test_docker_cache_build()
   ])
 }
 ,