50 changes: 39 additions & 11 deletions compose/config/config.py
@@ -125,6 +125,7 @@
'network_mode',
'init',
'scale',
'overwrite_multivals'
]

DOCKER_VALID_URL_PREFIXES = (
@@ -902,13 +903,22 @@ def __init__(self, base, override):
def needs_merge(self, field):
return field in self.base or field in self.override

def merge_field(self, field, merge_func, default=None):
def needs_overwrite(self, field):
return ('overwrite_multivals' in self.override and
field in self.override['overwrite_multivals'])

def merge_field(self, field, merge_func, overwrite_func=None, default=None):
if not self.needs_merge(field):
return

self[field] = merge_func(
self.base.get(field, default),
self.override.get(field, default))
if self.needs_overwrite(field):
self[field] = overwrite_func(
self.base.get(field, default),
self.override.get(field, default))
else:
self[field] = merge_func(
self.base.get(field, default),
self.override.get(field, default))

def merge_mapping(self, field, parse_func):
if not self.needs_merge(field):
@@ -924,9 +934,13 @@ def parse_sequence_func(seq):
if not self.needs_merge(field):
return

merged = parse_sequence_func(self.base.get(field, []))
merged.update(parse_sequence_func(self.override.get(field, [])))
self[field] = [item.repr() for item in sorted(merged.values())]
if self.needs_overwrite(field):
result = parse_sequence_func(self.override.get(field, []))
else:
result = parse_sequence_func(self.base.get(field, []))
result.update(parse_sequence_func(self.override.get(field, [])))

self[field] = [item.repr() for item in sorted(result.values())]

def merge_scalar(self, field):
if self.needs_merge(field):
@@ -949,13 +963,13 @@ def merge_service_dicts(base, override, version):
md.merge_mapping('extra_hosts', parse_extra_hosts)

for field in ['volumes', 'devices']:
md.merge_field(field, merge_path_mappings)
md.merge_field(field, merge_path_mappings, overwrite_path_mappings)

for field in [
'cap_add', 'cap_drop', 'expose', 'external_links',
'security_opt', 'volumes_from',
]:
md.merge_field(field, merge_unique_items_lists, default=[])
md.merge_field(field, merge_unique_items_lists, overwrite_unique_items_lists, default=[])

for field in ['dns', 'dns_search', 'env_file', 'tmpfs']:
md.merge_field(field, merge_list_or_string)
@@ -976,6 +990,11 @@ def merge_service_dicts(base, override, version):
return dict(md)


def overwrite_unique_items_lists(base, override):
override = [str(o) for o in override]
return sorted(set(override))


def merge_unique_items_lists(base, override):
override = [str(o) for o in override]
base = [str(b) for b in base]
@@ -1002,8 +1021,12 @@ def parse_sequence_func(seq):
if not md.needs_merge(field):
return

merged = parse_sequence_func(md.base.get(field, []))
merged.update(parse_sequence_func(md.override.get(field, [])))
if md.needs_overwrite(field):
merged = parse_sequence_func(md.override.get(field, []))
else:
merged = parse_sequence_func(md.base.get(field, []))
merged.update(parse_sequence_func(md.override.get(field, [])))

md[field] = [item for item in sorted(merged.values(), key=lambda x: x.target)]


@@ -1210,6 +1233,11 @@ def validate_paths(service_dict):
"or is not a valid URL." % build_path)


def overwrite_path_mappings(base, override):
d = dict_from_path_mappings(override)
return path_mappings_from_dict(d)


def merge_path_mappings(base, override):
d = dict_from_path_mappings(base)
d.update(dict_from_path_mappings(override))
3 changes: 2 additions & 1 deletion compose/config/config_schema_v2.0.json
@@ -255,7 +255,8 @@
"volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"volume_driver": {"type": "string"},
"volumes_from": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"working_dir": {"type": "string"}
"working_dir": {"type": "string"},
"overwrite_multivals": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
},

"dependencies": {
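
Taken together, these changes let an override file opt out of Compose's default list-merging on a per-field basis: any field named in the service's new overwrite_multivals list is taken wholesale from the override instead of being unioned with the base. Below is a minimal standalone sketch of that behaviour for the simple unique-string-list fields (cap_add, expose, volumes_from, and so on). The helper names mirror the ones added in the diff, but the snippet does not import Compose itself, and the service dicts and capability values are made up for illustration.

# Sketch of the merge-vs-overwrite behaviour introduced by this change,
# for unique-string-list fields. Illustrative only; it mirrors the helpers
# added in the diff rather than calling the real Compose code.

def merge_unique_items_lists(base, override):
    # Default behaviour: union of base and override, deduplicated and sorted.
    return sorted(set(str(b) for b in base) | set(str(o) for o in override))


def overwrite_unique_items_lists(base, override):
    # New behaviour: the base value is ignored entirely.
    return sorted(set(str(o) for o in override))


def merge_list_field(field, base_service, override_service):
    # Mimics MergeDict.merge_field: fields listed in the override's
    # 'overwrite_multivals' use the overwrite helper instead of the merge helper.
    overwrite = field in override_service.get('overwrite_multivals', [])
    func = overwrite_unique_items_lists if overwrite else merge_unique_items_lists
    return func(base_service.get(field, []), override_service.get(field, []))


base = {'cap_add': ['NET_ADMIN', 'SYS_TIME']}

print(merge_list_field('cap_add', base, {'cap_add': ['SYS_PTRACE']}))
# -> ['NET_ADMIN', 'SYS_PTRACE', 'SYS_TIME']   (default: merged with the base)

print(merge_list_field('cap_add', base,
                       {'cap_add': ['SYS_PTRACE'],
                        'overwrite_multivals': ['cap_add']}))
# -> ['SYS_PTRACE']                            (overwritten via overwrite_multivals)

The diff applies the same pattern to path-mapping fields such as volumes through overwrite_path_mappings, which builds the result from the override's mappings alone instead of updating the base dictionary.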