Merge branch 'master' into 1.27.x

This commit is contained in:
aiordache 2020-09-16 16:20:52 +02:00
commit 0de595f951
7 changed files with 66 additions and 38 deletions

View File

@@ -423,20 +423,39 @@ def load_mapping(config_files, get_func, entity_type, working_dir=None):
elif not config.get('name'):
config['name'] = name
if 'driver_opts' in config:
config['driver_opts'] = build_string_dict(
config['driver_opts']
)
if 'labels' in config:
config['labels'] = parse_labels(config['labels'])
if 'file' in config:
config['file'] = expand_path(working_dir, config['file'])
if 'driver_opts' in config:
config['driver_opts'] = build_string_dict(
config['driver_opts']
)
device = format_device_option(entity_type, config)
if device:
config['driver_opts']['device'] = device
return mapping
def format_device_option(entity_type, config):
    """Resolve the host path of a local bind-mount volume device.

    Applies only to 'Volume' entities that use the default 'local'
    driver with ``driver_opts`` containing ``o: bind`` and a ``device``
    value; in every other case the function returns None.

    Returns the absolute, user-expanded device path, or None when the
    config is not a local bind mount.

    Raises ConfigurationError when the resolved device path does not
    exist on the host.
    """
    if entity_type != 'Volume':
        return None
    # default driver is 'local'
    if config.get('driver', 'local') != 'local':
        return None
    # Robustness: tolerate configs without a driver_opts mapping instead
    # of raising KeyError.
    driver_opts = config.get('driver_opts') or {}
    # Only 'o: bind' mounts reference a host path that must exist.
    if driver_opts.get('o') != 'bind':
        return None
    device = driver_opts.get('device')
    if not device:
        return None
    fullpath = os.path.abspath(os.path.expanduser(device))
    if not os.path.exists(fullpath):
        raise ConfigurationError(
            "Device path {} does not exist.".format(fullpath))
    return fullpath
def validate_external(entity_type, name, config, version):
for k in config.keys():
if entity_type == 'Network' and k == 'driver':
@@ -1114,6 +1133,7 @@ def merge_deploy(base, override):
md['resources'] = dict(resources_md)
if md.needs_merge('placement'):
placement_md = MergeDict(md.base.get('placement') or {}, md.override.get('placement') or {})
placement_md.merge_scalar('max_replicas_per_node')
placement_md.merge_field('constraints', merge_unique_items_lists, default=[])
placement_md.merge_field('preferences', merge_unique_objects_lists, default=[])
md['placement'] = dict(placement_md)

View File

@@ -153,7 +153,7 @@
"cpu_period": {"type": ["number", "string"]},
"cpu_rt_period": {"type": ["number", "string"]},
"cpu_rt_runtime": {"type": ["number", "string"]},
"cpus": {"type": "number", "minimum": 0},
"cpus": {"type": ["number", "string"]},
"cpuset": {"type": "string"},
"credential_spec": {
"type": "object",
@@ -503,7 +503,7 @@
"limits": {
"type": "object",
"properties": {
"cpus": {"type": "number", "minimum": 0},
"cpus": {"type": ["number", "string"]},
"memory": {"type": "string"}
},
"additionalProperties": false,
@@ -512,7 +512,7 @@
"reservations": {
"type": "object",
"properties": {
"cpus": {"type": "number", "minimum": 0},
"cpus": {"type": ["number", "string"]},
"memory": {"type": "string"},
"generic_resources": {"$ref": "#/definitions/generic_resources"}
},

View File

@@ -121,11 +121,6 @@ def denormalize_service_dict(service_dict, version, image_digest=None):
if version == V1 and 'network_mode' not in service_dict:
service_dict['network_mode'] = 'bridge'
if 'depends_on' in service_dict:
service_dict['depends_on'] = sorted([
svc for svc in service_dict['depends_on'].keys()
])
if 'healthcheck' in service_dict:
if 'interval' in service_dict['healthcheck']:
service_dict['healthcheck']['interval'] = serialize_ns_time_value(

View File

@@ -411,7 +411,7 @@ class Service:
stopped = [c for c in containers if not c.is_running]
if stopped:
return ConvergencePlan('start', stopped)
return ConvergencePlan('start', containers)
return ConvergencePlan('noop', containers)
@@ -514,8 +514,9 @@ class Service:
self._downscale(containers[scale:], timeout)
containers = containers[:scale]
if start:
stopped = [c for c in containers if not c.is_running]
_, errors = parallel_execute(
containers,
stopped,
lambda c: self.start_container_if_stopped(c, attach_logs=not detached, quiet=True),
lambda c: c.name,
"Starting",

View File

@@ -1347,6 +1347,36 @@ class ProjectTest(DockerClientTestCase):
project.up()
assert len(project.containers()) == 3
def test_project_up_scale_with_stopped_containers(self):
# Regression test: `project.up()` with a scale override must restart
# existing-but-stopped containers rather than only creating missing
# ones, so the running count matches the requested scale.
config_data = build_config(
services=[{
'name': 'web',
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'scale': 2
}]
)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()
containers = project.containers()
assert len(containers) == 2
# Stop one container; `up` at the same scale must start it again.
self.client.stop(containers[0].id)
project.up(scale_override={'web': 2})
containers = project.containers()
assert len(containers) == 2
# Scale up with one container stopped: restart it and add a new one.
self.client.stop(containers[0].id)
project.up(scale_override={'web': 3})
assert len(project.containers()) == 3
# Scale down with one container stopped: end with exactly one running.
self.client.stop(containers[0].id)
project.up(scale_override={'web': 1})
assert len(project.containers()) == 1
def test_initialize_volumes(self):
vol_name = '{:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{}'.format(vol_name)

View File

@@ -375,7 +375,7 @@ class ServiceStateTest(DockerClientTestCase):
assert [c.is_running for c in containers] == [False, True]
assert ('start', containers[0:1]) == web.convergence_plan()
assert ('start', containers) == web.convergence_plan()
def test_trigger_recreate_with_config_change(self):
web = self.create_service('web', command=["top"])

View File

@@ -2543,6 +2543,7 @@ web:
'labels': ['com.docker.compose.a=1', 'com.docker.compose.b=2'],
'mode': 'replicated',
'placement': {
'max_replicas_per_node': 1,
'constraints': [
'node.role == manager', 'engine.labels.aws == true'
],
@@ -2599,6 +2600,7 @@ web:
'com.docker.compose.c': '3'
},
'placement': {
'max_replicas_per_node': 1,
'constraints': [
'engine.labels.aws == true', 'engine.labels.dev == true',
'node.role == manager', 'node.role == worker'
@@ -5267,7 +5269,7 @@ def get_config_filename_for_files(filenames, subdir=None):
class SerializeTest(unittest.TestCase):
def test_denormalize_depends_on_v3(self):
def test_denormalize_depends(self):
service_dict = {
'image': 'busybox',
'command': 'true',
@@ -5277,27 +5279,7 @@ class SerializeTest(unittest.TestCase):
}
}
assert denormalize_service_dict(service_dict, VERSION) == {
'image': 'busybox',
'command': 'true',
'depends_on': ['service2', 'service3']
}
def test_denormalize_depends_on_v2_1(self):
service_dict = {
'image': 'busybox',
'command': 'true',
'depends_on': {
'service2': {'condition': 'service_started'},
'service3': {'condition': 'service_started'},
}
}
assert denormalize_service_dict(service_dict, VERSION) == {
'image': 'busybox',
'command': 'true',
'depends_on': ['service2', 'service3']
}
assert denormalize_service_dict(service_dict, VERSION) == service_dict
def test_serialize_time(self):
data = {