Merge "Make ansible version configurable"

This commit is contained in:
Zuul 2019-03-18 07:16:38 +00:00 committed by Gerrit Code Review
commit 5a0eed2265
27 changed files with 323 additions and 57 deletions

View File

@ -259,6 +259,11 @@ The following sections of ``zuul.conf`` are used by the scheduler:
config from. This attribute is exclusive with
:attr:`scheduler.tenant_config`.
.. attr:: default_ansible_version
Default ansible version to use for jobs that don't specify a version.
See :attr:`job.ansible-version` for details.
.. attr:: log_config
Path to log config file.

View File

@ -234,6 +234,11 @@ configuration. Some examples of tenant definitions are:
implement local policies such as node setup and artifact
publishing.
.. attr:: default-ansible-version
Default ansible version to use for jobs that don't specify a version.
See :attr:`job.ansible-version` for details.
.. attr:: allowed-triggers
:default: all connections

View File

@ -895,6 +895,21 @@ Here is an example of two job definitions:
run: playbooks/job-playbook.yaml
.. attr:: ansible-version
The ansible version to use for all playbooks of the job. This can be
defined at the following layers of configuration where the first match
takes precedence:
* :attr:`job.ansible-version`
* :attr:`tenant.default-ansible-version`
* :attr:`scheduler.default_ansible_version`
* Zuul default version
The supported ansible versions are:
.. program-output:: zuul-manage-ansible -l
.. attr:: roles
A list of Ansible roles to prepare for the job. Because a job

View File

@ -0,0 +1,14 @@
---
features:
- |
Jobs may now specify which ansible version is used to run them.
The ansible version to use can now be specified by
:attr:`job.ansible-version`.
upgrade:
- |
In order to support several ansible versions installed simultaneously,
Zuul now handles them itself in virtual environments. By default Zuul
installs the needed ansible versions on startup so there is no further
user action required. However, it is recommended to pre-install the
ansible environments during installation by invoking
``zuul-manage-ansible``.

View File

@ -1524,7 +1524,8 @@ class RecordingAnsibleJob(zuul.executor.server.AnsibleJob):
self.recordResult(result)
return result
def runAnsible(self, cmd, timeout, playbook, wrapped=True):
def runAnsible(self, cmd, timeout, playbook, ansible_version,
wrapped=True):
build = self.executor_server.job_builds[self.job.unique]
if self.executor_server._run_ansible:
@ -1532,7 +1533,7 @@ class RecordingAnsibleJob(zuul.executor.server.AnsibleJob):
# hold real ansible jobs.
build.run()
result = super(RecordingAnsibleJob, self).runAnsible(
cmd, timeout, playbook, wrapped)
cmd, timeout, playbook, ansible_version, wrapped)
else:
if playbook.path:
result = build.run()

View File

@ -0,0 +1,15 @@
# Test playbook: assert that the ansible version actually executing the job
# matches the version the job expects. The expected major/minor components
# are supplied by the job via the test_ansible_version_major and
# test_ansible_version_minor job variables.
- hosts: localhost
  tasks:
    - name: Print ansible version
      debug:
        # ansible_version is a built-in ansible fact.
        msg: "{{ ansible_version }}"
    - name: Print expected ansible version
      debug:
        msg: "{{ test_ansible_version_major }}.{{ test_ansible_version_minor }}"
    - name: Check ansible version
      assert:
        that:
          - test_ansible_version_major == ansible_version.major
          - test_ansible_version_minor == ansible_version.minor

View File

@ -0,0 +1,44 @@
# Zuul config-project fixture for the ansible-version tests
# (TestAnsibleVersion.test_ansible_versions).
- pipeline:
    name: check
    manager: independent
    trigger:
      gerrit:
        - event: patchset-created
    success:
      gerrit:
        Verified: 1
    failure:
      gerrit:
        Verified: -1

# Root job; parent: null marks it as the inheritance root.
- job:
    name: base
    parent: null

# Runs the playbook that asserts the in-use ansible version.
- job:
    name: ansible-version
    run: playbooks/ansible-version.yaml

# No explicit ansible-version: exercises default version selection.
# The expected 2.5 here presumably matches the configured default —
# verify against the test environment.
- job:
    name: ansible-default
    parent: ansible-version
    vars:
      test_ansible_version_major: 2
      test_ansible_version_minor: 5

# Explicit ansible-version set on the job itself.
- job:
    name: ansible-25
    parent: ansible-version
    ansible-version: 2.5
    vars:
      test_ansible_version_major: 2
      test_ansible_version_minor: 5

- project:
    name: common-config
    check:
      jobs:
        - ansible-default
        - ansible-25

View File

@ -0,0 +1,6 @@
# Tenant fixture: a single tenant whose configuration is loaded from the
# common-config project on the gerrit connection.
- tenant:
    name: tenant-one
    source:
      gerrit:
        config-projects:
          - common-config

View File

@ -1,3 +1,8 @@
- hosts: localhost
tasks:
- debug:
msg: Ansible version={{ ansible_version.major }}.{{ ansible_version.minor }}
- hosts: all
tasks:
# Create unwritable /tmp/console-None.log

View File

@ -0,0 +1,20 @@
# Tenant fixtures for the per-tenant default-ansible-version tests.
# tenant-no-default sets no default and so falls back to the scheduler/Zuul
# default version.
- tenant:
    name: tenant-no-default
    source:
      gerrit:
        config-projects:
          - common-config
        untrusted-projects:
          - org/project1
          - org/project2

# tenant-default-2-5 overrides the default ansible version at the tenant
# level (quoted so YAML does not read 2.5 as a float).
- tenant:
    name: tenant-default-2-5
    default-ansible-version: "2.5"
    source:
      gerrit:
        config-projects:
          - common-config
        untrusted-projects:
          - org/project1
          - org/project2

View File

@ -0,0 +1,4 @@
# Minimal job playbook fixture.
- hosts: all
  tasks:
    - name: test
      # NOTE(review): "debug: test" passes "test" as a free-form argument,
      # which the debug module does not accept — presumably intentional for
      # this fixture, but confirm it is not meant to be "msg: test".
      debug: test

View File

@ -17,6 +17,7 @@
- job:
name: common-config-job
run: playbooks/common.yaml
# Use the canonical name here. This should be merged with the org/project1 in
# the other repo.

View File

@ -22,11 +22,12 @@ ERROR_SYNC_TO_OUTSIDE = "Syncing files to outside the working dir"
ERROR_SYNC_FROM_OUTSIDE = "Syncing files from outside the working dir"
class TestActionModules(AnsibleZuulTestCase):
class TestActionModules25(AnsibleZuulTestCase):
tenant_config_file = 'config/remote-action-modules/main.yaml'
ansible_version = '2.5'
def setUp(self):
super(TestActionModules, self).setUp()
super().setUp()
self.fake_nodepool.remote_ansible = True
ansible_remote = os.environ.get('ZUUL_REMOTE_IPV4')
@ -50,6 +51,7 @@ class TestActionModules(AnsibleZuulTestCase):
- job:
name: {job_name}
run: playbooks/{job_name}.yaml
ansible-version: {version}
roles:
- zuul: org/common-config
nodeset:
@ -61,7 +63,7 @@ class TestActionModules(AnsibleZuulTestCase):
check:
jobs:
- {job_name}
""".format(job_name=job_name))
""".format(job_name=job_name, version=self.ansible_version))
file_dict = {'zuul.yaml': conf}
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',

View File

@ -20,11 +20,12 @@ import textwrap
from tests.base import AnsibleZuulTestCase
class TestZuulJSON(AnsibleZuulTestCase):
class TestZuulJSON25(AnsibleZuulTestCase):
tenant_config_file = 'config/remote-zuul-json/main.yaml'
ansible_version = '2.5'
def setUp(self):
super(TestZuulJSON, self).setUp()
super().setUp()
self.fake_nodepool.remote_ansible = True
ansible_remote = os.environ.get('ZUUL_REMOTE_IPV4')
@ -43,6 +44,7 @@ class TestZuulJSON(AnsibleZuulTestCase):
- job:
name: {job_name}
run: playbooks/{job_name}.yaml
ansible-version: {version}
roles:
- zuul: org/common-config
nodeset:
@ -54,7 +56,7 @@ class TestZuulJSON(AnsibleZuulTestCase):
check:
jobs:
- {job_name}
""".format(job_name=job_name))
""".format(job_name=job_name, version=self.ansible_version))
file_dict = {'zuul.yaml': conf}
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',

View File

@ -19,11 +19,12 @@ import textwrap
from tests.base import AnsibleZuulTestCase
class TestZuulStream(AnsibleZuulTestCase):
class TestZuulStream25(AnsibleZuulTestCase):
tenant_config_file = 'config/remote-zuul-stream/main.yaml'
ansible_version = '2.5'
def setUp(self):
super(TestZuulStream, self).setUp()
super().setUp()
self.fake_nodepool.remote_ansible = True
ansible_remote = os.environ.get('ZUUL_REMOTE_IPV4')
@ -45,6 +46,7 @@ class TestZuulStream(AnsibleZuulTestCase):
- job:
name: {job_name}
run: playbooks/{job_name}.yaml
ansible-version: {version}
roles:
- zuul: org/common-config
nodeset:
@ -58,7 +60,7 @@ class TestZuulStream(AnsibleZuulTestCase):
check:
jobs:
- {job_name}
""".format(job_name=job_name))
""".format(job_name=job_name, version=self.ansible_version))
file_dict = {'zuul.yaml': conf}
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A',
@ -94,6 +96,8 @@ class TestZuulStream(AnsibleZuulTestCase):
r'RUN START: \[untrusted : review.example.com/org/project/'
r'playbooks/command.yaml@master\]', text)
self.assertLogLine(r'PLAY \[all\]', text)
self.assertLogLine(
r'Ansible version={}'.format(self.ansible_version), text)
self.assertLogLine(r'TASK \[Show contents of first file\]', text)
self.assertLogLine(r'controller \| command test one', text)
self.assertLogLine(

View File

@ -33,6 +33,7 @@ from tests.base import (
from zuul.executor.sensors.startingbuilds import StartingBuildsSensor
from zuul.executor.sensors.ram import RAMSensor
from zuul.lib.ansible import AnsibleManager
class TestExecutorRepos(ZuulTestCase):
@ -428,7 +429,9 @@ class TestAnsibleJob(ZuulTestCase):
def setUp(self):
super(TestAnsibleJob, self).setUp()
job = gear.TextJob('executor:execute', '{}', unique='test')
ansible_version = AnsibleManager().default_version
args = '{"ansible_version": "%s"}' % ansible_version
job = gear.TextJob('executor:execute', args, unique='test')
self.test_job = zuul.executor.server.AnsibleJob(self.executor_server,
job)

View File

@ -28,6 +28,7 @@ from zuul.lib import yamlutil as yaml
import zuul.lib.connections
from tests.base import BaseTestCase, FIXTURE_DIR
from zuul.lib.ansible import AnsibleManager
class Dummy(object):
@ -45,6 +46,7 @@ class TestJob(BaseTestCase):
self.source = Dummy(canonical_hostname='git.example.com',
connection=self.connection)
self.tenant = model.Tenant('tenant')
self.tenant.default_ansible_version = AnsibleManager().default_version
self.layout = model.Layout(self.tenant)
self.project = model.Project('project', self.source)
self.context = model.SourceContext(self.project, 'master',
@ -58,7 +60,7 @@ class TestJob(BaseTestCase):
self.layout.addPipeline(self.pipeline)
self.queue = model.ChangeQueue(self.pipeline)
self.pcontext = configloader.ParseContext(
self.connections, None, self.tenant)
self.connections, None, self.tenant, AnsibleManager())
private_key_file = os.path.join(FIXTURE_DIR, 'private.pem')
with open(private_key_file, "rb") as f:

View File

@ -2264,10 +2264,11 @@ class TestInRepoJoin(ZuulTestCase):
self.assertHistory([])
class TestAnsible(AnsibleZuulTestCase):
class TestAnsible25(AnsibleZuulTestCase):
# A temporary class to hold new tests while others are disabled
tenant_config_file = 'config/ansible/main.yaml'
ansible_version = '2.5'
def test_playbook(self):
# This test runs a bit long and needs extra time.
@ -2381,15 +2382,17 @@ class TestAnsible(AnsibleZuulTestCase):
conf = textwrap.dedent(
"""
- job:
name: %s
run: playbooks/%s.yaml
name: {job_name}
run: playbooks/{job_name}.yaml
ansible-version: {ansible_version}
- project:
name: org/plugin-project
check:
jobs:
- %s
""" % (job_name, job_name, job_name))
- {job_name}
""".format(job_name=job_name,
ansible_version=self.ansible_version))
file_dict = {'.zuul.yaml': conf}
A = self.fake_gerrit.addFakeChange('org/plugin-project', 'master', 'A',
@ -5296,3 +5299,20 @@ class TestJobPausePriority(AnsibleZuulTestCase):
self.fake_nodepool.unpause()
self.waitUntilSettled()
class TestAnsibleVersion(AnsibleZuulTestCase):
tenant_config_file = 'config/ansible-versions/main.yaml'
def test_ansible_versions(self):
"""
Tests that jobs run with the requested ansible version.
"""
A = self.fake_gerrit.addFakeChange('common-config', 'master', 'A')
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
self.assertHistory([
dict(name='ansible-default', result='SUCCESS', changes='1,1'),
dict(name='ansible-25', result='SUCCESS', changes='1,1'),
], ordered=False)

View File

@ -296,6 +296,7 @@ class TestWeb(BaseTestWeb):
{
'name': 'project-test1',
'abstract': False,
'ansible_version': None,
'attempts': 4,
'branches': [],
'dependencies': [],
@ -334,6 +335,7 @@ class TestWeb(BaseTestWeb):
}, {
'name': 'project-test1',
'abstract': False,
'ansible_version': None,
'attempts': 3,
'branches': ['stable'],
'dependencies': [],
@ -376,6 +378,7 @@ class TestWeb(BaseTestWeb):
self.assertEqual([
{
'abstract': False,
'ansible_version': None,
'attempts': 3,
'branches': [],
'dependencies': [],
@ -485,6 +488,7 @@ class TestWeb(BaseTestWeb):
'api/tenant/tenant-one/project/org/project1').json()
jobs = [[{'abstract': False,
'ansible_version': None,
'attempts': 3,
'branches': [],
'dependencies': [],
@ -515,6 +519,7 @@ class TestWeb(BaseTestWeb):
'variant_description': '',
'voting': True}],
[{'abstract': False,
'ansible_version': None,
'attempts': 3,
'branches': [],
'dependencies': [{'name': 'project-merge',
@ -546,6 +551,7 @@ class TestWeb(BaseTestWeb):
'variant_description': '',
'voting': True}],
[{'abstract': False,
'ansible_version': None,
'attempts': 3,
'branches': [],
'dependencies': [{'name': 'project-merge',
@ -577,6 +583,7 @@ class TestWeb(BaseTestWeb):
'variant_description': '',
'voting': True}],
[{'abstract': False,
'ansible_version': None,
'attempts': 3,
'branches': [],
'dependencies': [{'name': 'project-merge',

View File

@ -30,6 +30,8 @@ class ManageAnsible(zuul.cmd.ZuulApp):
help='verbose output')
parser.add_argument('-u', dest='upgrade', action='store_true',
help='upgrade ansible versions')
parser.add_argument('-l', dest='list_supported', action='store_true',
help='list supported versions')
return parser
def _setup_logging(self):
@ -51,6 +53,15 @@ class ManageAnsible(zuul.cmd.ZuulApp):
manager = AnsibleManager()
if self.args.list_supported:
versions = []
for version, default in manager.getSupportedVersions():
if default:
version = version + ' (default)'
versions.append(version)
print('\n'.join(versions))
return
manager.install(upgrade=self.args.upgrade)

View File

@ -570,6 +570,7 @@ class JobParser(object):
'pre-run': to_list(str),
'post-run': to_list(str),
'run': to_list(str),
'ansible-version': vs.Any(str, float),
'_source_context': model.SourceContext,
'_start_mark': ZuulMark,
'roles': to_list(role),
@ -693,6 +694,14 @@ class JobParser(object):
raise Exception("Once set, the post-review attribute "
"may not be unset")
# Configure and validate ansible version
if 'ansible-version' in conf:
# The ansible-version can be treated by yaml as a float so convert
# it to a string.
ansible_version = str(conf['ansible-version'])
self.pcontext.ansible_manager.requestVersion(ansible_version)
job.ansible_version = ansible_version
# Roles are part of the playbook context so we must establish
# them earlier than playbooks.
roles = []
@ -1251,10 +1260,11 @@ class SemaphoreParser(object):
class ParseContext(object):
"""Hold information about a particular run of the parser"""
def __init__(self, connections, scheduler, tenant):
def __init__(self, connections, scheduler, tenant, ansible_manager):
self.connections = connections
self.scheduler = scheduler
self.tenant = tenant
self.ansible_manager = ansible_manager
self.pragma_parser = PragmaParser(self)
self.pipeline_parser = PipelineParser(self)
self.nodeset_parser = NodeSetParser(self)
@ -1347,10 +1357,11 @@ class TenantParser(object):
'allowed-reporters': to_list(str),
'allowed-labels': to_list(str),
'default-parent': str,
'default-ansible-version': vs.Any(str, float),
}
return vs.Schema(tenant)
def fromYaml(self, abide, conf):
def fromYaml(self, abide, conf, ansible_manager):
self.getSchema()(conf)
tenant = model.Tenant(conf['name'])
if conf.get('max-nodes-per-job') is not None:
@ -1380,6 +1391,17 @@ class TenantParser(object):
# We prepare a stack to store config loading issues
loading_errors = model.LoadingErrors()
# Set default ansible version
default_ansible_version = conf.get('default-ansible-version')
if default_ansible_version is not None:
# The ansible version can be interpreted as float by yaml so make
# sure it's a string.
default_ansible_version = str(default_ansible_version)
ansible_manager.requestVersion(default_ansible_version)
else:
default_ansible_version = ansible_manager.default_version
tenant.default_ansible_version = default_ansible_version
# Start by fetching any YAML needed by this tenant which isn't
# already cached. Full reconfigurations start with an empty
# cache.
@ -1393,9 +1415,9 @@ class TenantParser(object):
# Then convert the YAML to configuration objects which we
# cache on the tenant.
tenant.config_projects_config = self.parseConfig(
tenant, config_projects_config, loading_errors)
tenant, config_projects_config, loading_errors, ansible_manager)
tenant.untrusted_projects_config = self.parseConfig(
tenant, untrusted_projects_config, loading_errors)
tenant, untrusted_projects_config, loading_errors, ansible_manager)
# Combine the trusted and untrusted config objects
parsed_config = model.ParsedConfig()
@ -1694,8 +1716,10 @@ class TenantParser(object):
tpc = tenant.project_configs[project.canonical_name]
return tpc.load_classes
def parseConfig(self, tenant, unparsed_config, loading_errors):
pcontext = ParseContext(self.connections, self.scheduler, tenant)
def parseConfig(self, tenant, unparsed_config, loading_errors,
ansible_manager):
pcontext = ParseContext(self.connections, self.scheduler, tenant,
ansible_manager)
parsed_config = model.ParsedConfig()
# Handle pragma items first since they modify the source context
@ -1990,11 +2014,12 @@ class ConfigLoader(object):
unparsed_abide.extend(data)
return unparsed_abide
def loadConfig(self, unparsed_abide):
def loadConfig(self, unparsed_abide, ansible_manager):
abide = model.Abide()
for conf_tenant in unparsed_abide.tenants:
# When performing a full reload, do not use cached data.
tenant = self.tenant_parser.fromYaml(abide, conf_tenant)
tenant = self.tenant_parser.fromYaml(
abide, conf_tenant, ansible_manager)
abide.tenants[tenant.name] = tenant
if len(tenant.layout.loading_errors):
self.log.warning(
@ -2006,7 +2031,7 @@ class ConfigLoader(object):
self.log.warning(err.error)
return abide
def reloadTenant(self, abide, tenant):
def reloadTenant(self, abide, tenant, ansible_manager):
new_abide = model.Abide()
new_abide.tenants = abide.tenants.copy()
new_abide.unparsed_project_branch_config = \
@ -2015,7 +2040,7 @@ class ConfigLoader(object):
# When reloading a tenant only, use cached data if available.
new_tenant = self.tenant_parser.fromYaml(
new_abide,
tenant.unparsed_config)
tenant.unparsed_config, ansible_manager)
new_abide.tenants[tenant.name] = new_tenant
if len(new_tenant.layout.loading_errors):
self.log.warning(
@ -2028,7 +2053,8 @@ class ConfigLoader(object):
return new_abide
def _loadDynamicProjectData(self, config, project,
files, trusted, tenant, loading_errors):
files, trusted, tenant, loading_errors,
ansible_manager):
tpc = tenant.project_configs[project.canonical_name]
if trusted:
branches = ['master']
@ -2086,9 +2112,9 @@ class ConfigLoader(object):
filterUntrustedProjectYAML(incdata, loading_errors)
config.extend(self.tenant_parser.parseConfig(
tenant, incdata, loading_errors))
tenant, incdata, loading_errors, ansible_manager))
def createDynamicLayout(self, tenant, files,
def createDynamicLayout(self, tenant, files, ansible_manager,
include_config_projects=False,
scheduler=None, connections=None):
loading_errors = model.LoadingErrors()
@ -2096,13 +2122,15 @@ class ConfigLoader(object):
config = model.ParsedConfig()
for project in tenant.config_projects:
self._loadDynamicProjectData(
config, project, files, True, tenant, loading_errors)
config, project, files, True, tenant, loading_errors,
ansible_manager)
else:
config = tenant.config_projects_config.copy()
for project in tenant.untrusted_projects:
self._loadDynamicProjectData(
config, project, files, False, tenant, loading_errors)
config, project, files, False, tenant, loading_errors,
ansible_manager)
layout = model.Layout(tenant)
layout.loading_errors = loading_errors

View File

@ -205,6 +205,7 @@ class ExecutorClient(object):
params['override_branch'] = job.override_branch
params['override_checkout'] = job.override_checkout
params['repo_state'] = item.current_build_set.repo_state
params['ansible_version'] = job.ansible_version
def make_playbook(playbook):
d = playbook.toDict()

View File

@ -706,8 +706,8 @@ class AnsibleJob(object):
self.executor_variables_file = self.executor_server.config.get(
'executor', 'variables')
# TODO(tobiash): choose correct ansible version as specified by the job
plugin_dir = self.executor_server.ansible_manager.getAnsiblePluginDir()
plugin_dir = self.executor_server.ansible_manager.getAnsiblePluginDir(
self.arguments['ansible_version'])
self.library_dir = os.path.join(plugin_dir, 'library')
self.action_dir = os.path.join(plugin_dir, 'action')
self.action_dir_general = os.path.join(plugin_dir, 'actiongeneral')
@ -1118,6 +1118,7 @@ class AnsibleJob(object):
def runPlaybooks(self, args):
result = None
ansible_version = args['ansible_version']
with open(self.jobdir.job_output_file, 'a') as job_output:
job_output.write("{now} | Running Ansible setup...\n".format(
@ -1130,7 +1131,7 @@ class AnsibleJob(object):
# between here and the hosts in the inventory; return them and
# reschedule the job.
setup_status, setup_code = self.runAnsibleSetup(
self.jobdir.setup_playbook)
self.jobdir.setup_playbook, ansible_version)
if setup_status != self.RESULT_NORMAL or setup_code != 0:
return result
@ -1152,7 +1153,8 @@ class AnsibleJob(object):
# TODOv3(pabelanger): Implement pre-run timeout setting.
ansible_timeout = self.getAnsibleTimeout(time_started, job_timeout)
pre_status, pre_code = self.runAnsiblePlaybook(
playbook, ansible_timeout, phase='pre', index=index)
playbook, ansible_timeout, ansible_version, phase='pre',
index=index)
if pre_status != self.RESULT_NORMAL or pre_code != 0:
# These should really never fail, so return None and have
# zuul try again
@ -1171,7 +1173,8 @@ class AnsibleJob(object):
ansible_timeout = self.getAnsibleTimeout(
time_started, job_timeout)
job_status, job_code = self.runAnsiblePlaybook(
playbook, ansible_timeout, phase='run', index=index)
playbook, ansible_timeout, ansible_version, phase='run',
index=index)
if job_status == self.RESULT_ABORTED:
return 'ABORTED'
elif job_status == self.RESULT_TIMED_OUT:
@ -1207,7 +1210,8 @@ class AnsibleJob(object):
# which are vital to understanding why timeouts have happened in
# the first place.
post_status, post_code = self.runAnsiblePlaybook(
playbook, post_timeout, success, phase='post', index=index)
playbook, post_timeout, ansible_version, success, phase='post',
index=index)
if post_status == self.RESULT_ABORTED:
return 'ABORTED'
if post_status == self.RESULT_UNREACHABLE:
@ -1822,7 +1826,8 @@ class AnsibleJob(object):
except Exception:
self.log.exception("Exception while killing ansible process:")
def runAnsible(self, cmd, timeout, playbook, wrapped=True):
def runAnsible(self, cmd, timeout, playbook, ansible_version,
wrapped=True):
config_file = playbook.ansible_config
env_copy = os.environ.copy()
env_copy.update(self.ssh_agent.env)
@ -1837,8 +1842,8 @@ class AnsibleJob(object):
else:
pythonpath = []
# TODO(tobiash): choose correct ansible version
ansible_dir = self.executor_server.ansible_manager.getAnsibleDir()
ansible_dir = self.executor_server.ansible_manager.getAnsibleDir(
ansible_version)
pythonpath = [ansible_dir] + pythonpath
env_copy['PYTHONPATH'] = os.path.pathsep.join(pythonpath)
@ -2022,7 +2027,7 @@ class AnsibleJob(object):
return (self.RESULT_NORMAL, ret)
def runAnsibleSetup(self, playbook):
def runAnsibleSetup(self, playbook, ansible_version):
if self.executor_server.verbose:
verbose = '-vvv'
else:
@ -2030,6 +2035,7 @@ class AnsibleJob(object):
# TODO: select correct ansible version from job
ansible = self.executor_server.ansible_manager.getAnsibleCommand(
ansible_version,
command='ansible')
cmd = [ansible, '*', verbose, '-m', 'setup',
'-i', self.jobdir.setup_inventory,
@ -2039,7 +2045,7 @@ class AnsibleJob(object):
result, code = self.runAnsible(
cmd=cmd, timeout=self.executor_server.setup_timeout,
playbook=playbook, wrapped=False)
playbook=playbook, ansible_version=ansible_version, wrapped=False)
self.log.debug("Ansible complete, result %s code %s" % (
self.RESULT_MAP[result], code))
if self.executor_server.statsd:
@ -2103,16 +2109,15 @@ class AnsibleJob(object):
now=datetime.datetime.now(),
msg=msg))
def runAnsiblePlaybook(self, playbook, timeout, success=None,
phase=None, index=None):
def runAnsiblePlaybook(self, playbook, timeout, ansible_version,
success=None, phase=None, index=None):
if self.executor_server.verbose:
verbose = '-vvv'
else:
verbose = '-v'
# TODO: Select ansible version based on job
cmd = [self.executor_server.ansible_manager.getAnsibleCommand(),
verbose, playbook.path]
cmd = [self.executor_server.ansible_manager.getAnsibleCommand(
ansible_version), verbose, playbook.path]
if playbook.secrets_content:
cmd.extend(['-e', '@' + playbook.secrets])
@ -2139,8 +2144,7 @@ class AnsibleJob(object):
self.emitPlaybookBanner(playbook, 'START', phase)
result, code = self.runAnsible(
cmd=cmd, timeout=timeout, playbook=playbook)
result, code = self.runAnsible(cmd, timeout, playbook, ansible_version)
self.log.debug("Ansible complete, result %s code %s" % (
self.RESULT_MAP[result], code))
if self.executor_server.statsd:

View File

@ -123,13 +123,18 @@ class ManagedAnsible:
class AnsibleManager:
log = logging.getLogger('zuul.ansible_manager')
def __init__(self, zuul_ansible_dir=None):
def __init__(self, zuul_ansible_dir=None, default_version=None):
self._supported_versions = {}
self.default_version = None
self.zuul_ansible_dir = zuul_ansible_dir
self.load_ansible_config()
# If configured, override the default version
if default_version:
self.requestVersion(default_version)
self.default_version = default_version
def load_ansible_config(self):
c = resource_string(__name__, 'ansible-config.conf').decode()
config = configparser.ConfigParser()
@ -193,17 +198,30 @@ class AnsibleManager:
raise Exception('Requested ansible version %s not found' % version)
return ansible
def getAnsibleCommand(self, version=None, command='ansible-playbook'):
def getAnsibleCommand(self, version, command='ansible-playbook'):
ansible = self._getAnsible(version)
return os.path.join(ansible.venv_path, 'bin', command)
def getAnsibleDir(self, version=None):
def getAnsibleDir(self, version):
ansible = self._getAnsible(version)
return os.path.join(self.zuul_ansible_dir, ansible.version)
def getAnsiblePluginDir(self, version=None):
def getAnsiblePluginDir(self, version):
return os.path.join(self.getAnsibleDir(version), 'zuul', 'ansible')
def requestVersion(self, version):
if version not in self._supported_versions:
raise Exception(
'Requested ansible version \'%s\' is unknown. Supported '
'versions are %s' % (
version, ', '.join(self._supported_versions)))
def getSupportedVersions(self):
versions = []
for version in self._supported_versions:
versions.append((version, version == self.default_version))
return versions
def copyAnsibleFiles(self):
if os.path.exists(self.zuul_ansible_dir):
shutil.rmtree(self.zuul_ansible_dir)

View File

@ -498,6 +498,7 @@ class PipelineManager(object):
layout = loader.createDynamicLayout(
item.pipeline.tenant,
build_set.files,
self.sched.ansible_manager,
include_config_projects=True)
if not len(layout.loading_errors):
trusted_layout_verified = True
@ -509,6 +510,7 @@ class PipelineManager(object):
layout = loader.createDynamicLayout(
item.pipeline.tenant,
build_set.files,
self.sched.ansible_manager,
include_config_projects=False)
else:
# We're a change to a config repo (with no untrusted

View File

@ -1139,6 +1139,7 @@ class Job(ConfigObject):
pre_run=(),
post_run=(),
run=(),
ansible_version=None,
semaphore=None,
attempts=3,
final=False,
@ -1233,6 +1234,10 @@ class Job(ConfigObject):
ns = self.nodeset
if ns:
d['nodeset'] = ns.toDict()
if self.ansible_version:
d['ansible_version'] = self.ansible_version
else:
d['ansible_version'] = None
return d
def __ne__(self, other):
@ -3860,6 +3865,12 @@ class Layout(object):
# (i.e. project+templates) directly into the job vars
frozen_job.updateProjectVariables(ppc.variables)
# If the job does not specify an ansible version default to the
# tenant default.
if not frozen_job.ansible_version:
frozen_job.ansible_version = \
item.layout.tenant.default_ansible_version
job_graph.addJob(frozen_job)
def createJobGraph(self, item, ppc):
@ -4025,6 +4036,9 @@ class Tenant(object):
self.projects = {}
self.canonical_hostnames = set()
# The per tenant default ansible version
self.default_ansible_version = None
def _addProject(self, tpc):
"""Add a project to the project index

View File

@ -32,6 +32,7 @@ from zuul import exceptions
from zuul import version as zuul_version
from zuul import rpclistener
from zuul.lib import commandsocket
from zuul.lib.ansible import AnsibleManager
from zuul.lib.config import get_default
from zuul.lib.gear_utils import getGearmanFunctions
from zuul.lib.statsd import get_statsd
@ -323,6 +324,10 @@ class Scheduler(threading.Thread):
if self.config.getboolean('scheduler', 'relative_priority'):
self.use_relative_priority = True
default_ansible_version = get_default(
self.config, 'scheduler', 'default_ansible_version', None)
self.ansible_manager = AnsibleManager(default_ansible_version)
def start(self):
super(Scheduler, self).start()
self._command_running = True
@ -662,6 +667,13 @@ class Scheduler(threading.Thread):
self.config = event.config
try:
self.log.info("Full reconfiguration beginning")
# Reload the ansible manager in case the default ansible version
# changed.
default_ansible_version = get_default(
self.config, 'scheduler', 'default_ansible_version', None)
self.ansible_manager = AnsibleManager(default_ansible_version)
for connection in self.connections.connections.values():
self.log.debug("Clear branch cache for: %s" % connection)
connection.clearBranchCache()
@ -672,7 +684,8 @@ class Scheduler(threading.Thread):
tenant_config, script = self._checkTenantSourceConf(self.config)
self.unparsed_abide = loader.readConfig(
tenant_config, from_script=script)
abide = loader.loadConfig(self.unparsed_abide)
abide = loader.loadConfig(
self.unparsed_abide, self.ansible_manager)
for tenant in abide.tenants.values():
self._reconfigureTenant(tenant)
self.abide = abide
@ -700,7 +713,7 @@ class Scheduler(threading.Thread):
self.connections, self, self.merger,
self._get_key_dir())
abide = loader.reloadTenant(
self.abide, old_tenant)
self.abide, old_tenant, self.ansible_manager)
tenant = abide.tenants[event.tenant_name]
self._reconfigureTenant(tenant)
self.abide = abide