Merge branch 'master' into feature/zuulv3

Change-Id: I37a3c5d4f12917b111b7eb624f8b68689687ebc4
This commit is contained in:
Joshua Hesketh 2017-03-06 12:50:04 +11:00 committed by James E. Blair
commit 25695cbb51
41 changed files with 1276 additions and 103 deletions

1
.gitignore vendored
View File

@ -2,6 +2,7 @@
*.egg
*.egg-info
*.pyc
.idea
.test
.testrepository
.tox

View File

@ -1,2 +1,7 @@
# This is a cross-platform list tracking distribution packages needed by tests;
# see http://docs.openstack.org/infra/bindep/ for additional information.
mysql-client [test]
mysql-server [test]
libjpeg-dev [test]
zookeeperd [platform:dpkg]

View File

@ -38,6 +38,9 @@ Create a connection with gerrit.
Path to SSH key to use when logging into above server.
``sshkey=/home/zuul/.ssh/id_rsa``
**keepalive**
Optional: Keepalive timeout, 0 means no keepalive.
``keepalive=60``
Gerrit Configuration
~~~~~~~~~~~~~~~~~~~~
@ -77,3 +80,15 @@ SMTP
Who the report should be emailed to by default.
This can be overridden by individual pipelines.
``default_to=you@example.com``
SQL
----
Only one connection per database is permitted.
**driver=sql**
**dburi**
Database connection information in the form of a URI understood by
sqlalchemy. eg http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#database-urls
``dburi=mysql://user:pass@localhost/db``

View File

@ -34,7 +34,7 @@ SMTP
A simple email reporter is also available.
A :ref:`connection` that uses the smtp driver must be supplied to the
trigger.
reporter.
SMTP Configuration
~~~~~~~~~~~~~~~~~~
@ -60,3 +60,42 @@ providing alternatives as arguments to the reporter. For example, ::
to: you@example.com
from: alternative@example.com
subject: Change {change} failed
SQL
---
This reporter is used to store results in a database.
A :ref:`connection` that uses the sql driver must be supplied to the
reporter.
SQL Configuration
~~~~~~~~~~~~~~~~~
zuul.conf contains the database connection and credentials. To store different
reports in different databases you'll need to create a new connection per
database.
The sql reporter is used to store the results from individual builds rather
than the change. As such the sql reporter does nothing on "start" or
"merge-failure".
**score**
A score to store for the result of the build. eg: -1 might indicate a failed
build similar to the vote posted back via the gerrit reporter.
For example ::
pipelines:
- name: post-merge
manager: IndependentPipelineManager
source: my_gerrit
trigger:
my_gerrit:
- event: change-merged
success:
mydb_conn:
score: 1
failure:
mydb_conn:
score: -1

View File

@ -148,11 +148,9 @@
case 'skipped':
$status.addClass('label-info');
break;
case 'in progress':
case 'queued':
case 'lost':
// 'in progress' 'queued' 'lost' 'aborted' ...
default:
$status.addClass('label-default');
break;
}
$status.text(result);
return $status;

View File

@ -37,6 +37,7 @@ server=review.example.com
;baseurl=https://review.example.com/r
user=jenkins
sshkey=/home/jenkins/.ssh/id_rsa
;keepalive=60
[connection smtp]
driver=smtp
@ -44,3 +45,7 @@ server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
[connection mydatabase]
driver=sql
dburi=mysql+pymysql://user@localhost/zuul

View File

@ -17,3 +17,5 @@ babel>=1.0
six>=1.6.0
ansible>=2.0.0.1
kazoo
sqlalchemy
alembic

View File

@ -31,3 +31,7 @@ console_scripts =
source-dir = doc/source
build-dir = doc/build
all_files = 1
[extras]
mysql_reporter=
PyMySQL

View File

@ -11,3 +11,4 @@ testrepository>=0.0.17
testtools>=0.9.32
sphinxcontrib-programoutput
mock
PyMySQL

View File

@ -37,12 +37,15 @@ import sys
import tempfile
import threading
import time
import uuid
import git
import gear
import fixtures
import kazoo.client
import kazoo.exceptions
import pymysql
import statsd
import testtools
import testtools.content
@ -51,6 +54,7 @@ from git.exc import NoSuchPathError
import zuul.driver.gerrit.gerritsource as gerritsource
import zuul.driver.gerrit.gerritconnection as gerritconnection
import zuul.connection.sql
import zuul.scheduler
import zuul.webapp
import zuul.rpclistener
@ -273,6 +277,25 @@ class FakeChange(object):
"eventCreatedOn": 1487613810}
return event
def getRefUpdatedEvent(self):
path = os.path.join(self.upstream_root, self.project)
repo = git.Repo(path)
oldrev = repo.heads[self.branch].commit.hexsha
event = {
"type": "ref-updated",
"submitter": {
"name": "User Name",
},
"refUpdate": {
"oldRev": oldrev,
"newRev": self.patchsets[-1]['revision'],
"refName": self.branch,
"project": self.project,
}
}
return event
def addApproval(self, category, value, username='reviewer_john',
granted_on=None, message=''):
if not granted_on:
@ -1067,6 +1090,43 @@ class ChrootedKazooFixture(fixtures.Fixture):
_tmp_client.stop()
class MySQLSchemaFixture(fixtures.Fixture):
def setUp(self):
super(MySQLSchemaFixture, self).setUp()
random_bits = ''.join(random.choice(string.ascii_lowercase +
string.ascii_uppercase)
for x in range(8))
self.name = '%s_%s' % (random_bits, os.getpid())
self.passwd = uuid.uuid4().hex
db = pymysql.connect(host="localhost",
user="openstack_citest",
passwd="openstack_citest",
db="openstack_citest")
cur = db.cursor()
cur.execute("create database %s" % self.name)
cur.execute(
"grant all on %s.* to '%s'@'localhost' identified by '%s'" %
(self.name, self.name, self.passwd))
cur.execute("flush privileges")
self.dburi = 'mysql+pymysql://%s:%s@localhost/%s' % (self.name,
self.passwd,
self.name)
self.addDetail('dburi', testtools.content.text_content(self.dburi))
self.addCleanup(self.cleanup)
def cleanup(self):
db = pymysql.connect(host="localhost",
user="openstack_citest",
passwd="openstack_citest",
db="openstack_citest")
cur = db.cursor()
cur.execute("drop database %s" % self.name)
cur.execute("drop user '%s'@'localhost'" % self.name)
cur.execute("flush privileges")
class BaseTestCase(testtools.TestCase):
log = logging.getLogger("zuul.test")
wait_timeout = 20
@ -1358,6 +1418,9 @@ class ZuulTestCase(BaseTestCase):
getGerritConnection))
# Set up smtp related fakes
# TODO(jhesketh): This should come from lib.connections for better
# coverage
# Register connections from the config
self.smtp_messages = []
def FakeSMTPFactory(*args, **kw):
@ -1868,3 +1931,20 @@ class ZuulTestCase(BaseTestCase):
class AnsibleZuulTestCase(ZuulTestCase):
"""ZuulTestCase but with an actual ansible launcher running"""
run_ansible = True
class ZuulDBTestCase(ZuulTestCase):
def setup_config(self, config_file='zuul-connections-same-gerrit.conf'):
super(ZuulDBTestCase, self).setup_config(config_file)
for section_name in self.config.sections():
con_match = re.match(r'^connection ([\'\"]?)(.*)(\1)$',
section_name, re.I)
if not con_match:
continue
if self.config.get(section_name, 'driver') == 'sql':
f = MySQLSchemaFixture()
self.useFixture(f)
if (self.config.get(section_name, 'dburi') ==
'$MYSQL_FIXTURE_DBURI$'):
self.config.set(section_name, 'dburi', f.dburi)

View File

@ -1,4 +1,16 @@
pipelines:
- name: check
manager: IndependentPipelineManager
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
verified: 1
failure:
gerrit:
verified: -1
- name: gate
manager: DependentPipelineManager
failure-message: Build failed. For information on how to proceed, see http://wiki.example.org/Test_Failures
@ -18,28 +30,54 @@ pipelines:
gerrit:
verified: -2
- name: post
manager: IndependentPipelineManager
trigger:
gerrit:
- event: ref-updated
ref: ^(?!refs/).*$
projects:
- name: org/project
check:
- integration
gate:
- integration
- name: org/project1
check:
- integration
gate:
- integration
- integration
post:
- postjob
- name: org/project2
check:
- integration
gate:
- integration
- integration
- name: org/project3
check:
- integration
gate:
- integration
- integration
- name: org/project4
check:
- integration
gate:
- integration
- integration
- name: org/project5
check:
- integration
gate:
- integration
- integration
- name: org/project6
check:
- integration
gate:
- integration
- integration

View File

@ -0,0 +1,23 @@
pipelines:
- name: check
manager: IndependentPipelineManager
trigger:
gerrit:
- event: patchset-created
success:
gerrit:
verified: 1
failure:
gerrit:
verified: -1
jobs:
- name: mutex-one
mutex: test-mutex
- name: mutex-two
mutex: test-mutex
projects:
- name: org/project
check:
- project-test1

27
tests/fixtures/layout-sql-reporter.yaml vendored Normal file
View File

@ -0,0 +1,27 @@
pipelines:
- name: check
manager: IndependentPipelineManager
source:
review_gerrit
trigger:
review_gerrit:
- event: patchset-created
success:
review_gerrit:
verified: 1
resultsdb:
score: 1
failure:
review_gerrit:
verified: -1
resultsdb:
score: -1
resultsdb_failures:
score: -1
projects:
- name: org/project
check:
- project-merge:
- project-test1
- project-test2

View File

@ -0,0 +1,50 @@
[gearman]
server=127.0.0.1
[zuul]
layout_config=layout-connections-multiple-voters.yaml
url_pattern=http://logs.example.com/{change.number}/{change.patchset}/{pipeline.name}/{job.name}/{build.number}
job_name_in_report=true
[merger]
git_dir=/tmp/zuul-test/git
git_user_email=zuul@example.com
git_user_name=zuul
zuul_url=http://zuul.example.com/p
[swift]
authurl=https://identity.api.example.org/v2.0/
user=username
key=password
tenant_name=" "
default_container=logs
region_name=EXP
logserver_prefix=http://logs.example.org/server.app/
[connection review_gerrit]
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none
[connection alt_voting_gerrit]
driver=gerrit
server=alt_review.example.com
user=civoter
sshkey=none
[connection outgoing_smtp]
driver=smtp
server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
[connection resultsdb]
driver=sql
dburi=mysql+pymysql://bad:creds@host/db
[connection resultsdb_failures]
driver=sql
dburi=mysql+pymysql://bad:creds@host/db

View File

@ -29,13 +29,13 @@ logserver_prefix=http://logs.example.org/server.app/
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none
sshkey=fake_id_rsa1
[connection alt_voting_gerrit]
driver=gerrit
server=review.example.com
user=civoter
sshkey=none
sshkey=fake_id_rsa2
[connection outgoing_smtp]
driver=smtp
@ -43,3 +43,12 @@ server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
# TODOv3(jeblair): commented out until sqlalchemy connection ported to
# v3 driver syntax
#[connection resultsdb]
#driver=sql
#dburi=$MYSQL_FIXTURE_DBURI$
#[connection resultsdb_failures]
#driver=sql
#dburi=$MYSQL_FIXTURE_DBURI$

View File

@ -29,7 +29,7 @@ logserver_prefix=http://logs.example.org/server.app/
driver=gerrit
server=review.example.com
user=jenkins
sshkey=none
sshkey=fake_id_rsa_path
[connection smtp]
driver=smtp

View File

@ -89,6 +89,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -105,11 +106,34 @@ class TestCloner(ZuulTestCase):
'be correct' % (project, number))
work = self.getWorkspaceRepos(projects)
upstream_repo_path = os.path.join(self.upstream_root, 'org/project1')
self.assertEquals(
# project1 is the zuul_project so the origin should be set to the
# zuul_url since that is the most up to date.
cache_repo_path = os.path.join(cache_root, 'org/project1')
self.assertNotEqual(
work['org/project1'].remotes.origin.url,
cache_repo_path,
'workspace repo origin should not be the cache'
)
zuul_url_repo_path = os.path.join(self.git_root, 'org/project1')
self.assertEqual(
work['org/project1'].remotes.origin.url,
zuul_url_repo_path,
'workspace repo origin should be the zuul url'
)
# project2 is not the zuul_project so the origin should be set
# to upstream since that is the best we can do
cache_repo_path = os.path.join(cache_root, 'org/project2')
self.assertNotEqual(
work['org/project2'].remotes.origin.url,
cache_repo_path,
'workspace repo origin should not be the cache'
)
upstream_repo_path = os.path.join(self.upstream_root, 'org/project2')
self.assertEqual(
work['org/project2'].remotes.origin.url,
upstream_repo_path,
'workspace repo origin should be upstream, not cache'
'workspace repo origin should be the upstream url'
)
self.worker.hold_jobs_in_build = False
@ -147,6 +171,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -217,6 +242,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -331,6 +357,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -393,6 +420,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -479,6 +507,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters['ZUUL_BRANCH'],
zuul_ref=build.parameters['ZUUL_REF'],
zuul_url=self.src_root,
@ -544,6 +573,7 @@ class TestCloner(ZuulTestCase):
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_url=self.src_root,
@ -565,56 +595,158 @@ class TestCloner(ZuulTestCase):
self.worker.release()
self.waitUntilSettled()
def test_periodic_update(self):
# Test that the merger correctly updates its local repository
# before running a periodic job.
# Prime the merger with the current state
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Merge a different change
B = self.fake_gerrit.addFakeChange('org/project', 'master', 'B')
B.setMerged()
# Start a periodic job
self.worker.hold_jobs_in_build = True
self.launcher.negative_function_cache_ttl = 0
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
# The pipeline triggers every second, so we should have seen
# several by now.
time.sleep(5)
self.waitUntilSettled()
builds = self.builds[:]
self.worker.hold_jobs_in_build = False
# Stop queuing timer triggered jobs so that the assertions
# below don't race against more jobs being queued.
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-no-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
self.worker.release()
self.waitUntilSettled()
projects = ['org/project']
self.assertEquals(2, len(builds), "Two builds are running")
upstream = self.getUpstreamRepos(projects)
self.assertEqual(upstream['org/project'].commit('master').hexsha,
B.patchsets[0]['revision'])
states = [
{'org/project':
str(upstream['org/project'].commit('master')),
},
{'org/project':
str(upstream['org/project'].commit('master')),
},
]
for number, build in enumerate(builds):
self.log.debug("Build parameters: %s", build.parameters)
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=projects,
workspace=self.workspace_root,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_url=self.git_root,
)
cloner.execute()
work = self.getWorkspaceRepos(projects)
state = states[number]
for project in projects:
self.assertEquals(state[project],
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, number))
shutil.rmtree(self.workspace_root)
self.worker.hold_jobs_in_build = False
self.worker.release()
self.waitUntilSettled()
def test_post_checkout(self):
project = "org/project"
path = os.path.join(self.upstream_root, project)
repo = git.Repo(path)
repo.head.reference = repo.heads['master']
commits = []
for i in range(0, 3):
commits.append(self.create_commit(project))
newRev = commits[1]
self.worker.hold_jobs_in_build = True
project = "org/project1"
A = self.fake_gerrit.addFakeChange(project, 'master', 'A')
event = A.getRefUpdatedEvent()
A.setMerged()
self.fake_gerrit.addEvent(event)
self.waitUntilSettled()
build = self.builds[0]
state = {'org/project1': build.parameters['ZUUL_COMMIT']}
build.release()
self.waitUntilSettled()
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=[project],
workspace=self.workspace_root,
zuul_branch=None,
zuul_ref='master',
zuul_url=self.src_root,
zuul_project=project,
zuul_newrev=newRev,
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
zuul_url=self.git_root,
)
cloner.execute()
repos = self.getWorkspaceRepos([project])
cloned_sha = repos[project].rev_parse('HEAD').hexsha
self.assertEqual(newRev, cloned_sha)
work = self.getWorkspaceRepos([project])
self.assertEquals(state[project],
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, 0))
shutil.rmtree(self.workspace_root)
def test_post_and_master_checkout(self):
project = "org/project1"
master_project = "org/project2"
path = os.path.join(self.upstream_root, project)
repo = git.Repo(path)
repo.head.reference = repo.heads['master']
commits = []
for i in range(0, 3):
commits.append(self.create_commit(project))
newRev = commits[1]
self.worker.hold_jobs_in_build = True
projects = ["org/project1", "org/project2"]
A = self.fake_gerrit.addFakeChange(projects[0], 'master', 'A')
event = A.getRefUpdatedEvent()
A.setMerged()
self.fake_gerrit.addEvent(event)
self.waitUntilSettled()
build = self.builds[0]
upstream = self.getUpstreamRepos(projects)
state = {'org/project1':
build.parameters['ZUUL_COMMIT'],
'org/project2':
str(upstream['org/project2'].commit('master')),
}
build.release()
self.waitUntilSettled()
cloner = zuul.lib.cloner.Cloner(
git_base_url=self.upstream_root,
projects=[project, master_project],
projects=projects,
workspace=self.workspace_root,
zuul_branch=None,
zuul_ref='master',
zuul_url=self.src_root,
zuul_project=project,
zuul_newrev=newRev
zuul_project=build.parameters.get('ZUUL_PROJECT', None),
zuul_branch=build.parameters.get('ZUUL_BRANCH', None),
zuul_ref=build.parameters.get('ZUUL_REF', None),
zuul_newrev=build.parameters.get('ZUUL_NEWREV', None),
zuul_url=self.git_root,
)
cloner.execute()
repos = self.getWorkspaceRepos([project, master_project])
cloned_sha = repos[project].rev_parse('HEAD').hexsha
self.assertEqual(newRev, cloned_sha)
self.assertEqual(
repos[master_project].rev_parse('HEAD').hexsha,
repos[master_project].rev_parse('master').hexsha)
work = self.getWorkspaceRepos(projects)
for project in projects:
self.assertEquals(state[project],
str(work[project].commit('HEAD')),
'Project %s commit for build %s should '
'be correct' % (project, 0))
shutil.rmtree(self.workspace_root)

View File

@ -12,14 +12,26 @@
# License for the specific language governing permissions and limitations
# under the License.
from tests.base import ZuulTestCase
import sqlalchemy as sa
from unittest import skip
from tests.base import ZuulTestCase, ZuulDBTestCase
def _get_reporter_from_connection_name(reporters, connection_name):
# Reporters are placed into lists for each action they may exist in.
# Search through the given list for the correct reporter by its connection
# name
for r in reporters:
if r.connection.connection_name == connection_name:
return r
class TestConnections(ZuulTestCase):
config_file = 'zuul-connections-same-gerrit.conf'
tenant_config_file = 'config/zuul-connections-same-gerrit/main.yaml'
def test_multiple_connections(self):
def test_multiple_gerrit_connections(self):
"Test multiple connections to the one gerrit"
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
@ -45,9 +57,184 @@ class TestConnections(ZuulTestCase):
self.assertEqual(B.patchsets[-1]['approvals'][0]['by']['username'],
'civoter')
def _test_sql_tables_created(self, metadata_table=None):
"Test the tables for storing results are created properly"
buildset_table = 'zuul_buildset'
build_table = 'zuul_build'
insp = sa.engine.reflection.Inspector(
self.connections['resultsdb'].engine)
self.assertEqual(9, len(insp.get_columns(buildset_table)))
self.assertEqual(10, len(insp.get_columns(build_table)))
@skip("Disabled for early v3 development")
def test_sql_tables_created(self):
"Test the default table is created"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
self._test_sql_tables_created()
def _test_sql_results(self):
"Test results are entered into an sql table"
# Grab the sa tables
reporter = _get_reporter_from_connection_name(
self.sched.layout.pipelines['check'].success_actions,
'resultsdb'
)
# Add a success result
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Add a failed result for a negative score
B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
self.worker.addFailTest('project-test1', B)
self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
conn = self.connections['resultsdb'].engine.connect()
result = conn.execute(
sa.sql.select([reporter.connection.zuul_buildset_table]))
buildsets = result.fetchall()
self.assertEqual(2, len(buildsets))
buildset0 = buildsets[0]
buildset1 = buildsets[1]
self.assertEqual('check', buildset0['pipeline'])
self.assertEqual('org/project', buildset0['project'])
self.assertEqual(1, buildset0['change'])
self.assertEqual(1, buildset0['patchset'])
self.assertEqual(1, buildset0['score'])
self.assertEqual('Build succeeded.', buildset0['message'])
buildset0_builds = conn.execute(
sa.sql.select([reporter.connection.zuul_build_table]).
where(
reporter.connection.zuul_build_table.c.buildset_id ==
buildset0['id']
)
).fetchall()
# Check the first result, which should be the project-merge job
self.assertEqual('project-merge', buildset0_builds[0]['job_name'])
self.assertEqual("SUCCESS", buildset0_builds[0]['result'])
self.assertEqual('http://logs.example.com/1/1/check/project-merge/0',
buildset0_builds[0]['log_url'])
self.assertEqual('check', buildset1['pipeline'])
self.assertEqual('org/project', buildset1['project'])
self.assertEqual(2, buildset1['change'])
self.assertEqual(1, buildset1['patchset'])
self.assertEqual(-1, buildset1['score'])
self.assertEqual('Build failed.', buildset1['message'])
buildset1_builds = conn.execute(
sa.sql.select([reporter.connection.zuul_build_table]).
where(
reporter.connection.zuul_build_table.c.buildset_id ==
buildset1['id']
)
).fetchall()
# Check the second last result, which should be the project-test1 job
# which failed
self.assertEqual('project-test1', buildset1_builds[-2]['job_name'])
self.assertEqual("FAILURE", buildset1_builds[-2]['result'])
self.assertEqual('http://logs.example.com/2/1/check/project-test1/4',
buildset1_builds[-2]['log_url'])
@skip("Disabled for early v3 development")
def test_sql_results(self):
"Test results are entered into the default sql table"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
self._test_sql_results()
@skip("Disabled for early v3 development")
def test_multiple_sql_connections(self):
"Test putting results in different databases"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
# Add a successful result
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Add a failed result
B = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'B')
self.worker.addFailTest('project-test1', B)
self.fake_review_gerrit.addEvent(B.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
# Grab the sa tables for resultsdb
reporter1 = _get_reporter_from_connection_name(
self.sched.layout.pipelines['check'].success_actions,
'resultsdb'
)
conn = self.connections['resultsdb'].engine.connect()
buildsets_resultsdb = conn.execute(sa.sql.select(
[reporter1.connection.zuul_buildset_table])).fetchall()
# Should have been 2 buildset reported to the resultsdb (both success
# and failure report)
self.assertEqual(2, len(buildsets_resultsdb))
# The first one should have passed
self.assertEqual('check', buildsets_resultsdb[0]['pipeline'])
self.assertEqual('org/project', buildsets_resultsdb[0]['project'])
self.assertEqual(1, buildsets_resultsdb[0]['change'])
self.assertEqual(1, buildsets_resultsdb[0]['patchset'])
self.assertEqual(1, buildsets_resultsdb[0]['score'])
self.assertEqual('Build succeeded.', buildsets_resultsdb[0]['message'])
# Grab the sa tables for resultsdb_failures
reporter2 = _get_reporter_from_connection_name(
self.sched.layout.pipelines['check'].failure_actions,
'resultsdb_failures'
)
conn = self.connections['resultsdb_failures'].engine.connect()
buildsets_resultsdb_failures = conn.execute(sa.sql.select(
[reporter2.connection.zuul_buildset_table])).fetchall()
# The failure db should only have 1 buildset failed
self.assertEqual(1, len(buildsets_resultsdb_failures))
self.assertEqual('check', buildsets_resultsdb_failures[0]['pipeline'])
self.assertEqual(
'org/project', buildsets_resultsdb_failures[0]['project'])
self.assertEqual(2, buildsets_resultsdb_failures[0]['change'])
self.assertEqual(1, buildsets_resultsdb_failures[0]['patchset'])
self.assertEqual(-1, buildsets_resultsdb_failures[0]['score'])
self.assertEqual(
'Build failed.', buildsets_resultsdb_failures[0]['message'])
class TestConnectionsBadSQL(ZuulDBTestCase):
def setup_config(self, config_file='zuul-connections-bad-sql.conf'):
super(TestConnectionsBadSQL, self).setup_config(config_file)
@skip("Disabled for early v3 development")
def test_unable_to_connect(self):
"Test the SQL reporter fails gracefully when unable to connect"
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-sql-reporter.yaml')
self.sched.reconfigure(self.config)
# Trigger a reporter. If no errors are raised, the reporter has been
# disabled correctly
A = self.fake_review_gerrit.addFakeChange('org/project', 'master', 'A')
self.fake_review_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
self.waitUntilSettled()
class TestMultipleGerrits(ZuulTestCase):
config_file = 'zuul-connections-multiple-gerrits.conf'
tenant_config_file = 'config/zuul-connections-multiple-gerrits/main.yaml'

View File

@ -2920,6 +2920,50 @@ class TestScheduler(ZuulTestCase):
self.launch_server.release('.*')
self.waitUntilSettled()
@skip("Disabled for early v3 development")
def test_timer_sshkey(self):
"Test that a periodic job can setup SSH key authentication"
self.worker.hold_jobs_in_build = True
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
# The pipeline triggers every second, so we should have seen
# several by now.
time.sleep(5)
self.waitUntilSettled()
self.assertEqual(len(self.builds), 2)
ssh_wrapper = os.path.join(self.git_root, ".ssh_wrapper_gerrit")
self.assertTrue(os.path.isfile(ssh_wrapper))
with open(ssh_wrapper) as f:
ssh_wrapper_content = f.read()
self.assertIn("fake_id_rsa", ssh_wrapper_content)
# In the unit tests Merger runs in the same process,
# so we see its environment variables
self.assertEqual(os.environ['GIT_SSH'], ssh_wrapper)
self.worker.release('.*')
self.waitUntilSettled()
self.assertEqual(len(self.history), 2)
self.assertEqual(self.getJobFromHistory(
'project-bitrot-stable-old').result, 'SUCCESS')
self.assertEqual(self.getJobFromHistory(
'project-bitrot-stable-older').result, 'SUCCESS')
# Stop queuing timer triggered jobs and let any that may have
# queued through so that end of test assertions pass.
self.config.set('zuul', 'layout_config',
'tests/fixtures/layout-no-timer.yaml')
self.sched.reconfigure(self.config)
self.registerJobs()
self.waitUntilSettled()
self.worker.release('.*')
self.waitUntilSettled()
def test_client_enqueue_change(self):
"Test that the RPC client can enqueue a change"
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')

33
tools/test-setup.sh Executable file
View File

@ -0,0 +1,33 @@
#!/bin/bash -xe
# This script will be run by OpenStack CI before unit tests are run,
# it sets up the test system as needed.
# Developers should setup their test systems in a similar way.
# This setup needs to be run as a user that can run sudo.
# The root password for the MySQL database; pass it in via
# MYSQL_ROOT_PW.
DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}
# This user and its password are used by the tests, if you change it,
# your tests might fail.
DB_USER=openstack_citest
DB_PW=openstack_citest
sudo -H mysqladmin -u root password $DB_ROOT_PW
# It's best practice to remove anonymous users from the database. If
# an anonymous user exists, then it matches first for connections and
# other connections from that host will not work.
sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
DELETE FROM mysql.user WHERE User='';
FLUSH PRIVILEGES;
GRANT ALL PRIVILEGES ON *.*
TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"
# Now create our database.
mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e "
SET default_storage_engine=MYISAM;
DROP DATABASE IF EXISTS openstack_citest;
CREATE DATABASE openstack_citest CHARACTER SET utf8;"

View File

@ -0,0 +1 @@
Generic single-database configuration.

View File

@ -0,0 +1,70 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
# from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,53 @@
"""Set up initial reporter tables
Revision ID: 4d3ebd7f06b9
Revises:
Create Date: 2015-12-06 15:27:38.080020
"""
# revision identifiers, used by Alembic.
revision = '4d3ebd7f06b9'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'
def upgrade():
    # One buildset row per reported item: identifies the change/ref and
    # pipeline, and stores the overall score and report message.
    op.create_table(
        BUILDSET_TABLE,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('zuul_ref', sa.String(255)),
        sa.Column('pipeline', sa.String(255)),
        sa.Column('project', sa.String(255)),
        # change/patchset are nullable: ref-updated events (e.g. tags)
        # have no gerrit change number or patchset.
        sa.Column('change', sa.Integer, nullable=True),
        sa.Column('patchset', sa.Integer, nullable=True),
        sa.Column('ref', sa.String(255)),
        sa.Column('score', sa.Integer),
        sa.Column('message', sa.TEXT()),
    )
    # One build row per job run within a buildset, linked by foreign key.
    op.create_table(
        BUILD_TABLE,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildset_id', sa.Integer,
                  sa.ForeignKey(BUILDSET_TABLE + ".id")),
        sa.Column('uuid', sa.String(36)),
        sa.Column('job_name', sa.String(255)),
        sa.Column('result', sa.String(255)),
        sa.Column('start_time', sa.DateTime()),
        sa.Column('end_time', sa.DateTime()),
        sa.Column('voting', sa.Boolean),
        sa.Column('log_url', sa.String(255)),
        sa.Column('node_name', sa.String(255)),
    )
def downgrade():
    # Deliberately unsupported: dropping the reporter tables would
    # destroy the recorded build history.
    raise Exception("Downgrades not supported")

69
zuul/alembic_reporter.ini Normal file
View File

@ -0,0 +1,69 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# NOTE(jhesketh): We may use alembic for other db components of zuul in the
# future. Use a sub-folder for the reporter's own versions.
script_location = alembic/sql_reporter
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = mysql+pymysql://user@localhost/database
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

104
zuul/connection/sql.py Normal file
View File

@ -0,0 +1,104 @@
# Copyright 2014 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging

import alembic
import alembic.command
import alembic.config
import alembic.migration
import sqlalchemy as sa
import voluptuous as v

from zuul.connection import BaseConnection
# Table names; must match the alembic migrations under
# alembic/sql_reporter, which create tables with these names.
BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'
class SQLConnection(BaseConnection):
    """Connection to an SQL database used by the sql reporter.

    On construction this runs any pending alembic migrations and caches
    the table definitions needed to insert buildset/build rows.  If the
    database (or the DBAPI driver named in the dburi) is unavailable,
    the failure is logged and ``tables_established`` stays False so a
    misconfigured reporter does not prevent Zuul from starting.
    """
    driver_name = 'sql'
    # Named after the class, consistent with the other zuul.* loggers
    # (e.g. zuul.SMTPConnection, zuul.GerritConnection).
    log = logging.getLogger("zuul.SQLConnection")

    def __init__(self, connection_name, connection_config):
        super(SQLConnection, self).__init__(connection_name, connection_config)

        self.dburi = None
        self.engine = None
        self.connection = None
        # Reporters must check this flag before writing; it remains
        # False when the database could not be reached or migrated.
        self.tables_established = False
        try:
            self.dburi = self.connection_config.get('dburi')
            self.engine = sa.create_engine(self.dburi)
            self._migrate()
            self._setup_tables()
            self.tables_established = True
        except sa.exc.NoSuchModuleError:
            # The dburi names a dialect/driver whose module isn't
            # installed (e.g. pymysql missing).
            self.log.exception(
                "The required module for the dburi dialect isn't available. "
                "SQL connection %s will be unavailable." % connection_name)
        except sa.exc.OperationalError:
            # Database unreachable or the migration DDL failed.
            self.log.exception(
                "Unable to connect to the database or establish the required "
                "tables. Reporter %s is disabled" % self)

    def _migrate(self):
        """Run the alembic migrations for this connection up to 'head'."""
        with self.engine.begin() as conn:
            context = alembic.migration.MigrationContext.configure(conn)
            current_rev = context.get_current_revision()
            self.log.debug('Current migration revision: %s' % current_rev)

        config = alembic.config.Config()
        # "zuul:" makes alembic resolve the script path relative to the
        # installed zuul package.
        config.set_main_option("script_location",
                               "zuul:alembic/sql_reporter")
        config.set_main_option("sqlalchemy.url",
                               self.connection_config.get('dburi'))
        alembic.command.upgrade(config, 'head')

    def _setup_tables(self):
        """Define the buildset/build tables this connection writes to.

        The column definitions must stay in sync with the alembic
        migrations under alembic/sql_reporter.
        """
        metadata = sa.MetaData()

        self.zuul_buildset_table = sa.Table(
            BUILDSET_TABLE, metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('zuul_ref', sa.String(255)),
            sa.Column('pipeline', sa.String(255)),
            sa.Column('project', sa.String(255)),
            sa.Column('change', sa.Integer, nullable=True),
            sa.Column('patchset', sa.Integer, nullable=True),
            sa.Column('ref', sa.String(255)),
            sa.Column('score', sa.Integer),
            sa.Column('message', sa.TEXT()),
        )

        self.zuul_build_table = sa.Table(
            BUILD_TABLE, metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('buildset_id', sa.Integer,
                      sa.ForeignKey(BUILDSET_TABLE + ".id")),
            sa.Column('uuid', sa.String(36)),
            sa.Column('job_name', sa.String(255)),
            sa.Column('result', sa.String(255)),
            sa.Column('start_time', sa.DateTime()),
            sa.Column('end_time', sa.DateTime()),
            sa.Column('voting', sa.Boolean),
            sa.Column('log_url', sa.String(255)),
            sa.Column('node_name', sa.String(255)),
        )
def getSchema():
    """Return the voluptuous schema for an sql connection's config.

    Accepts either a plain dburi string or a free-form mapping.
    """
    return v.Any(str, v.Schema({}, extra=True))

View File

@ -79,7 +79,7 @@ class GerritEventConnector(threading.Thread):
if change:
event.project_name = change.get('project')
event.branch = change.get('branch')
event.change_number = change.get('number')
event.change_number = str(change.get('number'))
event.change_url = change.get('url')
patchset = data.get('patchSet')
if patchset:
@ -155,13 +155,14 @@ class GerritWatcher(threading.Thread):
poll_timeout = 500
def __init__(self, gerrit_connection, username, hostname, port=29418,
keyfile=None):
keyfile=None, keepalive=60):
threading.Thread.__init__(self)
self.username = username
self.keyfile = keyfile
self.hostname = hostname
self.port = port
self.gerrit_connection = gerrit_connection
self.keepalive = keepalive
self._stopped = False
def _read(self, fd):
@ -192,6 +193,8 @@ class GerritWatcher(threading.Thread):
username=self.username,
port=self.port,
key_filename=self.keyfile)
transport = client.get_transport()
transport.set_keepalive(self.keepalive)
stdin, stdout, stderr = client.exec_command("gerrit stream-events")
@ -228,7 +231,7 @@ class GerritWatcher(threading.Thread):
class GerritConnection(BaseConnection):
driver_name = 'gerrit'
log = logging.getLogger("connection.gerrit")
log = logging.getLogger("zuul.GerritConnection")
depends_on_re = re.compile(r"^Depends-On: (I[0-9a-f]{40})\s*$",
re.MULTILINE | re.IGNORECASE)
replication_timeout = 300
@ -248,6 +251,7 @@ class GerritConnection(BaseConnection):
self.server = self.connection_config.get('server')
self.port = int(self.connection_config.get('port', 29418))
self.keyfile = self.connection_config.get('sshkey', None)
self.keepalive = int(self.connection_config.get('keepalive', 60))
self.watcher_thread = None
self.event_queue = Queue.Queue()
self.client = None
@ -682,6 +686,8 @@ class GerritConnection(BaseConnection):
username=self.user,
port=self.port,
key_filename=self.keyfile)
transport = client.get_transport()
transport.set_keepalive(self.keepalive)
self.client = client
def _ssh(self, command, stdin_data=None):
@ -786,7 +792,8 @@ class GerritConnection(BaseConnection):
self.user,
self.server,
self.port,
keyfile=self.keyfile)
keyfile=self.keyfile,
keepalive=self.keepalive)
self.watcher_thread.start()
def _stop_event_connector(self):

View File

@ -23,7 +23,7 @@ class GerritReporter(BaseReporter):
"""Sends off reports to Gerrit."""
name = 'gerrit'
log = logging.getLogger("zuul.reporter.gerrit.Reporter")
log = logging.getLogger("zuul.GerritReporter")
def report(self, source, pipeline, item):
"""Send a message to gerrit."""

View File

@ -20,7 +20,7 @@ from zuul.trigger import BaseTrigger
class GerritTrigger(BaseTrigger):
name = 'gerrit'
log = logging.getLogger("zuul.trigger.Gerrit")
log = logging.getLogger("zuul.GerritTrigger")
def getEventFilters(self, trigger_conf):
def toList(item):

View File

@ -23,7 +23,7 @@ from zuul.connection import BaseConnection
class SMTPConnection(BaseConnection):
driver_name = 'smtp'
log = logging.getLogger("connection.smtp")
log = logging.getLogger("zuul.SMTPConnection")
def __init__(self, driver, connection_name, connection_config):
super(SMTPConnection, self).__init__(driver, connection_name,

View File

@ -22,7 +22,7 @@ class SMTPReporter(BaseReporter):
"""Sends off reports to emails via SMTP."""
name = 'smtp'
log = logging.getLogger("zuul.reporter.smtp.Reporter")
log = logging.getLogger("zuul.SMTPReporter")
def report(self, source, pipeline, item):
"""Send the compiled report message via smtp."""

View File

@ -26,8 +26,7 @@ import timertrigger
class TimerDriver(Driver, TriggerInterface):
name = 'timer'
log = logging.getLogger("zuul.Timer")
log = logging.getLogger("zuul.TimerDriver")
def __init__(self):
self.apsched = BackgroundScheduler()

View File

@ -46,7 +46,7 @@ from zuul.lib import commandsocket
ANSIBLE_WATCHDOG_GRACE = 5 * 60
ANSIBLE_DEFAULT_TIMEOUT = 2 * 60 * 60
ANSIBLE_DEFAULT_PRE_TIMEOUT = 10 * 60
ANSIBLE_DEFAULT_POST_TIMEOUT = 10 * 60
ANSIBLE_DEFAULT_POST_TIMEOUT = 30 * 60
COMMANDS = ['reconfigure', 'stop', 'pause', 'unpause', 'release', 'graceful',
@ -822,7 +822,7 @@ class NodeWorker(object):
result = None
self._sent_complete_event = False
self._aborted_job = False
self._watchog_timeout = False
self._watchdog_timeout = False
try:
self.sendStartEvent(job_name, args)
@ -1351,7 +1351,10 @@ class NodeWorker(object):
when='success|bool')
blocks[0].insert(0, task)
task = dict(zuul_log=dict(msg="Job complete, result: FAILURE"),
when='not success|bool')
when='not success|bool and not timedout|bool')
blocks[0].insert(0, task)
task = dict(zuul_log=dict(msg="Job timed out, result: FAILURE"),
when='not success|bool and timedout|bool')
blocks[0].insert(0, task)
tasks.append(dict(block=blocks[0],
@ -1509,6 +1512,7 @@ class NodeWorker(object):
cmd = ['ansible-playbook', jobdir.post_playbook,
'-e', 'success=%s' % success,
'-e', 'timedout=%s' % self._watchdog_timeout,
'-e@%s' % jobdir.vars,
verbose]
self.log.debug("Ansible post command: %s" % (cmd,))

View File

@ -187,6 +187,7 @@ class LayoutSchema(object):
'reporter': {
'gerrit': 'zuul.reporter.gerrit',
'smtp': 'zuul.reporter.smtp',
'sql': 'zuul.reporter.sql',
},
}
standard_drivers = {

View File

@ -46,6 +46,8 @@ class Cloner(object):
self.zuul_branch = zuul_branch or ''
self.zuul_ref = zuul_ref or ''
self.zuul_url = zuul_url
self.zuul_project = zuul_project
self.project_branches = project_branches or {}
self.project_revisions = {}
@ -77,7 +79,18 @@ class Cloner(object):
def cloneUpstream(self, project, dest):
# Check for a cached git repo first
git_cache = '%s/%s' % (self.cache_dir, project)
git_upstream = '%s/%s' % (self.git_url, project)
# Then, if we are cloning the repo for the zuul_project, then
# set its origin to be the zuul merger, as it is guaranteed to
# be correct and up to date even if mirrors haven't updated
# yet. Otherwise, we can not be sure about the state of the
# project, so our best chance to get the most current state is
# by setting origin to the git_url.
if (self.zuul_url and project == self.zuul_project):
git_upstream = '%s/%s' % (self.zuul_url, project)
else:
git_upstream = '%s/%s' % (self.git_url, project)
repo_is_cloned = os.path.exists(os.path.join(dest, '.git'))
if (self.cache_dir and
os.path.exists(git_cache) and
@ -104,23 +117,35 @@ class Cloner(object):
return repo
def fetchFromZuul(self, repo, project, ref):
zuul_remote = '%s/%s' % (self.zuul_url, project)
def fetchRef(self, repo, project, ref):
# If we are fetching a zuul ref, the only place to get it is
# from the zuul merger (and it is guaranteed to be correct).
# Otherwise, the only way we can be certain that the ref
# (which, since it is not a zuul ref, is a branch or tag) is
# correct is in the case that it matches zuul_project. If
# neither of those two conditions are met, we are most likely
# to get the correct state from the git_url.
if (ref.startswith('refs/zuul') or
project == self.zuul_project):
remote = '%s/%s' % (self.zuul_url, project)
else:
remote = '%s/%s' % (self.git_url, project)
try:
repo.fetchFrom(zuul_remote, ref)
self.log.debug("Fetched ref %s from %s", ref, project)
repo.fetchFrom(remote, ref)
self.log.debug("Fetched ref %s from %s", ref, remote)
return True
except ValueError:
self.log.debug("Project %s in Zuul does not have ref %s",
project, ref)
self.log.debug("Repo %s does not have ref %s",
remote, ref)
return False
except GitCommandError as error:
# Bail out if fetch fails due to infrastructure reasons
if error.stderr.startswith('fatal: unable to access'):
raise
self.log.debug("Project %s in Zuul does not have ref %s",
project, ref)
self.log.debug("Repo %s does not have ref %s",
remote, ref)
return False
def prepareRepo(self, project, dest):
@ -192,7 +217,7 @@ class Cloner(object):
self.log.info("Attempting to check out revision %s for "
"project %s", indicated_revision, project)
try:
self.fetchFromZuul(repo, project, self.zuul_ref)
self.fetchRef(repo, project, self.zuul_ref)
commit = repo.checkout(indicated_revision)
except (ValueError, GitCommandError):
raise exceptions.RevNotFound(project, indicated_revision)
@ -201,10 +226,10 @@ class Cloner(object):
# If we have a non empty zuul_ref to use, use it. Otherwise we fall
# back to checking out the branch.
elif ((override_zuul_ref and
self.fetchFromZuul(repo, project, override_zuul_ref)) or
self.fetchRef(repo, project, override_zuul_ref)) or
(fallback_zuul_ref and
fallback_zuul_ref != override_zuul_ref and
self.fetchFromZuul(repo, project, fallback_zuul_ref))):
self.fetchRef(repo, project, fallback_zuul_ref))):
# Work around a bug in GitPython which can not parse FETCH_HEAD
gitcmd = git.Git(dest)
fetch_head = gitcmd.rev_parse('FETCH_HEAD')

View File

@ -12,6 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import logging
import re
import zuul.driver.zuul
@ -29,6 +30,8 @@ class DefaultConnection(BaseConnection):
class ConnectionRegistry(object):
"""A registry of connections"""
log = logging.getLogger("zuul.ConnectionRegistry")
def __init__(self):
self.connections = {}
self.drivers = {}
@ -92,16 +95,26 @@ class ConnectionRegistry(object):
# connection named 'gerrit' or 'smtp' respectfully
if 'gerrit' in config.sections():
driver = self.drivers['gerrit']
connections['gerrit'] = \
driver.getConnection(
'gerrit', dict(config.items('gerrit')))
if 'gerrit' in connections:
self.log.warning(
"The legacy [gerrit] section will be ignored in favour"
" of the [connection gerrit].")
else:
driver = self.drivers['gerrit']
connections['gerrit'] = \
driver.getConnection(
'gerrit', dict(config.items('gerrit')))
if 'smtp' in config.sections():
driver = self.drivers['smtp']
connections['smtp'] = \
driver.getConnection(
'smtp', dict(config.items('smtp')))
if 'smtp' in connections:
self.log.warning(
"The legacy [smtp] section will be ignored in favour"
" of the [connection smtp].")
else:
driver = self.drivers['smtp']
connections['smtp'] = \
driver.getConnection(
'smtp', dict(config.items('smtp')))
# Create default connections for drivers which need no
# connection information (e.g., 'timer' or 'zuul').

View File

@ -24,7 +24,7 @@ import string
class Swift(object):
log = logging.getLogger("zuul.lib.swift")
log = logging.getLogger("zuul.Swift")
def __init__(self, config):
self.config = config

View File

@ -226,6 +226,14 @@ class Merger(object):
else:
return None
def _setGitSsh(self, connection_name):
wrapper_name = '.ssh_wrapper_%s' % connection_name
name = os.path.join(self.working_root, wrapper_name)
if os.path.isfile(name):
os.environ['GIT_SSH'] = name
elif 'GIT_SSH' in os.environ:
del os.environ['GIT_SSH']
def addProject(self, project, url):
repo = None
try:
@ -246,6 +254,10 @@ class Merger(object):
return self.addProject(project, url)
def updateRepo(self, project, url):
# TODOv3(jhesketh): Reimplement
# da90a50b794f18f74de0e2c7ec3210abf79dda24 after merge..
# Likely we'll handle connection context per projects differently.
# self._setGitSsh()
repo = self.getRepo(project, url)
try:
self.log.info("Updating local repository %s", project)

View File

@ -116,7 +116,8 @@ class MergeServer(object):
def update(self, job):
args = json.loads(job.arguments)
self.merger.updateRepo(args['project'], args['url'])
self.merger.updateRepo(args['project'],
args['url'])
result = dict(updated=True,
zuul_url=self.zuul_url)
job.sendWorkComplete(json.dumps(result))

View File

@ -63,24 +63,26 @@ class BaseReporter(object):
# TODOv3(jeblair): Consider removing pipeline argument in favor of
# item.pipeline
def _formatItemReport(self, pipeline, item):
def _formatItemReport(self, pipeline, item, with_jobs=True):
"""Format a report from the given items. Usually to provide results to
a reporter taking free-form text."""
ret = self._getFormatter()(pipeline, item)
ret = self._getFormatter()(pipeline, item, with_jobs)
if pipeline.footer_message:
ret += '\n' + pipeline.footer_message
return ret
def _formatItemReportStart(self, pipeline, item):
def _formatItemReportStart(self, pipeline, item, with_jobs=True):
return pipeline.start_message.format(pipeline=pipeline)
def _formatItemReportSuccess(self, pipeline, item):
return (pipeline.success_message + '\n\n' +
self._formatItemReportJobs(pipeline, item))
def _formatItemReportSuccess(self, pipeline, item, with_jobs=True):
msg = pipeline.success_message
if with_jobs:
msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
return msg
def _formatItemReportFailure(self, pipeline, item):
def _formatItemReportFailure(self, pipeline, item, with_jobs=True):
if item.dequeued_needing_change:
msg = 'This change depends on a change that failed to merge.\n'
elif item.didMergerFail():
@ -88,14 +90,15 @@ class BaseReporter(object):
elif item.getConfigError():
msg = item.getConfigError()
else:
msg = (pipeline.failure_message + '\n\n' +
self._formatItemReportJobs(pipeline, item))
msg = pipeline.failure_message
if with_jobs:
msg += '\n\n' + self._formatItemReportJobs(pipeline, item)
return msg
def _formatItemReportMergeFailure(self, pipeline, item):
def _formatItemReportMergeFailure(self, pipeline, item, with_jobs=True):
return pipeline.merge_failure_message
def _formatItemReportDisabled(self, pipeline, item):
def _formatItemReportDisabled(self, pipeline, item, with_jobs=True):
if item.current_build_set.result == 'SUCCESS':
return self._formatItemReportSuccess(pipeline, item)
elif item.current_build_set.result == 'FAILURE':

94
zuul/reporter/sql.py Normal file
View File

@ -0,0 +1,94 @@
# Copyright 2015 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import logging
import voluptuous as v
from zuul.reporter import BaseReporter
class SQLReporter(BaseReporter):
    """Report build results into an SQL database."""
    name = 'sql'
    # Named after the class, consistent with the other zuul.* reporter
    # loggers (e.g. zuul.GerritReporter, zuul.SMTPReporter); the old
    # name wrongly said "mysql" for a backend-agnostic reporter.
    log = logging.getLogger("zuul.SQLReporter")

    def __init__(self, reporter_config=None, sched=None, connection=None):
        # Avoid a shared mutable default argument; normalize None -> {}.
        if reporter_config is None:
            reporter_config = {}
        super(SQLReporter, self).__init__(
            reporter_config, sched, connection)
        # Optional fixed score to record with each buildset.
        self.result_score = reporter_config.get('score', None)

    def report(self, source, pipeline, item):
        """Insert a buildset row and one build row per completed job."""
        if not self.connection.tables_established:
            # The connection failed to reach/migrate the database at
            # startup; skip reporting rather than erroring out.
            self.log.warning("SQL reporter (%s) is disabled " % self)
            return

        if self.sched.config.has_option('zuul', 'url_pattern'):
            url_pattern = self.sched.config.get('zuul', 'url_pattern')
        else:
            url_pattern = None

        # Score from the reporter config, defaulting to 0 when unset.
        score = self.reporter_config.get('score', 0)

        with self.connection.engine.begin() as conn:
            buildset_ins = self.connection.zuul_buildset_table.insert().values(
                zuul_ref=item.current_build_set.ref,
                pipeline=item.pipeline.name,
                project=item.change.project.name,
                change=item.change.number,
                patchset=item.change.patchset,
                ref=item.change.refspec,
                score=score,
                message=self._formatItemReport(
                    pipeline, item, with_jobs=False),
            )
            buildset_ins_result = conn.execute(buildset_ins)
            build_inserts = []

            for job in pipeline.getJobs(item):
                build = item.current_build_set.getBuild(job.name)
                if not build:
                    # Build hasn't begun. The sql reporter can only send
                    # back stats about builds. It doesn't understand how
                    # to store information about the change.
                    continue

                (result, url) = item.formatJobResult(job, url_pattern)

                build_inserts.append({
                    # NOTE(review): inserted_primary_key is a sequence of
                    # primary-key values in SQLAlchemy; confirm the
                    # backend coerces it for this integer FK column, or
                    # index [0] to get the plain id.
                    'buildset_id': buildset_ins_result.inserted_primary_key,
                    'uuid': build.uuid,
                    'job_name': build.job.name,
                    'result': result,
                    'start_time': datetime.datetime.fromtimestamp(
                        build.start_time),
                    'end_time': datetime.datetime.fromtimestamp(
                        build.end_time),
                    'voting': build.job.voting,
                    'log_url': url,
                    'node_name': build.node_name,
                })
            conn.execute(self.connection.zuul_build_table.insert(),
                         build_inserts)
def getSchema():
    """Return the voluptuous schema for the sql reporter's config."""
    return v.Schema({
        'score': int,
    })