Return artifacts as dicts and add metadata

A recent attempt to use the artifact return feature of zuul_return
exposed some rough edges.  These two changes should make it much
easier to use.

First, return artifacts as a dictionary instead of a list.  This
requires that they have unique names (which is no bad thing -- what
would two artifacts named "docs" mean anyway?).  But mainly it allows
the dict merging behavior of zuul_return to be used, so that one
playbook may use zuul_return with some artifacts, and another playbook
may do the same, without either needing to load in the values of
the other first (assuming, of course, that they use different artifact
names).

Second, add a metadata field.  In the database and API, this is JSON
serialized, but in zuul_return and zuul.artifacts, it is exploded into
separate fields.  This lets jobs do things like associate versions or
tags with artifacts without having to abuse the url field.

Change-Id: I228687c1bd1c74ebc33b088ffd43f30c7309990d
This commit is contained in:
James E. Blair 2019-02-04 14:22:08 -08:00
parent 6ea6885e21
commit f12453f6cb
7 changed files with 98 additions and 19 deletions

View File

@ -229,7 +229,7 @@ of item.
under the ``zuul`` key:
.. var:: artifacts
:type: list
:type: dict
If the job has a :attr:`job.requires` attribute, and Zuul has
found changes ahead of this change in the pipeline with matching
@ -263,6 +263,10 @@ of item.
The URL of the artifact (as supplied to :ref:`return_artifacts`).
.. var:: metadata
The metadata of the artifact (as supplied to :ref:`return_artifacts`).
.. var:: build
The UUID of the build. A build is a single execution of a job.
@ -779,7 +783,7 @@ Returning artifact URLs
If a build produces artifacts, any number of URLs may be returned to
Zuul and stored in the SQL database. These will then be available via
the web interface.
the web interface and subsequent jobs.
To provide artifact URLs for a build, use *zuul_return* to set keys
under the **zuul.artifacts** dictionary. For example:
@ -791,13 +795,17 @@ under the **zuul.artifacts** dictionary. For example:
data:
zuul:
artifacts:
- name: tarball
tarball:
url: http://example.com/path/to/package.tar.gz
- name: docs
metadata:
version: 3.0
docs:
url: build/docs/
If the value of **url** is a relative URL, it will be combined with
the **zuul.log_url** value if set to create an absolute URL.
the **zuul.log_url** value if set to create an absolute URL. The
**metadata** key is optional; if it is provided, it must be a
dictionary; its keys and values may be anything.
Skipping child jobs
~~~~~~~~~~~~~~~~~~~

View File

@ -0,0 +1,12 @@
---
features:
- Artifacts may now include a metadata field for storing arbitrary
metadata about the artifacts in the SQL database.
deprecations:
- Artifacts should now be supplied to zuul_return in dictionary form
instead of a list. See :ref:`return_artifacts`.
This makes it easier for multiple playbooks to provide information
back to Zuul without having to coordinate with each other.
Support for the list format will be removed in a future version.

View File

@ -0,0 +1,37 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""artifact_metadata
Revision ID: c18b1277dfb5
Revises: 39d302d34d38
Create Date: 2019-02-04 14:02:44.291890
"""
# revision identifiers, used by Alembic.
revision = 'c18b1277dfb5'
down_revision = '39d302d34d38'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(table_prefix=''):
    """Add the 'metadata' TEXT column to the artifact table.

    :param table_prefix: optional prefix applied to all Zuul table
        names (used by deployments sharing a database).
    """
    metadata_column = sa.Column('metadata', sa.TEXT())
    op.add_column(table_prefix + 'zuul_artifact', metadata_column)
def downgrade():
    """Refuse to downgrade.

    Zuul's SQL migrations are forward-only; reversing this schema
    change is intentionally unsupported.
    """
    raise Exception("Downgrades not supported")

View File

@ -223,6 +223,9 @@ class SQLConnection(BaseConnection):
def createArtifact(self, *args, **kw):
session = orm.session.Session.object_session(self)
if 'metadata' in kw:
kw['meta'] = kw['metadata']
del kw['metadata']
a = ArtifactModel(*args, **kw)
a.build_id = self.id
self.artifacts.append(a)
@ -246,6 +249,7 @@ class SQLConnection(BaseConnection):
self.table_prefix + BUILD_TABLE + ".id"))
name = sa.Column(sa.String(255))
url = sa.Column(sa.TEXT())
meta = sa.Column('metadata', sa.TEXT())
build = orm.relationship(BuildModel, backref="artifacts")
class ProvidesModel(Base):

View File

@ -13,6 +13,7 @@
# under the License.
import datetime
import json
import logging
import time
import voluptuous as v
@ -90,6 +91,8 @@ class SQLReporter(BaseReporter):
for artifact in get_artifacts_from_result_data(
build.result_data,
logger=self.log):
if 'metadata' in artifact:
artifact['metadata'] = json.dumps(artifact['metadata'])
db_build.createArtifact(**artifact)

View File

@ -15,15 +15,20 @@
import voluptuous as v
import urllib.parse
# Schema for a single artifact in the deprecated list form
# (requires an explicit 'name' key per entry).
old_artifact = {
    'name': str,
    'url': str,
}

# Schema for an artifact in the new dictionary form: the artifact
# name is the dict key, and an optional 'metadata' dict may carry
# arbitrary extra information about the artifact.
new_artifact = {
    'url': str,
    'metadata': dict,
}

# Schema for the 'zuul' key of zuul_return result data.  Artifacts
# may be supplied either as a list of old-style entries or as a
# mapping of name -> new-style entry; other keys are passed through.
zuul_data = {
    'zuul': {
        'log_url': str,
        'artifacts': v.Any([old_artifact], {str: new_artifact}),
        v.Extra: object,
    }
}
@ -43,13 +48,18 @@ def get_artifacts_from_result_data(result_data, logger=None):
ret = []
if validate_artifact_schema(result_data):
artifacts = result_data.get('zuul', {}).get(
'artifacts', [])
'artifacts', {})
if isinstance(artifacts, list):
new_artifacts = {}
for a in artifacts:
new_artifacts[a['name']] = {'url': a['url']}
artifacts = new_artifacts
default_url = result_data.get('zuul', {}).get(
'log_url')
if default_url:
if default_url[-1] != '/':
default_url += '/'
for artifact in artifacts:
for artifact_name, artifact in artifacts.items():
url = artifact['url']
if default_url:
# If the artifact url is relative, it will be combined
@ -61,8 +71,10 @@ def get_artifacts_from_result_data(result_data, logger=None):
if logger:
logger.debug("Error parsing URL:",
exc_info=1)
ret.append({'name': artifact['name'],
'url': url})
d = artifact.copy()
d['name'] = artifact_name
d['url'] = url
ret.append(d)
else:
logger.debug("Result data did not pass artifact schema "
"validation: %s", result_data)

View File

@ -15,6 +15,7 @@
import abc
from collections import OrderedDict
import copy
import json
import logging
import os
import re2
@ -2197,14 +2198,16 @@ class QueueItem(object):
"Requirements %s not met by build %s" % (
requirement, build.uuid))
else:
artifacts = [{'name': a.name,
'url': a.url,
'project': build.buildset.project,
'change': str(build.buildset.change),
'patchset': build.buildset.patchset,
'job': build.job_name}
for a in build.artifacts]
data += artifacts
for a in build.artifacts:
artifact = {'name': a.name,
'url': a.url,
'project': build.buildset.project,
'change': str(build.buildset.change),
'patchset': build.buildset.patchset,
'job': build.job_name}
if a.meta:
artifact['metadata'] = json.loads(a.meta)
data.append(artifact)
return data
def providesRequirements(self, requirements, data):