Convert from legacy to native devstack job

Changes the nodepool-functional-py35 and nodepool-functional-py35-src jobs
to inherit from the native devstack job instead of the legacy-dsvm-base job.

Change-Id: I3d97d83360816001da0f599d95b13eae3abb6c96
David Shrewsbury 2017-11-16 12:39:06 -05:00
parent 4c533c8718
commit 8d910aca97
9 changed files with 72 additions and 250 deletions
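
For orientation, a minimal sketch of how a project could attach the rewritten jobs to its check pipeline. The job names come from this change; the project stanza itself (including the non-voting setting) is illustrative and not part of this commit.

# Illustrative project stanza; not part of this commit.
- project:
    check:
      jobs:
        - nodepool-functional-py35:
            voting: false
        - nodepool-functional-py35-src:
            voting: false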

View File

@@ -1,21 +1,48 @@
 - job:
-    name: nodepool-functional-py35
-    parent: legacy-dsvm-base
-    run: playbooks/nodepool-functional-py35/run.yaml
-    post-run: playbooks/nodepool-functional-py35/post.yaml
+    name: nodepool-functional-base
+    parent: devstack
+    pre-run: playbooks/nodepool-functional-base/pre.yaml
+    post-run: playbooks/nodepool-functional-base/post.yaml
     timeout: 5400
     required-projects:
-      - openstack-infra/devstack-gate
       - openstack-infra/nodepool
+    vars:
+      devstack_services:
+        horizon: false
+        ceilometer-acentral: false
+        ceilometer-acompute: false
+        ceilometer-alarm-evaluator: false
+        ceilometer-alarm-notifier: false
+        ceilometer-anotification: false
+        ceilometer-api: false
+        ceilometer-collector: false
+        cinder: false
+        c-bak: false
+        c-sch: false
+        c-api: false
+        c-vol: false
+        s-account: false
+        s-container: false
+        s-object: false
+        s-proxy: false
+      devstack_plugins:
+        nodepool: https://git.openstack.org/openstack-infra/nodepool
 
+- job:
+    name: nodepool-functional-py35
+    parent: nodepool-functional-base
+    description: |
+      Run nodepool functional tests for py35
+    run: playbooks/nodepool-functional-py35/run.yaml
+
 - job:
     name: nodepool-functional-py35-src
-    parent: legacy-dsvm-base
+    parent: nodepool-functional-base
     run: playbooks/nodepool-functional-py35-src/run.yaml
-    post-run: playbooks/nodepool-functional-py35-src/post.yaml
-    timeout: 5400
+    vars:
+      devstack_localrc:
+        LIBS_FROM_GIT: shade,glean,diskimage-builder
     required-projects:
-      - openstack-infra/devstack-gate
       - openstack-infra/glean
       - openstack-infra/nodepool
       - openstack-infra/shade
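
The devstack parent job renders the devstack_services and devstack_plugins variables above into the local.conf it writes, replacing the long list of DEVSTACK_LOCAL_CONFIG exports in the legacy playbooks removed below. As a rough sketch of how the per-image branches from the old script could be expressed in this model, an image-specific variant might override the NODEPOOL_PAUSE_* settings through devstack_localrc; the job below is hypothetical and not part of this change.

# Hypothetical variant, shown only to illustrate devstack_localrc overrides.
- job:
    name: nodepool-functional-py35-src-ubuntu
    parent: nodepool-functional-py35-src
    vars:
      devstack_localrc:
        NODEPOOL_PAUSE_UBUNTU_TRUSTY_DIB: false
        NODEPOOL_PAUSE_UBUNTU_XENIAL_DIB: false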

View File

@@ -69,6 +69,7 @@ function install_nodepool {
    setup_develop $DEST/nodepool
    $NODEPOOL_INSTALL/bin/pip install $DEST/nodepool
    $NODEPOOL_INSTALL/bin/pbr freeze
}
# requires some globals from devstack, which *might* not be stable api

View File

@@ -0,0 +1,9 @@
+- hosts: all
+  vars:
+    nodepool_log_dir: '{{ ansible_user_dir }}/work/logs/nodepool'
+  tasks:
+    - name: 'Copy files from {{ nodepool_log_dir }}'
+      synchronize:
+        src: '{{ nodepool_log_dir }}'
+        dest: '{{ zuul.executor.log_root }}/{{ inventory_hostname }}'
+        mode: pull

View File

@@ -0,0 +1,11 @@
+- hosts: all
+  roles:
+    - run-devstack
+    - role: bindep
+      bindep_profile: default
+  tasks:
+    - name: Ensure nodepool output log directory
+      file:
+        path: '{{ ansible_user_dir }}/work/logs/nodepool'
+        state: directory

View File

@@ -1,15 +0,0 @@
-- hosts: primary
-  tasks:
-    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
-      synchronize:
-        src: '{{ ansible_user_dir }}/workspace/'
-        dest: '{{ zuul.executor.log_root }}'
-        mode: pull
-        copy_links: true
-        verify_host: true
-        rsync_opts:
-          - --include=/logs/**
-          - --include=*/
-          - --exclude=*
-          - --prune-empty-dirs

View File

@@ -1,121 +1,6 @@
 - hosts: all
-  name: Autoconverted job legacy-dsvm-nodepool-py35-src from old job gate-dsvm-nodepool-py35-src-nv
   tasks:
-    - name: Ensure legacy workspace directory
-      file:
-        path: '{{ ansible_user_dir }}/workspace'
-        state: directory
-    - shell:
-        cmd: |
-          set -e
-          set -x
-          cat > clonemap.yaml << EOF
-          clonemap:
-            - name: openstack-infra/devstack-gate
-              dest: devstack-gate
-          EOF
-          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
-              git://git.openstack.org \
-              openstack-infra/devstack-gate
-        executable: /bin/bash
-        chdir: '{{ ansible_user_dir }}/workspace'
-      environment: '{{ zuul | zuul_legacy_vars }}'
-    - shell:
-        cmd: |
-          set -e
-          set -x
-          export PYTHONUNBUFFERED=true
-          # Disable tempest as nodepool is talking to the cloud not tempest.
-          export DEVSTACK_GATE_TEMPEST=0
-          # Use neutron as the public clouds in use are neutron based.
-          export DEVSTACK_GATE_NEUTRON=1
-          # The nodepool process needs sudo rights in order to
-          # perform dib image builds
-          export DEVSTACK_GATE_REMOVE_STACK_SUDO=0
-          # Disable services we do not need for nodepool
-          export DEVSTACK_LOCAL_CONFIG="disable_service horizon"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-acentral"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-acompute"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-alarm-evaluator"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-alarm-notifier"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-anotification"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-api"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-collector"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service cinder"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-bak"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-sch"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-api"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-vol"
-          if [ "-py35" == "-py35" ]; then
-            export DEVSTACK_GATE_USE_PYTHON3=True
-            # swift is not ready for python3 yet
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-account"
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-container"
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-object"
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-proxy"
-          fi
-          export BRANCH_OVERRIDE=default
-          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
-            export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
-          fi
-          # Because we are testing a non standard project, add the
-          # our project repository. This makes zuul do the right
-          # reference magic for testing changes.
-          export PROJECTS="openstack-infra/nodepool $PROJECTS"
-          # note the actual url here is somewhat irrelevant because it
-          # caches in nodepool, however make it a valid url for
-          # documentation purposes.
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"enable_plugin nodepool git://git.openstack.org/openstack-infra/nodepool"
-          export DEVSTACK_PROJECT_FROM_GIT="diskimage-builder"
-          export DEVSTACK_PROJECT_FROM_GIT+=",glean"
-          export DEVSTACK_PROJECT_FROM_GIT+=",shade"
-          # Start with all images disabled.
-          export NODEPOOL_PAUSE_CENTOS_7_DIB=true
-          export NODEPOOL_PAUSE_DEBIAN_JESSIE_DIB=true
-          export NODEPOOL_PAUSE_FEDORA_26_DIB=true
-          export NODEPOOL_PAUSE_OPENSUSE_423_DIB=true
-          export NODEPOOL_PAUSE_UBUNTU_TRUSTY_DIB=true
-          export NODEPOOL_PAUSE_UBUNTU_XENIAL_DIB=true
-          if [ "" == "" ] ; then
-            # dsvm-nodepool-src
-            export NODEPOOL_PAUSE_UBUNTU_TRUSTY_DIB=false
-          elif [ "" == "-debian" ] ; then
-            # dsvm-nodepool-debian-src
-            export NODEPOOL_PAUSE_DEBIAN_JESSIE_DIB=false
-          elif [ "" == "-opensuse" ] ; then
-            # dsvm-nodepool-opensuse-src
-            export NODEPOOL_PAUSE_OPENSUSE_423_DIB=false
-          elif [ "" == "-redhat" ] ; then
-            # dsvm-nodepool-redhat-src
-            export NODEPOOL_PAUSE_CENTOS_7_DIB=false
-            export NODEPOOL_PAUSE_FEDORA_26_DIB=false
-          elif [ "" == "-ubuntu" ] ; then
-            # dsvm-nodepool-ubuntu-src
-            export NODEPOOL_PAUSE_UBUNTU_TRUSTY_DIB=false
-            export NODEPOOL_PAUSE_UBUNTU_XENIAL_DIB=false
-          fi
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"NODEPOOL_PAUSE_CENTOS_7_DIB=$NODEPOOL_PAUSE_CENTOS_7_DIB"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"NODEPOOL_PAUSE_DEBIAN_JESSIE_DIB=$NODEPOOL_PAUSE_DEBIAN_JESSIE_DIB"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"NODEPOOL_PAUSE_FEDORA_26_DIB=$NODEPOOL_PAUSE_FEDORA_26_DIB"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"NODEPOOL_PAUSE_OPENSUSE_423_DIB=$NODEPOOL_PAUSE_OPENSUSE_423_DIB"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"NODEPOOL_PAUSE_UBUNTU_TRUSTY_DIB=$NODEPOOL_PAUSE_UBUNTU_TRUSTY_DIB"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"NODEPOOL_PAUSE_UBUNTU_XENIAL_DIB=$NODEPOOL_PAUSE_UBUNTU_XENIAL_DIB"
-          function post_test_hook {
-              /opt/stack/new/nodepool/tools/check_devstack_plugin.sh
-          }
-          export -f post_test_hook
-          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
-          ./safe-devstack-vm-gate-wrap.sh
-        executable: /bin/bash
-        chdir: '{{ ansible_user_dir }}/workspace'
-      environment: '{{ zuul | zuul_legacy_vars }}'
+    - name: Run nodepool tests
+      command: tools/check_devstack_plugin.sh {{ ansible_user_dir }}/work/logs/nodepool
+      args:
+        chdir: '{{ zuul.project.src_dir }}'

View File

@@ -1,15 +0,0 @@
-- hosts: primary
-  tasks:
-    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
-      synchronize:
-        src: '{{ ansible_user_dir }}/workspace/'
-        dest: '{{ zuul.executor.log_root }}'
-        mode: pull
-        copy_links: true
-        verify_host: true
-        rsync_opts:
-          - --include=/logs/**
-          - --include=*/
-          - --exclude=*
-          - --prune-empty-dirs

View File

@@ -1,85 +1,6 @@
 - hosts: all
-  name: Autoconverted job legacy-dsvm-nodepool-py35 from old job gate-dsvm-nodepool-py35-nv
   tasks:
-    - name: Ensure legacy workspace directory
-      file:
-        path: '{{ ansible_user_dir }}/workspace'
-        state: directory
-    - shell:
-        cmd: |
-          set -e
-          set -x
-          cat > clonemap.yaml << EOF
-          clonemap:
-            - name: openstack-infra/devstack-gate
-              dest: devstack-gate
-          EOF
-          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
-              git://git.openstack.org \
-              openstack-infra/devstack-gate
-        executable: /bin/bash
-        chdir: '{{ ansible_user_dir }}/workspace'
-      environment: '{{ zuul | zuul_legacy_vars }}'
-    - shell:
-        cmd: |
-          set -e
-          set -x
-          export PYTHONUNBUFFERED=true
-          # Disable tempest as nodepool is talking to the cloud not tempest.
-          export DEVSTACK_GATE_TEMPEST=0
-          # Use neutron as the public clouds in use are neutron based.
-          export DEVSTACK_GATE_NEUTRON=1
-          # The nodepool process needs sudo rights in order to
-          # perform dib image builds
-          export DEVSTACK_GATE_REMOVE_STACK_SUDO=0
-          # Disable services we do not need for nodepool
-          export DEVSTACK_LOCAL_CONFIG="disable_service horizon"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-acentral"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-acompute"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-alarm-evaluator"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-alarm-notifier"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-anotification"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-api"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service ceilometer-collector"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service cinder"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-bak"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-sch"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-api"
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service c-vol"
-          if [ "-py35" == "-py35" ]; then
-            export DEVSTACK_GATE_USE_PYTHON3=True
-            # swift is not ready for python3 yet
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-account"
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-container"
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-object"
-            export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-proxy"
-          fi
-          export BRANCH_OVERRIDE=default
-          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
-            export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
-          fi
-          # Because we are testing a non standard project, add the
-          # our project repository. This makes zuul do the right
-          # reference magic for testing changes.
-          export PROJECTS="openstack-infra/nodepool $PROJECTS"
-          # note the actual url here is somewhat irrelevant because it
-          # caches in nodepool, however make it a valid url for
-          # documentation purposes.
-          export DEVSTACK_LOCAL_CONFIG+=$'\n'"enable_plugin nodepool git://git.openstack.org/openstack-infra/nodepool"
-          function post_test_hook {
-              /opt/stack/new/nodepool/tools/check_devstack_plugin.sh
-          }
-          export -f post_test_hook
-          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
-          ./safe-devstack-vm-gate-wrap.sh
-        executable: /bin/bash
-        chdir: '{{ ansible_user_dir }}/workspace'
-      environment: '{{ zuul | zuul_legacy_vars }}'
+    - name: Run nodepool tests
+      command: tools/check_devstack_plugin.sh {{ ansible_user_dir }}/work/logs/nodepool
+      args:
+        chdir: '{{ zuul.project.src_dir }}'

View File

@@ -1,6 +1,8 @@
 #!/bin/bash -ex
-NODEPOOL_INSTALL=${NODEPOOL_INSTALL:-/opt/stack/new/nodepool-venv}
+LOGDIR=$1
+NODEPOOL_INSTALL=${NODEPOOL_INSTALL:-/opt/stack/nodepool-venv}
 NODEPOOL_CONFIG=${NODEPOOL_CONFIG:-/etc/nodepool/nodepool.yaml}
 NODEPOOL_SECURE=${NODEPOOL_SECURE:-/etc/nodepool/secure.conf}
 NODEPOOL="$NODEPOOL_INSTALL/bin/nodepool -c $NODEPOOL_CONFIG -s $NODEPOOL_SECURE"
@@ -20,10 +22,8 @@ function waitforimage {
     state='ready'
     while ! $NODEPOOL image-list | grep $name | grep $state; do
-        $NODEPOOL image-list > /tmp/.nodepool-image-list.txt
-        $NODEPOOL list > /tmp/.nodepool-list.txt
-        sudo mv /tmp/.nodepool-image-list.txt $WORKSPACE/logs/nodepool-image-list.txt
-        sudo mv /tmp/.nodepool-list.txt $WORKSPACE/logs/nodepool-list.txt
+        $NODEPOOL image-list > ${LOGDIR}/nodepool-image-list.txt
+        $NODEPOOL list > ${LOGDIR}/nodepool-list.txt
         sleep 10
     done
 }
@@ -33,10 +33,8 @@ function waitfornode {
     state='ready'
     while ! $NODEPOOL list | grep $name | grep $state | grep "unlocked"; do
-        $NODEPOOL image-list > /tmp/.nodepool-image-list.txt
-        $NODEPOOL list > /tmp/.nodepool-list.txt
-        sudo mv /tmp/.nodepool-image-list.txt $WORKSPACE/logs/nodepool-image-list.txt
-        sudo mv /tmp/.nodepool-list.txt $WORKSPACE/logs/nodepool-list.txt
+        $NODEPOOL image-list > ${LOGDIR}/nodepool-image-list.txt
+        $NODEPOOL list > ${LOGDIR}/nodepool-list.txt
         sleep 10
     done
 }