Initial cleanup.

commit 6555a5a0de (parent 42315baa74)
Author: Felix Fontein
Date: 2020-10-28 13:28:37 +01:00
75 changed files with 1203 additions and 387 deletions


@ -1,191 +0,0 @@
# README FIRST
# 1. replace "NAMESPACE" and "COLLECTION_NAME" with the correct name in the env section (e.g. with 'community' and 'mycollection')
# 2. If you don't have unit tests, remove that section
# 3. If your collection depends on other collections ensure they are installed, see "Install collection dependencies"
# If you need help, please ask in #ansible-devel on Freenode IRC
name: CI
on:
# Run CI against all pushes (direct commits, including merged PRs) and all pull requests
push:
pull_request:
# Uncomment the following two lines to run CI once per day (at 06:00 UTC)
# schedule:
# - cron: '0 6 * * *'
env:
NAMESPACE: NAMESPACE
COLLECTION_NAME: COLLECTION_NAME
jobs:
###
# Sanity tests (REQUIRED)
#
# https://docs.ansible.com/ansible/latest/dev_guide/testing_sanity.html
sanity:
name: Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
# It's important that Sanity is tested against all stable-X.Y branches
# Testing against `devel` may fail as new tests are added.
# - stable-2.9 # Only if your collection supports Ansible 2.9
- stable-2.10
- devel
runs-on: ubuntu-latest
steps:
# ansible-test requires the collection to be in a directory in the form
# .../ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}/
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Set up Python
uses: actions/setup-python@v2
with:
# It is sufficient to run this once, since "ansible-test sanity" in the Docker image
# will run on all Python versions it supports.
python-version: 3.8
# Install the head of the given branch (devel, stable-2.10)
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
# run ansible-test sanity inside of Docker.
# The docker container has all the pinned dependencies that are required
# and all python versions ansible supports.
- name: Run sanity tests
run: ansible-test sanity --docker -v --color
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
###
# Unit tests (OPTIONAL)
#
# https://docs.ansible.com/ansible/latest/dev_guide/testing_units.html
units:
runs-on: ubuntu-latest
name: Units (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
strategy:
# As soon as the first unit test fails, cancel the others to free up the CI queue
fail-fast: true
matrix:
ansible:
# - stable-2.9 # Only if your collection supports Ansible 2.9
- stable-2.10
- devel
python:
- 2.6
- 2.7
- 3.5
- 3.6
- 3.7
- 3.8
- 3.9
exclude:
- ansible: stable-2.9
python: 3.9
steps:
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
# OPTIONAL: If your unit tests require Python libraries from other collections,
# install them like this:
- name: Install collection dependencies
run: ansible-galaxy collection install ansible.netcommon -p .
# Run the unit tests
- name: Run unit test
run: ansible-test units -v --color --python ${{ matrix.python }} --docker --coverage
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
# ansible-test supports producing code coverage data
- name: Generate coverage report
run: ansible-test coverage xml -v --requirements --group-by command --group-by version
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
# See the reports at https://codecov.io/gh/ansible_collections/GITHUBORG/REPONAME
- uses: codecov/codecov-action@v1
with:
fail_ci_if_error: false
###
# Integration tests (RECOMMENDED)
#
# https://docs.ansible.com/ansible/latest/dev_guide/testing_integration.html
# If the application you are testing is available as a docker container and you want to test
# multiple versions, see the following for an example:
# https://github.com/ansible-collections/community.zabbix/tree/master/.github/workflows
integration:
runs-on: ubuntu-latest
name: I (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
strategy:
fail-fast: false
matrix:
ansible:
# - stable-2.9 # Only if your collection supports Ansible 2.9
- stable-2.10
- devel
python:
- 2.6
- 2.7
- 3.5
- 3.6
- 3.7
- 3.8
- 3.9
exclude:
- ansible: stable-2.9
python: 3.9
steps:
- name: Check out code
uses: actions/checkout@v2
with:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install ansible-base (${{ matrix.ansible }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
# OPTIONAL: If your integration tests require Python libraries or modules from other collections,
# install them like this:
- name: Install collection dependencies
run: ansible-galaxy collection install ansible.netcommon -p .
# Run the integration tests
- name: Run integration test
run: ansible-test integration -v --color --retry-on-error --continue-on-error --diff --python ${{ matrix.python }} --docker --coverage
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
# ansible-test supports producing code coverage data
- name: Generate coverage report
run: ansible-test coverage xml -v --requirements --group-by command --group-by version
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
# See the reports at https://codecov.io/gh/ansible_collections/GITHUBORG/REPONAME
- uses: codecov/codecov-action@v1
with:
fail_ci_if_error: false
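The same checks the sanity job runs in CI can be reproduced locally. A minimal sketch, assuming the collection is checked out under `~/ansible_collections/community/docker` as ansible-test requires:

```bash
# Mirror the workflow steps above: install ansible-base, then run sanity in Docker.
cd ~/ansible_collections/community/docker
pip install https://github.com/ansible/ansible/archive/stable-2.10.tar.gz --disable-pip-version-check
ansible-test sanity --docker -v --color
```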


@ -1,50 +1,76 @@
# collection_template
You can build a new repository for an Ansible Collection using this template by following [Creating a repository from a template](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/creating-a-repository-from-a-template). This README.md contains recommended headings for your collection README.md, with comments describing what each section should contain. Once you have created your collection repository, delete this paragraph and the title above it from your README.md.
# Docker Community Collection
[![Run Status](https://api.shippable.com/projects/5f992599cc07df00079b99d1/badge?branch=main)](https://app.shippable.com/github/ansible-collections/community.docker/dashboard)
[![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.docker)](https://codecov.io/gh/ansible-collections/community.docker)
# Foo Collection
<!-- Add CI and code coverage badges here. Samples included below. -->
[![CI](https://github.com/ansible-collections/REPONAMEHERE/workflows/CI/badge.svg?event=push)](https://github.com/ansible-collections/REPONAMEHERE/actions) [![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/REPONAMEHERE)](https://codecov.io/gh/ansible-collections/REPONAMEHERE)
<!-- Describe the collection and why a user would want to use it. What does the collection do? -->
This repo contains the `community.docker` Ansible Collection. The collection includes many modules and plugins to work with Docker.
## Tested with Ansible
<!-- List the versions of Ansible the collection has been tested with. Must match what is in galaxy.yml. -->
Tested with the current Ansible 2.9 and 2.10 releases and the current development version of Ansible. Ansible versions before 2.9.10 are not supported.
## External requirements
<!-- List any external resources the collection depends on, for example minimum versions of an OS, libraries, or utilities. Do not list other Ansible collections here. -->
### Supported connections
<!-- Optional. If your collection supports only specific connection types (such as HTTPAPI, netconf, or others), list them here. -->
Most modules and plugins require the [Docker SDK for Python](https://pypi.org/project/docker/). For Python 2.6 support, use [the deprecated docker-py library](https://pypi.org/project/docker-py/) instead.
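For illustration, a typical installation of these libraries on the host where the modules run (a sketch; the package names come from the links above):

```bash
pip install docker      # Docker SDK for Python, for Python 2.7 and 3.5+
pip install docker-py   # deprecated predecessor, only needed for Python 2.6
```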
## Included content
<!-- Galaxy will eventually list the module docs within the UI, but until that is ready, you may need to either describe your plugins etc here, or point to an external docsite to cover that information. -->
* Connection plugins:
- community.docker.docker
* Inventory plugins:
- community.docker.docker_machine
- community.docker.docker_swarm
* Modules:
- community.docker.docker_compose
- community.docker.docker_config
- community.docker.docker_container_info
- community.docker.docker_container
- community.docker.docker_host_info
- community.docker.docker_image_info
- community.docker.docker_image
- community.docker.docker_login
- community.docker.docker_network_info
- community.docker.docker_network
- community.docker.docker_node_info
- community.docker.docker_node
- community.docker.docker_prune
- community.docker.docker_secret
- community.docker.docker_stack_info
- community.docker.docker_stack
- community.docker.docker_stack_task_info
- community.docker.docker_swarm_info
- community.docker.docker_swarm
- community.docker.docker_swarm_service_info
- community.docker.docker_swarm_service
- community.docker.docker_volume_info
- community.docker.docker_volume
## Using this collection
<!--Include some quick examples that cover the most common use cases for your collection content. -->
Before using this collection, you need to install it with the `ansible-galaxy` CLI:
ansible-galaxy collection install community.docker
You can also include it in a `requirements.yml` file and install it via `ansible-galaxy collection install -r requirements.yml` using the format:
```yaml
collections:
- name: community.docker
```
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
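As a quick smoke test after installation, the modules can also be called ad hoc. A minimal sketch, assuming the Docker SDK for Python is installed and a Docker daemon is reachable locally:

```bash
ansible-galaxy collection install community.docker
ansible localhost -m community.docker.docker_host_info
```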
## Contributing to this collection
<!--Describe how the community can contribute to your collection. At a minimum, include how and where users can create issues to report problems or request features for this collection. List contribution requirements, including preferred workflows and necessary testing, so you can benefit from community PRs. If you are following general Ansible contributor guidelines, you can link to - [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html). -->
If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATHS`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
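A minimal sketch of such a checkout, assuming the default user collections path `~/.ansible/collections`:

```bash
mkdir -p ~/.ansible/collections/ansible_collections/community
git clone https://github.com/ansible-collections/community.docker.git \
    ~/.ansible/collections/ansible_collections/community/docker
```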
## Release notes
See the [changelog](https://github.com/ansible-collections/REPONAMEHERE/tree/main/CHANGELOG.rst).
## Roadmap
<!-- Optional. Include the roadmap for this collection, and the proposed release/versioning strategy so users can anticipate the upgrade/update cycle. -->
See the [changelog](https://github.com/ansible-collections/community.docker/tree/main/CHANGELOG.rst).
## More information
<!-- List out where the user can find additional information, such as working group meeting times, slack/IRC channels, or documentation for the product this collection automates. At a minimum, link to: -->
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
@ -55,8 +81,6 @@ See the [changelog](https://github.com/ansible-collections/REPONAMEHERE/tree/mai
## Licensing
<!-- Include the appropriate license information here and a pointer to the full licensing details. If the collection contains modules migrated from the ansible/ansible repo, you must use the same license that existed in the ansible/ansible repo. See the GNU license example below. -->
GNU General Public License v3.0 or later.
See [LICENSE](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text.
See [COPYING](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text.


@ -25,5 +25,5 @@ sections:
- Bugfixes
- - known_issues
- Known Issues
title: CHANGE THIS IN changelogs/config.yaml!
title: Docker Community Collection
trivial_section_name: trivial


@ -1,22 +1,19 @@
# See https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html
namespace: community
name: FIXME
name: docker
version: 0.1.0
readme: README.md
authors:
- YOUR NAME (github.com/YOURGITHUB)
description: null
- Ansible Docker Working Group
description: Modules and plugins for working with Docker
license_file: COPYING
tags:
# tags so people can search for collections https://galaxy.ansible.com/search
# tags are all lower-case, no spaces, no dashes.
- example1
- example2
repository: https://github.com/ansible-collections/community.REPO_NAME
- docker
repository: https://github.com/ansible-collections/community.docker
#documentation: https://github.com/ansible-collection-migration/community.REPO_NAME/tree/main/docs
homepage: https://github.com/ansible-collections/community.REPO_NAME
issues: https://github.com/ansible-collections/community.REPO_NAME/issues
homepage: https://github.com/ansible-collections/community.docker
issues: https://github.com/ansible-collections/community.docker/issues
build_ignore:
# https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#ignoring-files-and-folders
- .gitignore


@ -1 +0,0 @@
docker_image_info.py


@ -1 +0,0 @@
docker_compose.py

shippable.yml Normal file

@ -0,0 +1,106 @@
language: python
env:
matrix:
- T=none
matrix:
exclude:
- env: T=none
include:
- env: T=devel/sanity/1
- env: T=devel/sanity/extra
- env: T=2.10/sanity/1
- env: T=2.9/sanity/1
- env: T=devel/units/2.6/1
- env: T=devel/units/2.7/1
- env: T=devel/units/3.5/1
- env: T=devel/units/3.6/1
- env: T=devel/units/3.7/1
- env: T=devel/units/3.8/1
- env: T=devel/units/3.9/1
- env: T=2.10/units/2.6/1
- env: T=2.10/units/2.7/1
- env: T=2.10/units/3.5/1
- env: T=2.10/units/3.6/1
- env: T=2.10/units/3.7/1
- env: T=2.10/units/3.8/1
- env: T=2.10/units/3.9/1
- env: T=2.9/units/2.6/1
- env: T=2.9/units/2.7/1
- env: T=2.9/units/3.5/1
- env: T=2.9/units/3.6/1
- env: T=2.9/units/3.7/1
- env: T=2.9/units/3.8/1
# Group 1: docker_swarm
- env: T=devel/rhel/7.8/1
- env: T=devel/rhel/8.2/1
# docker_node, docker_node_info, docker_stack, docker_stack_info, docker_stack_task_info, docker_swarm_info, inventory_docker_machine, inventory_docker_swarm
- env: T=devel/rhel/7.8/2
- env: T=devel/rhel/8.2/2
# docker_config, docker_secret, docker_swarm_service, docker_swarm_service_info
- env: T=devel/rhel/7.8/3
- env: T=devel/rhel/8.2/3
# docker_host_info, docker_image, docker_image_info, docker_login, docker_network, docker_network_info, docker_prune, docker_volume, docker_volume_info
- env: T=devel/rhel/7.8/4
- env: T=devel/rhel/8.2/4
- env: T=devel/linux/centos7/4
- env: T=devel/linux/centos8/4
- env: T=devel/linux/fedora31/4
- env: T=devel/linux/fedora32/4
- env: T=devel/linux/opensuse15py2/4
- env: T=devel/linux/opensuse15/4
- env: T=devel/linux/ubuntu1604/4
- env: T=devel/linux/ubuntu1804/4
# docker_container, docker_container_info
- env: T=devel/rhel/7.8/5
- env: T=devel/rhel/8.2/5
- env: T=devel/linux/centos7/5
- env: T=devel/linux/centos8/5
- env: T=devel/linux/fedora31/5
- env: T=devel/linux/fedora32/5
- env: T=devel/linux/opensuse15py2/5
- env: T=devel/linux/opensuse15/5
- env: T=devel/linux/ubuntu1604/5
- env: T=devel/linux/ubuntu1804/5
- env: T=2.10/rhel/8.2/1
- env: T=2.10/rhel/8.2/2
- env: T=2.10/rhel/8.2/3
- env: T=2.10/linux/ubuntu1604/4
- env: T=2.10/linux/ubuntu1804/5
- env: T=2.9/rhel/8.2/1
- env: T=2.9/rhel/8.2/2
- env: T=2.9/rhel/8.2/3
- env: T=2.9/linux/ubuntu1604/4
- env: T=2.9/linux/ubuntu1804/5
branches:
except:
- "*-patch-*"
- "revert-*-*"
- "patchback/backports/*"
build:
ci:
- tests/utils/shippable/timing.sh tests/utils/shippable/shippable.sh $T
integrations:
notifications:
- integrationName: email
type: email
on_success: never
on_failure: never
on_start: never
on_pull_request: never
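Each matrix entry only sets `T`; the `ci` command above dispatches it, so a single cell can be reproduced from a local checkout, for example:

```bash
# Equivalent to the T=devel/sanity/1 matrix entry
tests/utils/shippable/timing.sh tests/utils/shippable/shippable.sh devel/sanity/1
```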


@ -0,0 +1 @@
hidden


@ -0,0 +1,10 @@
#!/usr/bin/env bash
set -eux
[ -f "${INVENTORY}" ]
# Run connection tests with both the default and C locale.
ansible-playbook test_connection.yml -i "${INVENTORY}" "$@"
LC_ALL=C LANG=C ansible-playbook test_connection.yml -i "${INVENTORY}" "$@"


@ -0,0 +1,43 @@
- hosts: "{{ target_hosts }}"
gather_facts: no
serial: 1
tasks:
### raw with unicode arg and output
- name: raw with unicode arg and output
raw: echo 汉语
register: command
- name: check output of raw with unicode arg and output
assert:
that:
- "'汉语' in command.stdout"
- command is changed # as of 2.2, raw should default to changed: true for consistency w/ shell/command/script modules
### copy local file with unicode filename and content
- name: create local file with unicode filename and content
local_action: lineinfile dest={{ local_tmp }}-汉语/汉语.txt create=true line=汉语
- name: remove remote file with unicode filename and content
action: "{{ action_prefix }}file path={{ remote_tmp }}-汉语/汉语.txt state=absent"
- name: create remote directory with unicode name
action: "{{ action_prefix }}file path={{ remote_tmp }}-汉语 state=directory"
- name: copy local file with unicode filename and content
action: "{{ action_prefix }}copy src={{ local_tmp }}-汉语/汉语.txt dest={{ remote_tmp }}-汉语/汉语.txt"
### fetch remote file with unicode filename and content
- name: remove local file with unicode filename and content
local_action: file path={{ local_tmp }}-汉语/汉语.txt state=absent
- name: fetch remote file with unicode filename and content
fetch: src={{ remote_tmp }}-汉语/汉语.txt dest={{ local_tmp }}-汉语/汉语.txt fail_on_missing=true validate_checksum=true flat=true
### remove local and remote temp files
- name: remove local temp file
local_action: file path={{ local_tmp }}-汉语 state=absent
- name: remove remote temp file
action: "{{ action_prefix }}file path={{ remote_tmp }}-汉语 state=absent"
### test wait_for_connection plugin
- ansible.builtin.wait_for_connection:


@ -0,0 +1,2 @@
needs/target/connection
hidden


@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -eux
# Connection tests for POSIX platforms use this script by linking to it from the appropriate 'connection_' target dir.
# The name of the inventory group to test is extracted from the directory name following the 'connection_' prefix.
group=$(python -c \
"from os import path; print(path.basename(path.abspath(path.dirname('$0'))).replace('connection_', ''))")
cd ../connection
INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
-e target_hosts="${group}" \
-e action_prefix= \
-e local_tmp=/tmp/ansible-local \
-e remote_tmp=/tmp/ansible-remote \
"$@"

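As a hypothetical example of the linking convention described above: a target directory named `connection_docker` whose `runme.sh` points at this script would derive `group=docker` and use `../connection_docker/test_connection.inventory` as the inventory (the target name is illustrative):

```bash
cd tests/integration/targets/connection_docker
./runme.sh -v
```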

@ -1,9 +1,2 @@
shippable/posix/group3
skip/osx
skip/macos
skip/freebsd
skip/aix
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,6 +1,2 @@
shippable/posix/group5
skip/osx
skip/macos
skip/freebsd
skip/aix
destructive


@ -1,6 +1,2 @@
shippable/posix/group2
skip/osx
skip/macos
skip/freebsd
skip/aix
shippable/posix/group5
destructive


@ -1,6 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,6 +1,2 @@
shippable/posix/group5
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,6 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,6 +1,2 @@
shippable/posix/group3
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,6 +1,2 @@
shippable/posix/group5
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,6 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,12 +1,3 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system. On VMs, we restart docker daemon
# after finishing the tests to minimize potential effects
# on other tests.
needs/root


@ -1,9 +1,2 @@
shippable/posix/group3
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group2
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,6 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,9 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group3
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,9 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,9 +1,2 @@
shippable/posix/group1
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group2
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,9 +1,2 @@
shippable/posix/group1
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group2
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,13 +1,3 @@
shippable/posix/group2
disabled # See: https://github.com/ansible/ansible/issues/61815
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group1
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system. On VMs, we restart docker daemon
# after finishing the tests to minimize potential effects
# on other tests.
needs/root


@ -1,9 +1,2 @@
shippable/posix/group1
skip/aix
skip/osx
skip/macos
skip/freebsd
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,9 +1,2 @@
shippable/posix/group5
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group3
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,9 +1,2 @@
shippable/posix/group3
skip/aix
skip/osx
skip/macos
skip/freebsd
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system.


@ -1,6 +1,2 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,6 +1,2 @@
shippable/posix/group1
skip/aix
skip/osx
skip/macos
skip/freebsd
shippable/posix/group4
destructive


@ -1,11 +1,4 @@
disabled
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
destructive
skip/docker # We need SSH access to the VM and need to be able to let
# docker-machine install docker in the VM. This won't work
# with tests running in docker containers.
needs/root


@ -1,12 +1,3 @@
shippable/posix/group2
skip/aix
skip/osx
skip/macos
skip/freebsd
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
# the whole CI system. On VMs, we restart docker daemon
# after finishing the tests to minimize potential effects
# on other tests.
needs/root


@ -0,0 +1,10 @@
####################################################################
# WARNING: These are designed specifically for Ansible tests #
# and should not be used as examples of how to write Ansible roles #
####################################################################
- name: Install EPEL
yum:
name: https://s3.amazonaws.com/ansible-ci-files/test/integration/targets/setup_epel/epel-release-latest-{{ ansible_distribution_major_version }}.noarch.rpm
disable_gpg_check: true
when: ansible_facts.distribution in ['RedHat', 'CentOS']


@ -0,0 +1,3 @@
dependencies:
- setup_remote_constraints
- setup_pkg_mgr


@ -0,0 +1,46 @@
---
####################################################################
# WARNING: These are designed specifically for Ansible tests #
# and should not be used as examples of how to write Ansible roles #
####################################################################
- name: Include OS-specific variables
include_vars: '{{ ansible_os_family }}.yml'
when: not ansible_os_family == "Darwin"
- name: Install OpenSSL
become: True
package:
name: '{{ openssl_package_name }}'
when: not ansible_os_family == 'Darwin'
- name: Install pyOpenSSL (Python 3)
become: True
package:
name: '{{ pyopenssl_package_name_python3 }}'
when: not ansible_os_family == 'Darwin' and ansible_python_version is version('3.0', '>=')
- name: Install pyOpenSSL (Python 2)
become: True
package:
name: '{{ pyopenssl_package_name }}'
when: not ansible_os_family == 'Darwin' and ansible_python_version is version('3.0', '<')
- name: Install pyOpenSSL (Darwin)
become: True
pip:
name: pyOpenSSL
extra_args: "-c {{ remote_constraints }}"
when: ansible_os_family == 'Darwin'
- name: register pyOpenSSL version
command: "{{ ansible_python.executable }} -c 'import OpenSSL; print(OpenSSL.__version__)'"
register: pyopenssl_version
- name: register openssl version
shell: "openssl version | cut -d' ' -f2"
register: openssl_version
- name: register cryptography version
command: "{{ ansible_python.executable }} -c 'import cryptography; print(cryptography.__version__)'"
register: cryptography_version


@ -0,0 +1,3 @@
pyopenssl_package_name: python-openssl
pyopenssl_package_name_python3: python3-openssl
openssl_package_name: openssl


@ -0,0 +1,3 @@
pyopenssl_package_name: py27-openssl
pyopenssl_package_name_python3: py36-openssl
openssl_package_name: openssl


@ -0,0 +1,3 @@
pyopenssl_package_name: pyOpenSSL
pyopenssl_package_name_python3: python3-pyOpenSSL
openssl_package_name: openssl


@ -0,0 +1,3 @@
pyopenssl_package_name: python-pyOpenSSL
pyopenssl_package_name_python3: python3-pyOpenSSL
openssl_package_name: openssl


@ -0,0 +1,17 @@
---
####################################################################
# WARNING: These are designed specifically for Ansible tests #
# and should not be used as examples of how to write Ansible roles #
####################################################################
- set_fact:
pkg_mgr: community.general.pkgng
ansible_pkg_mgr: community.general.pkgng
cacheable: yes
when: ansible_os_family == "FreeBSD"
- set_fact:
pkg_mgr: community.general.zypper
ansible_pkg_mgr: community.general.zypper
cacheable: yes
when: ansible_os_family == "Suse"


@ -0,0 +1 @@
needs/file/tests/utils/constraints.txt


@ -0,0 +1,2 @@
dependencies:
- setup_remote_tmp_dir


@ -0,0 +1,13 @@
####################################################################
# WARNING: These are designed specifically for Ansible tests #
# and should not be used as examples of how to write Ansible roles #
####################################################################
- name: record constraints.txt path on remote host
set_fact:
remote_constraints: "{{ remote_tmp_dir }}/constraints.txt"
- name: copy constraints.txt to remote host
copy:
src: "{{ role_path }}/../../../utils/constraints.txt"
dest: "{{ remote_constraints }}"


@ -0,0 +1,5 @@
- name: delete temporary directory
include_tasks: default-cleanup.yml
- name: delete temporary directory (windows)
include_tasks: windows-cleanup.yml


@ -0,0 +1,5 @@
- name: delete temporary directory
file:
path: "{{ remote_tmp_dir }}"
state: absent
no_log: yes


@ -0,0 +1,11 @@
- name: create temporary directory
tempfile:
state: directory
suffix: .test
register: remote_tmp_dir
notify:
- delete temporary directory
- name: record temporary directory
set_fact:
remote_tmp_dir: "{{ remote_tmp_dir.path }}"


@ -0,0 +1,15 @@
####################################################################
# WARNING: These are designed specifically for Ansible tests #
# and should not be used as examples of how to write Ansible roles #
####################################################################
- name: make sure we have the ansible_os_family and ansible_distribution_version facts
setup:
gather_subset: distribution
when: ansible_facts == {}
- include_tasks: "{{ lookup('first_found', files)}}"
vars:
files:
- "{{ ansible_os_family | lower }}.yml"
- "default.yml"

tests/requirements.yml Normal file

@ -0,0 +1,7 @@
integration_tests_dependencies:
- ansible.posix
- community.internal_test_tools
- community.crypto
- community.general
unit_tests_dependencies:
- community.internal_test_tools
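These are the collections CI installs with `ansible-galaxy` before running tests; the same can be done locally, for example:

```bash
ansible-galaxy collection install community.internal_test_tools ansible.posix community.crypto community.general -p .
```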


@ -0,0 +1,7 @@
{
"include_symlinks": true,
"prefixes": [
"plugins/"
],
"output": "path-message"
}


@ -0,0 +1,43 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""Prevent unwanted files from being added to the source tree."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
def main():
"""Main entry point."""
paths = sys.argv[1:] or sys.stdin.read().splitlines()
allowed_extensions = (
'.cs',
'.ps1',
'.psm1',
'.py',
)
skip_paths = set([
])
skip_directories = (
)
for path in paths:
if path in skip_paths:
continue
if any(path.startswith(skip_directory) for skip_directory in skip_directories):
continue
ext = os.path.splitext(path)[1]
if ext not in allowed_extensions:
print('%s: extension must be one of: %s' % (path, ', '.join(allowed_extensions)))
if __name__ == '__main__':
main()


@ -0,0 +1,4 @@
plugins/modules/docker_container.py use-argspec-type-path # uses colon-separated paths, can't use type=path
plugins/modules/docker_stack.py validate-modules:doc-elements-mismatch
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang


@ -0,0 +1,4 @@
plugins/modules/docker_container.py use-argspec-type-path # uses colon-separated paths, can't use type=path
plugins/modules/docker_stack.py validate-modules:doc-elements-mismatch
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang


@ -0,0 +1,3 @@
plugins/modules/docker_container.py use-argspec-type-path # uses colon-separated paths, can't use type=path
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang


@ -0,0 +1,33 @@
# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Compat for python2.7
#
# One unittest needs to import builtins via __import__() so we need to have
# the string that represents it
try:
import __builtin__
except ImportError:
BUILTINS = 'builtins'
else:
BUILTINS = '__builtin__'

tests/unit/compat/mock.py Normal file

@ -0,0 +1,122 @@
# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python3.x's unittest.mock module
'''
import sys
# Python 2.7
# Note: Could use the pypi mock library on python3.x as well as python2.x. It
# is the same as the python3 stdlib mock library
try:
# Allow wildcard import because we really do want to import all of mock's
# symbols into this compat shim
# pylint: disable=wildcard-import,unused-wildcard-import
from unittest.mock import *
except ImportError:
# Python 2
# pylint: disable=wildcard-import,unused-wildcard-import
try:
from mock import *
except ImportError:
print('You need the mock library installed on python2.x to run tests')
# Prior to 3.4.4, mock_open cannot handle binary read_data
if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
file_spec = None
def _iterate_read_data(read_data):
# Helper for mock_open:
# Retrieve lines from read_data via a generator so that separate calls to
# readline, read, and readlines are properly interleaved
sep = b'\n' if isinstance(read_data, bytes) else '\n'
data_as_list = [l + sep for l in read_data.split(sep)]
if data_as_list[-1] == sep:
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
data_as_list = data_as_list[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line, so remove the
# newline that our naive format() added
data_as_list[-1] = data_as_list[-1][:-1]
for line in data_as_list:
yield line
def mock_open(mock=None, read_data=''):
"""
A helper function to create a mock to replace the use of `open`. It works
for `open` called directly or used as a context manager.
The `mock` argument is the mock object to configure. If `None` (the
default) then a `MagicMock` will be created for you, with the API limited
to methods or attributes available on standard file handles.
`read_data` is a string for the `read`, `readline`, and `readlines` methods of the
file handle to return. This is an empty string by default.
"""
def _readlines_side_effect(*args, **kwargs):
if handle.readlines.return_value is not None:
return handle.readlines.return_value
return list(_data)
def _read_side_effect(*args, **kwargs):
if handle.read.return_value is not None:
return handle.read.return_value
return type(read_data)().join(_data)
def _readline_side_effect():
if handle.readline.return_value is not None:
while True:
yield handle.readline.return_value
for line in _data:
yield line
global file_spec
if file_spec is None:
import _io
file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
if mock is None:
mock = MagicMock(name='open', spec=open)
handle = MagicMock(spec=file_spec)
handle.__enter__.return_value = handle
_data = _iterate_read_data(read_data)
handle.write.return_value = None
handle.read.return_value = None
handle.readline.return_value = None
handle.readlines.return_value = None
handle.read.side_effect = _read_side_effect
handle.readline.side_effect = _readline_side_effect()
handle.readlines.side_effect = _readlines_side_effect
mock.return_value = handle
return mock


@ -0,0 +1,38 @@
# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python2.7's unittest module
'''
import sys
# Allow wildcard import because we really do want to import all of
# unittests's symbols into this compat shim
# pylint: disable=wildcard-import,unused-wildcard-import
if sys.version_info < (2, 7):
try:
# Need unittest2 on python2.6
from unittest2 import *
except ImportError:
print('You need unittest2 installed on python2.6.x to run tests')
else:
from unittest import *


@ -0,0 +1,31 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pytest
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
@pytest.fixture
def patch_ansible_module(request, mocker):
if isinstance(request.param, string_types):
args = request.param
elif isinstance(request.param, MutableMapping):
if 'ANSIBLE_MODULE_ARGS' not in request.param:
request.param = {'ANSIBLE_MODULE_ARGS': request.param}
if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
args = json.dumps(request.param)
else:
raise Exception('Malformed data to the patch_ansible_module pytest fixture')
mocker.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args))


@ -0,0 +1,2 @@
unittest2 ; python_version < '2.7'
importlib ; python_version < '2.7'


@ -0,0 +1,52 @@
coverage >= 4.2, < 5.0.0, != 4.3.2 ; python_version <= '3.7' # features in 4.2+ required, avoid known bug in 4.3.2 on python 2.6, coverage 5.0+ incompatible
coverage >= 4.5.4, < 5.0.0 ; python_version > '3.7' # coverage had a bug in < 4.5.4 that would cause unit tests to hang in Python 3.8, coverage 5.0+ incompatible
cryptography < 2.2 ; python_version < '2.7' # cryptography 2.2 drops support for python 2.6
deepdiff < 4.0.0 ; python_version < '3' # deepdiff 4.0.0 and later require python 3
jinja2 < 2.11 ; python_version < '2.7' # jinja2 2.11 and later require python 2.7 or later
urllib3 < 1.24 ; python_version < '2.7' # urllib3 1.24 and later require python 2.7 or later
pywinrm >= 0.3.0 # message encryption support
sphinx < 1.6 ; python_version < '2.7' # sphinx 1.6 and later require python 2.7 or later
sphinx < 1.8 ; python_version >= '2.7' # sphinx 1.8 and later are currently incompatible with rstcheck 3.3
pygments >= 2.4.0 # Pygments 2.4.0 includes bugfixes for YAML and YAML+Jinja lexers
wheel < 0.30.0 ; python_version < '2.7' # wheel 0.30.0 and later require python 2.7 or later
yamllint != 1.8.0, < 1.14.0 ; python_version < '2.7' # yamllint 1.8.0 and 1.14.0+ require python 2.7+
pycrypto >= 2.6 # Need features found in 2.6 and greater
ncclient >= 0.5.2 # Need features added in 0.5.2 and greater
idna < 2.6, >= 2.5 # linode requires idna < 2.9, >= 2.5, requests requires idna < 2.6, but cryptography will cause the latest version to be installed instead
paramiko < 2.4.0 ; python_version < '2.7' # paramiko 2.4.0 drops support for python 2.6
pytest < 3.3.0 ; python_version < '2.7' # pytest 3.3.0 drops support for python 2.6
pytest < 5.0.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7
pytest-forked < 1.0.2 ; python_version < '2.7' # pytest-forked 1.0.2 and later require python 2.7 or later
pytest-forked >= 1.0.2 ; python_version >= '2.7' # pytest-forked before 1.0.2 does not work with pytest 4.2.0+ (which requires python 2.7+)
ntlm-auth >= 1.3.0 # message encryption support using cryptography
requests < 2.20.0 ; python_version < '2.7' # requests 2.20.0 drops support for python 2.6
requests-ntlm >= 1.1.0 # message encryption support
requests-credssp >= 0.1.0 # message encryption support
voluptuous >= 0.11.0 # Schema recursion via Self
openshift >= 0.6.2, < 0.9.0 # merge_type support
virtualenv < 16.0.0 ; python_version < '2.7' # virtualenv 16.0.0 and later require python 2.7 or later
pathspec < 0.6.0 ; python_version < '2.7' # pathspec 0.6.0 and later require python 2.7 or later
pyopenssl < 18.0.0 ; python_version < '2.7' # pyOpenSSL 18.0.0 and later require python 2.7 or later
pyfmg == 0.6.1 # newer versions do not pass current unit tests
pyyaml < 5.1 ; python_version < '2.7' # pyyaml 5.1 and later require python 2.7 or later
pycparser < 2.19 ; python_version < '2.7' # pycparser 2.19 and later require python 2.7 or later
mock >= 2.0.0 # needed for features backported from Python 3.6 unittest.mock (assert_called, assert_called_once...)
pytest-mock >= 1.4.0 # needed for mock_use_standalone_module pytest option
xmltodict < 0.12.0 ; python_version < '2.7' # xmltodict 0.12.0 and later require python 2.7 or later
lxml < 4.3.0 ; python_version < '2.7' # lxml 4.3.0 and later require python 2.7 or later
pyvmomi < 6.0.0 ; python_version < '2.7' # pyvmomi 6.0.0 and later require python 2.7 or later
pyone == 1.1.9 # newer versions do not pass current integration tests
boto3 < 1.11 ; python_version < '2.7' # boto3 1.11 drops Python 2.6 support
botocore >= 1.10.0, < 1.14 ; python_version < '2.7' # adds support for the following AWS services: secretsmanager, fms, and acm-pca; botocore 1.14 drops Python 2.6 support
botocore >= 1.10.0 ; python_version >= '2.7' # adds support for the following AWS services: secretsmanager, fms, and acm-pca
setuptools < 45 ; python_version <= '2.7' # setuptools 45 and later require python 3.5 or later
cffi >= 1.14.2, != 1.14.3 # 1.14.3 was yanked, but older versions of pip will still install it
# freeze pylint and its requirements for consistent test results
astroid == 2.2.5
isort == 4.3.15
lazy-object-proxy == 1.3.1
mccabe == 0.6.1
pylint == 2.3.1
typed-ast == 1.4.0 # 1.4.0 is required to compile on Python 3.8
wrapt == 1.11.1


@ -0,0 +1,120 @@
#!/usr/bin/env python
"""Verify the currently executing Shippable test matrix matches the one defined in the "shippable.yml" file."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import json
import os
import re
import sys
import time
try:
from typing import NoReturn
except ImportError:
NoReturn = None
try:
# noinspection PyCompatibility
from urllib2 import urlopen # pylint: disable=ansible-bad-import-from
except ImportError:
# noinspection PyCompatibility
from urllib.request import urlopen
def main(): # type: () -> None
"""Main entry point."""
repo_full_name = os.environ['REPO_FULL_NAME']
required_repo_full_name = 'ansible-collections/community.docker'
if repo_full_name != required_repo_full_name:
sys.stderr.write('Skipping matrix check on repo "%s" which is not "%s".\n' % (repo_full_name, required_repo_full_name))
return
with open('shippable.yml', 'rb') as yaml_file:
yaml = yaml_file.read().decode('utf-8').splitlines()
defined_matrix = [match.group(1) for match in [re.search(r'^ *- env: T=(.*)$', line) for line in yaml] if match and match.group(1) != 'none']
if not defined_matrix:
fail('No matrix entries found in the "shippable.yml" file.',
'Did you modify the "shippable.yml" file?')
run_id = os.environ['SHIPPABLE_BUILD_ID']
sleep = 1
jobs = []
for attempts_remaining in range(4, -1, -1):
try:
jobs = json.loads(urlopen('https://api.shippable.com/jobs?runIds=%s' % run_id).read())
if not isinstance(jobs, list):
raise Exception('Shippable run %s data is not a list.' % run_id)
break
except Exception as ex:
if not attempts_remaining:
fail('Unable to retrieve Shippable run %s matrix.' % run_id,
str(ex))
sys.stderr.write('Unable to retrieve Shippable run %s matrix: %s\n' % (run_id, ex))
sys.stderr.write('Trying again in %d seconds...\n' % sleep)
time.sleep(sleep)
sleep *= 2
if len(jobs) != len(defined_matrix):
if len(jobs) == 1:
hint = '\n\nMake sure you do not use the "Rebuild with SSH" option.'
else:
hint = ''
fail('Shippable run %s has %d jobs instead of the expected %d jobs.' % (run_id, len(jobs), len(defined_matrix)),
'Try re-running the entire matrix.%s' % hint)
actual_matrix = dict((job.get('jobNumber'), dict(tuple(line.split('=', 1)) for line in job.get('env', [])).get('T', '')) for job in jobs)
errors = [(job_number, test, actual_matrix.get(job_number)) for job_number, test in enumerate(defined_matrix, 1) if actual_matrix.get(job_number) != test]
if len(errors):
error_summary = '\n'.join('Job %s expected "%s" but found "%s" instead.' % (job_number, expected, actual) for job_number, expected, actual in errors)
fail('Shippable run %s has a job matrix mismatch.' % run_id,
'Try re-running the entire matrix.\n\n%s' % error_summary)
def fail(message, output): # type: (str, str) -> NoReturn
# Include a leading newline to improve readability on Shippable "Tests" tab.
# Without this, the first line becomes indented.
output = '\n' + output.strip()
timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
# hack to avoid requiring junit-xml, which isn't pre-installed on Shippable outside our test containers
xml = '''
<?xml version="1.0" encoding="utf-8"?>
<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">
\t<testsuite disabled="0" errors="1" failures="0" file="None" log="None" name="ansible-test" skipped="0" tests="1" time="0" timestamp="%s" url="None">
\t\t<testcase classname="timeout" name="timeout">
\t\t\t<error message="%s" type="error">%s</error>
\t\t</testcase>
\t</testsuite>
</testsuites>
''' % (timestamp, message, output)
path = 'shippable/testresults/check-matrix.xml'
dir_path = os.path.dirname(path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
with open(path, 'w') as junit_fd:
junit_fd.write(xml.lstrip())
sys.stderr.write(message + '\n')
sys.stderr.write(output + '\n')
sys.exit(1)
if __name__ == '__main__':
main()

tests/utils/shippable/cloud.sh Executable file

@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
cloud="${args[0]}"
python="${args[1]}"
group="${args[2]}"
target="shippable/${cloud}/group${group}/"
stage="${S:-prod}"
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--remote-terminate always --remote-stage "${stage}" \
--docker --python "${python}"

tests/utils/shippable/linux.sh Executable file

@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
image="${args[1]}"
if [ "${#args[@]}" -gt 2 ]; then
target="shippable/posix/group${args[2]}/"
else
target="shippable/posix/"
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--docker "${image}"
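For example, the `T=devel/linux/ubuntu1804/5` entry from `shippable.yml` reaches this script (via `shippable.sh`) as:

```bash
# image resolves to ubuntu1804, target to shippable/posix/group5/
tests/utils/shippable/linux.sh linux/ubuntu1804/5
```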

tests/utils/shippable/remote.sh Executable file

@ -0,0 +1,22 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
platform="${args[0]}"
version="${args[1]}"
if [ "${#args[@]}" -gt 2 ]; then
target="shippable/posix/group${args[2]}/"
else
target="shippable/posix/"
fi
stage="${S:-prod}"
provider="${P:-default}"
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
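Platform cells such as `T=devel/rhel/8.2/1` are parsed the same way; called directly, the equivalent invocation would be (a sketch):

```bash
# platform=rhel, version=8.2, target=shippable/posix/group1/
tests/utils/shippable/remote.sh rhel/8.2/1
```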


@ -0,0 +1 @@
remote.sh

tests/utils/shippable/sanity.sh Executable file

@ -0,0 +1,24 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
group="${args[1]}"
if [ "${BASE_BRANCH:-}" ]; then
base_branch="origin/${BASE_BRANCH}"
else
base_branch=""
fi
if [ "${group}" == "extra" ]; then
../internal_test_tools/tools/run.py --color
exit
fi
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
--docker --base-branch "${base_branch}" \
--allow-disabled
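The two sanity flavours in `shippable.yml` map onto this script as follows (a sketch):

```bash
tests/utils/shippable/sanity.sh sanity/1      # regular ansible-test sanity run
tests/utils/shippable/sanity.sh sanity/extra  # extra checks via community.internal_test_tools
```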


@ -0,0 +1,212 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
ansible_version="${args[0]}"
script="${args[1]}"
function join {
local IFS="$1";
shift;
echo "$*";
}
test="$(join / "${args[@]:1}")"
docker images ansible/ansible
docker images quay.io/ansible/*
docker ps
for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v '^drydock/' | sed 's/^.* //'); do
docker rm -f "${container}" || true # ignore errors
done
docker ps
if [ -d /home/shippable/cache/ ]; then
ls -la /home/shippable/cache/
fi
command -v python
python -V
function retry
{
# shellcheck disable=SC2034
for repetition in 1 2 3; do
set +e
"$@"
result=$?
set -e
if [ ${result} == 0 ]; then
return ${result}
fi
echo "$* -> ${result}"
done
echo "Command '$*' failed 3 times!"
exit -1
}
command -v pip
pip --version
pip list --disable-pip-version-check
if [ "${ansible_version}" == "devel" ]; then
retry pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
else
retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
fi
export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
TEST_DIR="${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/docker"
mkdir -p "${TEST_DIR}"
cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
cd "${TEST_DIR}"
if [ "${script}" != "sanity" ] || [ "${test}" == "sanity/extra" ]; then
# Nothing further should be added to this list.
# This is to prevent modules or plugins in this collection having a runtime dependency on other collections.
retry ansible-galaxy -vvv collection install community.internal_test_tools
fi
if [ "${script}" != "sanity" ] && [ "${script}" != "units" ]; then
# To prevent Python dependencies on other collections only install other collections for integration tests
retry ansible-galaxy -vvv collection install ansible.posix
retry ansible-galaxy -vvv collection install community.crypto
retry ansible-galaxy -vvv collection install community.general
fi
# END: HACK
export PYTHONIOENCODING='utf-8'
if [ "${JOB_TRIGGERED_BY_NAME:-}" == "nightly-trigger" ]; then
COVERAGE=yes
COMPLETE=yes
fi
if [ -n "${COVERAGE:-}" ]; then
# on-demand coverage reporting triggered by setting the COVERAGE environment variable to a non-empty value
export COVERAGE="--coverage"
elif [[ "${COMMIT_MESSAGE}" =~ ci_coverage ]]; then
# on-demand coverage reporting triggered by having 'ci_coverage' in the latest commit message
export COVERAGE="--coverage"
else
# on-demand coverage reporting disabled (default behavior, always-on coverage reporting remains enabled)
export COVERAGE="--coverage-check"
fi
if [ -n "${COMPLETE:-}" ]; then
# disable change detection triggered by setting the COMPLETE environment variable to a non-empty value
export CHANGED=""
elif [[ "${COMMIT_MESSAGE}" =~ ci_complete ]]; then
# disable change detection triggered by having 'ci_complete' in the latest commit message
export CHANGED=""
else
# enable change detection (default behavior)
export CHANGED="--changed"
fi
if [ "${IS_PULL_REQUEST:-}" == "true" ]; then
# run unstable tests which are targeted by focused changes on PRs
export UNSTABLE="--allow-unstable-changed"
else
# do not run unstable tests outside PRs
export UNSTABLE=""
fi
# remove empty core/extras module directories from PRs created prior to the repo-merge
find plugins -type d -empty -print -delete
function cleanup
{
# for complete on-demand coverage generate a report for all files with no coverage on the "sanity/5" job so we only have one copy
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/5" ]; then
stub="--stub"
# trigger coverage reporting for stubs even if no other coverage data exists
mkdir -p tests/output/coverage/
else
stub=""
fi
if [ -d tests/output/coverage/ ]; then
if find tests/output/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
process_coverage='yes' # process existing coverage files
elif [ "${stub}" ]; then
process_coverage='yes' # process coverage when stubs are enabled
else
process_coverage=''
fi
if [ "${process_coverage}" ]; then
# use python 3.7 for coverage to avoid running out of memory during coverage xml processing
# only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
virtualenv --python /usr/bin/python3.7 ~/ansible-venv
set +ux
. ~/ansible-venv/bin/activate
set -ux
# shellcheck disable=SC2086
ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
cp -a tests/output/reports/coverage=*.xml "$SHIPPABLE_RESULT_DIR/codecoverage/"
if [ "${ansible_version}" != "2.9" ]; then
# analyze and capture code coverage aggregated by integration test target
ansible-test coverage analyze targets generate -v "$SHIPPABLE_RESULT_DIR/testresults/coverage-analyze-targets.json"
fi
# upload coverage report to codecov.io only when using complete on-demand coverage
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
for file in tests/output/reports/coverage=*.xml; do
flags="${file##*/coverage=}"
flags="${flags%-powershell.xml}"
flags="${flags%.xml}"
# remove numbered component from stub files when converting to tags
flags="${flags//stub-[0-9]*/stub}"
flags="${flags//=/,}"
flags="${flags//[^a-zA-Z0-9_,]/_}"
bash <(curl -s https://codecov.io/bash) \
-f "${file}" \
-F "${flags}" \
-n "${test}" \
-t 8450ed26-4e94-4d07-8831-d2023d6d20a3 \
-X coveragepy \
-X gcov \
-X fix \
-X search \
-X xcode \
|| echo "Failed to upload code coverage report to codecov.io: ${file}"
done
fi
fi
fi
if [ -d tests/output/junit/ ]; then
cp -aT tests/output/junit/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
if [ -d tests/output/data/ ]; then
cp -a tests/output/data/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
if [ -d tests/output/bot/ ]; then
cp -aT tests/output/bot/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
}
trap cleanup EXIT
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=60
else
timeout=50
fi
ansible-test env --dump --show --timeout "${timeout}" --color -v
"tests/utils/shippable/check_matrix.py"
"tests/utils/shippable/${script}.sh" "${test}"

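As the comments above describe, a complete run with coverage upload can be requested from the commit message, for example:

```bash
git commit --allow-empty -m "ci_complete ci_coverage"
```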
tests/utils/shippable/timing.py Executable file

@ -0,0 +1,16 @@
#!/usr/bin/env python3.7
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
start = time.time()
sys.stdin.reconfigure(errors='surrogateescape')
sys.stdout.reconfigure(errors='surrogateescape')
for line in sys.stdin:
seconds = time.time() - start
sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
sys.stdout.flush()


@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -o pipefail -eu
"$@" 2>&1 | "$(dirname "$0")/timing.py"

tests/utils/shippable/units.sh Executable file

@ -0,0 +1,27 @@
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
version="${args[1]}"
group="${args[2]}"
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=90
else
timeout=30
fi
group1=()
case "${group}" in
1) options=("${group1[@]:+${group1[@]}}") ;;
esac
ansible-test env --timeout "${timeout}" --color -v
# shellcheck disable=SC2086
ansible-test units --color -v --docker default --python "${version}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
"${options[@]:+${options[@]}}" \