add compose regression to ci

to prevent any regressions, we should be running regression tests using
compose.

Signed-off-by: baude <bbaude@redhat.com>
This commit is contained in:
baude
2020-12-02 08:45:08 -06:00
parent 47af0afbd1
commit 82d7b9f2e2
24 changed files with 157 additions and 16 deletions

View File

@ -30,7 +30,7 @@ env:
PRIOR_UBUNTU_NAME: "ubuntu-19" PRIOR_UBUNTU_NAME: "ubuntu-19"
# Google-cloud VM Images # Google-cloud VM Images
IMAGE_SUFFIX: "c5402398833246208" IMAGE_SUFFIX: "c4704091098054656"
FEDORA_CACHE_IMAGE_NAME: "fedora-${IMAGE_SUFFIX}" FEDORA_CACHE_IMAGE_NAME: "fedora-${IMAGE_SUFFIX}"
PRIOR_FEDORA_CACHE_IMAGE_NAME: "prior-fedora-${IMAGE_SUFFIX}" PRIOR_FEDORA_CACHE_IMAGE_NAME: "prior-fedora-${IMAGE_SUFFIX}"
UBUNTU_CACHE_IMAGE_NAME: "ubuntu-${IMAGE_SUFFIX}" UBUNTU_CACHE_IMAGE_NAME: "ubuntu-${IMAGE_SUFFIX}"
@ -434,6 +434,21 @@ apiv2_test_task:
podman_system_info_script: '$SCRIPT_BASE/logcollector.sh podman' podman_system_info_script: '$SCRIPT_BASE/logcollector.sh podman'
time_script: '$SCRIPT_BASE/logcollector.sh time' time_script: '$SCRIPT_BASE/logcollector.sh time'
compose_test_task:
name: "compose test on $DISTRO_NV"
alias: compose_test
depends_on:
- validate
gce_instance: *standardvm
env:
<<: *stdenvars
TEST_FLAVOR: compose
clone_script: *noop # Comes from cache
gopath_cache: *ro_gopath_cache
setup_script: *setup
main_script: *main
always: *logs_artifacts
# Execute the podman integration tests on all primary platforms and release # Execute the podman integration tests on all primary platforms and release
# versions, as root, without involving the podman-remote client. # versions, as root, without involving the podman-remote client.
@ -619,6 +634,7 @@ success_task:
- docker-py_test - docker-py_test
- unit_test - unit_test
- apiv2_test - apiv2_test
- compose_test
- local_integration_test - local_integration_test
- remote_integration_test - remote_integration_test
- rootless_integration_test - rootless_integration_test

View File

@ -73,6 +73,10 @@ function _run_apiv2() {
make localapiv2 |& logformatter make localapiv2 |& logformatter
} }
function _run_compose() {
./test/compose/test-compose |& logformatter
}
function _run_int() { function _run_int() {
dotest integration dotest integration
} }

View File

@ -195,6 +195,7 @@ case "$TEST_FLAVOR" in
build) make clean ;; build) make clean ;;
unit) ;; unit) ;;
apiv2) ;& # use next item apiv2) ;& # use next item
compose) ;&
int) ;& int) ;&
sys) ;& sys) ;&
bindings) ;& bindings) ;&

View File

@ -0,0 +1,3 @@
9200 You Know, for Search
9600 "status":"green"
5601 Kibana

View File

@ -0,0 +1,18 @@
# Compose regression test: two services sharing the named volume "data".
# "writer" stores $PODMAN_MSG in /data/message (host port 5000);
# "reader" serves the same file back on host port 5001.
# NOTE(review): indentation was lost in this rendering — the real file
# must nest services/ports/volumes per the compose v3 schema.
version: '3'
services:
writer:
environment:
- PODMAN_MSG=podman_rulez
build: write
ports:
- '5000:5000'
volumes:
- data:/data
reader:
build: read
ports:
- '5001:5000'
volumes:
- data:/data
volumes:
data:

View File

@ -0,0 +1,5 @@
# Image for the "reader" service of the shared-volume compose test.
# podman_python is presumably the alpine+flask base image built elsewhere
# in this commit — TODO confirm how the tag is produced by the harness.
FROM podman_python
WORKDIR /app
COPY . /app
# Run the bundled Flask app by default.
ENTRYPOINT ["python3"]
CMD ["app.py"]

View File

@ -0,0 +1,10 @@
# Flask app for the "reader" side of the shared-volume compose test:
# serves back the message that the companion "writer" service stored
# in /data/message on the shared volume.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def hello():
    # Use a context manager so the file handle is closed even if read()
    # raises (the original opened the file and never closed it, leaking
    # one handle per request).
    with open("/data/message", "r") as f:
        return f.read()


if __name__ == '__main__':
    # Bind to all interfaces so the published container port is reachable.
    app.run(host='0.0.0.0')

View File

@ -0,0 +1,5 @@
# Image for the "writer" service of the shared-volume compose test.
# podman_python is presumably the alpine+flask base image built elsewhere
# in this commit — TODO confirm how the tag is produced by the harness.
FROM podman_python
WORKDIR /app
COPY . /app
# Run the bundled Flask app by default.
ENTRYPOINT ["python3"]
CMD ["app.py"]

View File

@ -0,0 +1,13 @@
# Flask app for the "writer" side of the shared-volume compose test:
# on each request it writes $PODMAN_MSG into /data/message on the shared
# volume and returns "done"; the "reader" service serves the file back.
from flask import Flask
import os

app = Flask(__name__)


@app.route('/')
def hello():
    # PODMAN_MSG is injected by docker-compose; default to "" so a missing
    # variable writes an empty message instead of raising TypeError
    # (f.write(None)) and returning HTTP 500.
    msg = os.getenv("PODMAN_MSG", "")
    # Context manager closes (and flushes) the handle on every path,
    # including when write() raises — the explicit close() did not.
    with open("/data/message", "w") as f:
        f.write(msg)
    return "done"


if __name__ == '__main__':
    # Bind to all interfaces so the published container port is reachable.
    app.run(host='0.0.0.0')

View File

@ -0,0 +1,10 @@
# Compose regression test for bind mounts and labels: one service with a
# read-only host bind mount (/tmp/mount -> /data) and a container label
# that the harness checks via a podman ps filter (see the README).
# NOTE(review): indentation was lost in this rendering — the real file
# must nest services/ports/volumes/labels per the compose v3 schema.
version: '3'
services:
web:
build: frontend
ports:
- '5000:5000'
volumes:
- /tmp/mount:/data:ro
labels:
- "io.podman=the_best"

View File

@ -0,0 +1,5 @@
# Image for the bind-mount/label compose test service.
# podman_python is presumably the alpine+flask base image built elsewhere
# in this commit — TODO confirm how the tag is produced by the harness.
FROM podman_python
WORKDIR /app
COPY . /app
# Run the bundled Flask app by default.
ENTRYPOINT ["python3"]
CMD ["app.py"]

View File

@ -0,0 +1,10 @@
# Flask app for the bind-mount compose test: serves back the contents of
# /data/message, which the harness pre-populates on the host side of the
# read-only mount.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def hello():
    # Use a context manager so the file handle is closed even if read()
    # raises (the original opened the file and never closed it, leaking
    # one handle per request).
    with open("/data/message") as f:
        return f.read()


if __name__ == '__main__':
    # Bind to all interfaces so the published container port is reachable.
    app.run(host='0.0.0.0')

View File

@ -0,0 +1,5 @@
This test creates a container with a bind mount (not a volume) and also adds a label to the container.
Validate by curling http://localhost:5000; the response should be the same message that was written into the mounted file.
Also verify the label with podman ps, using a filter that matches only that container.

View File

@ -0,0 +1,6 @@
# Compose regression test for port mapping: publishes the container's
# port 5000 on host port 5001 so the harness can curl localhost:5001.
# NOTE(review): indentation was lost in this rendering — the real file
# must nest services/ports per the compose v3 schema.
version: '3'
services:
web:
build: frontend
ports:
- '5001:5000'

View File

@ -0,0 +1,5 @@
# Image for the port-mapping compose test service.
# podman_python is presumably the alpine+flask base image built elsewhere
# in this commit — TODO confirm how the tag is produced by the harness.
FROM podman_python
WORKDIR /app
COPY . /app
# Run the bundled Flask app by default.
ENTRYPOINT ["python3"]
CMD ["app.py"]

View File

@ -0,0 +1,9 @@
# Trivial Flask app used by the compose port-mapping test: every request
# to / returns a fixed string that the harness curls and matches.
# NOTE(review): Python indentation was stripped by this rendering; the
# real file indents the function and __main__ bodies.
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return "Podman rulez!"
if __name__ == '__main__':
app.run(host='0.0.0.0')

View File

@ -0,0 +1,6 @@
# Compose regression test for a simple build + default port mapping:
# publishes the container's port 5000 on host port 5000.
# NOTE(review): indentation was lost in this rendering — the real file
# must nest services/ports per the compose v3 schema.
version: '3'
services:
web:
build: frontend
ports:
- '5000:5000'

View File

@ -0,0 +1,6 @@
# Base image for the compose test apps: alpine plus python3/pip/flask.
# Combining update+add+pip in one RUN keeps this a single layer.
FROM alpine
WORKDIR /app
RUN apk update && apk add py3-pip && pip3 install flask
COPY . /app
# Run the bundled Flask app by default.
ENTRYPOINT ["python3"]
CMD ["app.py"]

View File

@ -0,0 +1,9 @@
# Trivial Flask app used by the compose build/port test: every request
# to / returns a fixed string that the harness curls and matches.
# NOTE(review): Python indentation was stripped by this rendering; the
# real file indents the function and __main__ bodies.
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return "Podman rulez!"
if __name__ == '__main__':
app.run(host='0.0.0.0')

View File

@ -81,12 +81,13 @@ function like() {
# "is" (equality) is a subset of "like", but one that expr fails on if # "is" (equality) is a subset of "like", but one that expr fails on if
# the expected result has shell-special characters like '['. Treat it # the expected result has shell-special characters like '['. Treat it
# as a special case. # as a special case.
if [[ $actual = $expect ]]; then
if [[ "$actual" = "$expect" ]]; then
_show_ok 1 "$testname=$expect" _show_ok 1 "$testname=$expect"
return return
fi fi
if expr "$actual" : "$expect" &>/dev/null; then if expr "$actual" : ".*$expect" &>/dev/null; then
# On success, include expected value; this helps readers understand # On success, include expected value; this helps readers understand
_show_ok 1 "$testname ('$actual') ~ $expect" _show_ok 1 "$testname ('$actual') ~ $expect"
return return
@ -132,9 +133,9 @@ function _show_ok() {
# Failed # Failed
local expect=$3 local expect=$3
local actual=$4 local actual=$4
echo -e "${red}not ok $count $testname${reset}" printf "${red}not ok $count $testname${reset}\n"
echo -e "${red}# expected: $expect${reset}" printf "${red}# expected: %s${reset}\n" "$expect"
echo -e "${red}# actual: ${bold}$actual${reset}" printf "${red}# actual: ${bold}%s${reset}\n" "$actual"
echo "not ok $count $testname" >>$LOG echo "not ok $count $testname" >>$LOG
echo " expected: $expect" >>$LOG echo " expected: $expect" >>$LOG
@ -347,6 +348,7 @@ done
TESTS_DIR=$WORKDIR/awesome-compose TESTS_DIR=$WORKDIR/awesome-compose
git clone $AWESOME_COMPOSE $TESTS_DIR git clone $AWESOME_COMPOSE $TESTS_DIR
git -C $TESTS_DIR checkout -q a3c38822277bcca04abbadf34120dcff808db3ec
# Identify the tests to run. If called with args, use those as globs. # Identify the tests to run. If called with args, use those as globs.
tests_to_run=() tests_to_run=()
@ -402,14 +404,10 @@ for t in ${tests_to_run[@]}; do
docker-compose down >>$logfile 2>&1 docker-compose down >>$logfile 2>&1
exit 1 exit 1
fi fi
echo "got here: $actual"
like "$actual" "$expect" "$testname : port $port" like "$actual" "$expect" "$testname : port $port"
done < $curls done < $curls
fi fi
echo "OK, press ENTER to stop"
read x
docker-compose down &> $logfile docker-compose down &> $logfile
if [[ $? -eq 0 ]]; then if [[ $? -eq 0 ]]; then
_show_ok 1 "$testname - down" _show_ok 1 "$testname - down"
@ -434,8 +432,8 @@ done
test_count=$(<$testcounter_file) test_count=$(<$testcounter_file)
failure_count=$(<$failures_file) failure_count=$(<$failures_file)
#if [ -z "$PODMAN_TESTS_KEEP_WORKDIR" ]; then if [ -z "$PODMAN_TESTS_KEEP_WORKDIR" ]; then
# rm -rf $WORKDIR rm -rf $WORKDIR
#fi fi
exit $failure_count exit $failure_count

View File

@ -1,3 +0,0 @@
9200 elasticsearch
9600 logstash
5601 kibana