test: port CircleCI to GH Actions and improve sharness reporting (#9355)
Closes https://github.com/ipfs/kubo/issues/8991. Part of https://github.com/ipfs/kubo/issues/8804.
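All of the GitHub Actions workflows added below share the same trigger block and the same repository guard, so forks only run them when dispatched manually. A minimal sketch of that shared skeleton, with an illustrative job name and build step that are not part of the diff:

    name: 'ci/gh-experiment: example'
    on:
      workflow_dispatch:
      pull_request:
      push:
        branches:
          - 'master'
    jobs:
      example:
        if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
        runs-on: ubuntu-latest
        steps:
          - uses: actions/checkout@v3
          - run: make build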
@@ -143,14 +143,17 @@ jobs:
path: /tmp/circleci-test-results
sharness:
machine:
image: ubuntu-2004:202010-01
image: ubuntu-2204:2022.10.1
resource_class: << pipeline.parameters.resource_class >>
working_directory: ~/ipfs/kubo
environment:
<<: *default_environment
TEST_NO_DOCKER: 0
TEST_NO_PLUGIN: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TEST_JUNIT: 1
TEST_EXPENSIVE: 1
steps:
- run: sudo apt update
- run: |
@@ -159,7 +162,7 @@ jobs:
tar xfz go1.19.1.linux-amd64.tar.gz
echo "export PATH=$(pwd)/go/bin:\$PATH" >> ~/.bashrc
- run: go version
- run: sudo apt install socat net-tools fish
- run: sudo apt install socat net-tools fish libxml2-utils
- checkout

- run:
@@ -183,7 +186,7 @@ jobs:
command: echo "export TEST_DOCKER_HOST=$(ip -4 addr show docker0 | grep -Po 'inet \K[\d.]+')" >> $BASH_ENV
- run:
echo TEST_DOCKER_HOST=$TEST_DOCKER_HOST &&
make -O -j << pipeline.parameters.make_jobs >> coverage/sharness_tests.coverprofile test/sharness/test-results/sharness.xml TEST_GENERATE_JUNIT=1 CONTINUE_ON_S_FAILURE=1 TEST_DOCKER_HOST=$TEST_DOCKER_HOST
make -O -j << pipeline.parameters.make_jobs >> test_sharness coverage/sharness_tests.coverprofile test/sharness/test-results/sharness.xml CONTINUE_ON_S_FAILURE=1 TEST_DOCKER_HOST=$TEST_DOCKER_HOST
- run:
when: always
command: bash <(curl -s https://codecov.io/bash) -cF sharness -X search -f coverage/sharness_tests.coverprofile
@@ -345,13 +348,13 @@ jobs:
npx playwright install
working_directory: ~/ipfs/kubo/ipfs-webui
- run:
name: Running upstream tests (finish early if they fail)
name: Run ipfs-webui@main build and smoke-test to confirm the upstream repo is not broken
command: |
npm test || circleci-agent step halt
npm test
working_directory: ~/ipfs/kubo/ipfs-webui
- run:
name: Running tests with kubo built from current commit
command: npm test
name: Test ipfs-webui@main E2E against the locally built Kubo binary
command: npm run test:e2e
working_directory: ~/ipfs/kubo/ipfs-webui
environment:
IPFS_GO_EXEC: /tmp/circleci-workspace/bin/ipfs
.github/workflows/build.yml (new file, 200 lines)
@@ -0,0 +1,200 @@
name: 'ci/gh-experiment: interop'

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

env:
GO_VERSION: 1.19.1

jobs:
prepare:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
env:
TEST_NO_DOCKER: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: ${{ env.GO_VERSION }}
- uses: actions/checkout@v3
- uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- run: make build
- uses: actions/upload-artifact@v3
with:
name: kubo
path: cmd/ipfs/ipfs
ipfs-interop:
needs: [prepare]
runs-on: ubuntu-latest
strategy:
matrix:
suites:
- 'exchange-files'
- 'files pin circuit ipns cid-version-agnostic ipns-pubsub pubsub'
fail-fast: false
defaults:
run:
shell: bash
steps:
- uses: actions/setup-node@v3
with:
node-version: 16.12.0
- uses: actions/download-artifact@v3
with:
name: kubo
path: cmd/ipfs
- run: chmod +x cmd/ipfs/ipfs
- run: |
echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
id: npm-cache-dir
- uses: actions/cache@v3
with:
path: ${{ steps.npm-cache-dir.outputs.dir }}
key: ${{ runner.os }}-${{ github.job }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ github.job }}-
- run: mkdir interop
- run: |
npm init -y
npm install ipfs@^0.61.0
npm install ipfs-interop@^8.0.10
working-directory: interop
- run: npx ipfs-interop -- -t node $(sed -e 's#[^ ]*#-f test/&.js#g' <<< '${{ matrix.suites }}')
env:
LIBP2P_TCP_REUSEPORT: false
LIBP2P_ALLOW_WEAK_RSA_KEYS: 1
IPFS_GO_EXEC: ${{ github.workspace }}/cmd/ipfs/ipfs
working-directory: interop
go-ipfs-api:
needs: [prepare]
runs-on: ubuntu-latest
env:
TEST_NO_DOCKER: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: ${{ env.GO_VERSION }}
- uses: actions/download-artifact@v3
with:
name: kubo
path: cmd/ipfs
- run: chmod +x cmd/ipfs/ipfs
- uses: actions/checkout@v3
with:
repository: ipfs/go-ipfs-api
path: go-ipfs-api
- run: cmd/ipfs/ipfs daemon --init --enable-namesys-pubsub &
- run: |
while ! cmd/ipfs/ipfs id --api=/ip4/127.0.0.1/tcp/5001 2>/dev/null; do
sleep 1
done
timeout-minutes: 5
- uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- run: go test -count=1 -v ./...
working-directory: go-ipfs-api
- run: cmd/ipfs/ipfs shutdown
if: always()
go-ipfs-http-client:
needs: [prepare]
runs-on: ubuntu-latest
env:
TEST_NO_DOCKER: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: ${{ env.GO_VERSION }}
- uses: actions/download-artifact@v3
with:
name: kubo
path: cmd/ipfs
- run: chmod +x cmd/ipfs/ipfs
- uses: actions/checkout@v3
with:
repository: ipfs/go-ipfs-http-client
path: go-ipfs-http-client
- uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- run: echo '${{ github.workspace }}/cmd/ipfs' >> $GITHUB_PATH
- run: go test -count=1 -v ./...
working-directory: go-ipfs-http-client
ipfs-webui:
needs: [prepare]
runs-on: ubuntu-latest
env:
NO_SANDBOX: true
LIBP2P_TCP_REUSEPORT: false
LIBP2P_ALLOW_WEAK_RSA_KEYS: 1
E2E_IPFSD_TYPE: go
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-node@v3
with:
node-version: 16.12.0
- uses: actions/download-artifact@v3
with:
name: kubo
path: cmd/ipfs
- run: chmod +x cmd/ipfs/ipfs
- uses: actions/checkout@v3
with:
repository: ipfs/ipfs-webui
path: ipfs-webui
- run: |
echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
id: npm-cache-dir
- uses: actions/cache@v3
with:
path: ${{ steps.npm-cache-dir.outputs.dir }}
key: ${{ runner.os }}-${{ github.job }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ github.job }}-
- run: |
npm ci --prefer-offline --no-audit --progress=false
npx playwright install
working-directory: ipfs-webui
- name: Run ipfs-webui@main build and smoke-test to confirm the upstream repo is not broken
run: npm test
working-directory: ipfs-webui
- name: Test ipfs-webui@main E2E against the locally built Kubo binary
run: npm run test:e2e
env:
IPFS_GO_EXEC: ${{ github.workspace }}/cmd/ipfs/ipfs
working-directory: ipfs-webui
.github/workflows/codeql-analysis.yml (2 lines changed)
@@ -1,5 +1,5 @@
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
name: "CodeQL"
name: CodeQL

on:
workflow_dispatch:
.github/workflows/docker-build.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
name: 'ci/gh-experiment: docker-build'

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

jobs:
docker-build:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
env:
IMAGE_NAME: ipfs/kubo
WIP_IMAGE_TAG: wip
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19.1
- uses: actions/checkout@v3
- run: docker build -t $IMAGE_NAME:$WIP_IMAGE_TAG .
.github/workflows/docker-image.yml (2 lines changed)
@@ -32,7 +32,7 @@ jobs:
run: |
TAGS="$(./bin/get-docker-tags.sh $(date -u +%F))"
TAGS="${TAGS//$'\n'/'%0A'}"
echo "::set-output name=value::$(echo $TAGS)"
echo "value=$(echo $TAGS)" >> $GITHUB_OUTPUT
shell: bash

- name: Log in to Docker Hub
.github/workflows/gobuild.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
name: 'ci/gh-experiment: go build'

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

jobs:
runner:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
uses: ipfs/kubo/.github/workflows/runner.yml@ci/move-to-github-actions # TODO: change to master
gobuild:
needs: [runner]
runs-on: ${{ fromJSON(needs.runner.outputs.config).labels }}
env:
TEST_NO_DOCKER: 1
TEST_VERBOSE: 1
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19.1
- uses: actions/checkout@v3
- uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- run: make cmd/ipfs-try-build
env:
TEST_NO_FUSE: 0
- run: make cmd/ipfs-try-build
env:
TEST_NO_FUSE: 1
.github/workflows/golang-analysis.yml (9 lines changed)
@@ -1,8 +1,15 @@
on: [push, pull_request]
name: Go Checks

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

jobs:
unit:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
name: All
steps:
.github/workflows/golint.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
name: 'ci/gh-experiment: go lint'

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

jobs:
golint:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
env:
TEST_NO_DOCKER: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19.1
- uses: actions/checkout@v3
- uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- run: make -O test_go_lint
.github/workflows/gotest.yml (new file, 65 lines)
@@ -0,0 +1,65 @@
name: 'ci/gh-experiment: go test'

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

jobs:
gotest:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
env:
TEST_NO_DOCKER: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TRAVIS: 1
GIT_PAGER: cat
IPFS_CHECK_RCMGR_DEFAULTS: 1
defaults:
run:
shell: bash
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19.1
- uses: actions/checkout@v3
- uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- run: |
make -j 1 test/unit/gotest.junit.xml &&
[[ ! $(jq -s -c 'map(select(.Action == "fail")) | .[]' test/unit/gotest.json) ]]
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
if: always()
with:
name: unittests
files: coverage/unit_tests.coverprofile
- run: |
# we want to first test with the kubo version in the go.mod file
go test -v ./...

# we also want to test the examples against the current version of kubo
# however, that version might be in a fork so we need to replace the dependency

# backup the go.mod and go.sum files to restore them after we run the tests
cp go.mod go.mod.bak
cp go.sum go.sum.bak

# make sure the examples run against the current version of kubo
go mod edit -replace github.com/ipfs/kubo=./../../..
go mod tidy

go test -v ./...

# restore the go.mod and go.sum files to their original state
mv go.mod.bak go.mod
mv go.sum.bak go.sum
working-directory: docs/examples/kubo-as-a-library
- uses: actions/upload-artifact@v3
with:
name: unit
path: test/unit/gotest.junit.xml
if: always()
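The gotest job gates on the JSON test stream rather than on make's exit code: the jq filter selects records whose Action field is "fail" from test/unit/gotest.json (presumably the go test -json output captured by the make target), and the step fails if any such record exists even when the build itself succeeded. An illustrative record of the kind the filter matches, with made-up values:

    {"Time":"2022-10-07T12:00:00Z","Action":"fail","Package":"github.com/ipfs/kubo/core","Test":"TestExample","Elapsed":1.23}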
.github/workflows/runner.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
name: 'ci/gh-experiment: choose runner'

on:
workflow_call:
outputs:
config:
description: "The runner's configuration"
value: ${{ jobs.choose.outputs.config }}

jobs:
choose:
runs-on: ubuntu-latest
outputs:
config: ${{ steps.config.outputs.result }}
steps:
- uses: actions/github-script@v6
id: config
with:
script: |
if (`${context.repo.owner}/${context.repo.repo}` === 'ipfs/kubo') {
return {
labels: ['self-hosted', 'linux', 'x64', 'kubo'],
parallel: 10,
aws: true
}
} else {
return {
labels: ['ubuntu-latest'],
parallel: 3,
aws: false
}
}
- run: echo ${{ steps.config.outputs.result }}
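runner.yml is a reusable workflow (workflow_call) whose single job returns a JSON object; callers parse it with fromJSON to pick the runner labels, the sharness parallelism, and whether S3 uploads are available, as gobuild.yml above and sharness.yml below do. A minimal sketch of a caller, with an illustrative job name and a ref of @master rather than the temporary branch used in this commit:

    jobs:
      runner:
        uses: ipfs/kubo/.github/workflows/runner.yml@master
      example:
        needs: [runner]
        runs-on: ${{ fromJSON(needs.runner.outputs.config).labels }}
        steps:
          - run: echo "parallel=$PARALLEL aws=${{ fromJSON(needs.runner.outputs.config).aws }}"
            env:
              PARALLEL: ${{ fromJSON(needs.runner.outputs.config).parallel }}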
.github/workflows/sharness.yml (new file, 124 lines)
@@ -0,0 +1,124 @@
name: 'ci/gh-experiment: sharness'

on:
workflow_dispatch:
pull_request:
push:
branches:
- 'master'

jobs:
runner:
if: github.repository == 'ipfs/kubo' || github.event_name == 'workflow_dispatch'
uses: ipfs/kubo/.github/workflows/runner.yml@ci/move-to-github-actions # TODO: change to master
sharness:
needs: [runner]
runs-on: ${{ fromJSON(needs.runner.outputs.config).labels }}
defaults:
run:
shell: bash
steps:
- name: Setup Go
uses: actions/setup-go@v3
with:
go-version: 1.19.1
- name: Checkout Kubo
uses: actions/checkout@v3
with:
path: kubo
- name: Install missing tools
run: sudo apt install -y socat net-tools fish libxml2-utils
- name: Checkout IPFS Pinning Service API
uses: actions/checkout@v3
with:
repository: ipfs-shipyard/rb-pinning-service-api
ref: 773c3adbb421c551d2d89288abac3e01e1f7c3a8
path: rb-pinning-service-api
# TODO: check if docker compose (not docker-compose) is available on default gh runners
- name: Start IPFS Pinning Service API
run: |
(for i in {1..3}; do docker compose pull && break || sleep 5; done) &&
docker compose up -d
working-directory: rb-pinning-service-api
- name: Restore Go Cache
uses: protocol/cache-go-action@v1
with:
name: ${{ github.job }}
- name: Find IPFS Pinning Service API address
run: echo "TEST_DOCKER_HOST=$(ip -4 addr show docker0 | grep -Po 'inet \K[\d.]+')" >> $GITHUB_ENV
- uses: actions/cache@v3
with:
path: test/sharness/lib/dependencies
key: ${{ runner.os }}-test-generate-junit-html-${{ hashFiles('test/sharness/lib/test-generate-junit-html.sh') }}
- name: Run Sharness tests
run: |
make -O -j "$PARALLEL" \
test_sharness \
coverage/sharness_tests.coverprofile \
test/sharness/test-results/sharness.xml \
test/sharness/test-results/sharness.html \
test/sharness/test-results/sharness-html
working-directory: kubo
env:
TEST_NO_DOCKER: 0
TEST_NO_PLUGIN: 1
TEST_NO_FUSE: 1
TEST_VERBOSE: 1
TEST_JUNIT: 1
TEST_EXPENSIVE: 1
IPFS_CHECK_RCMGR_DEFAULTS: 1
CONTINUE_ON_S_FAILURE: 1
PARALLEL: ${{ fromJSON(needs.runner.outputs.config).parallel }}
- name: Upload coverage report
uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
if: failure() || success()
with:
name: sharness
files: kubo/coverage/sharness_tests.coverprofile
- name: Aggregate results
run: find kubo/test/sharness/test-results -name 't*-*.sh.*.counts' | kubo/test/sharness/lib/sharness/aggregate-results.sh > kubo/test/sharness/test-results/summary.txt
- name: 👉️ If this step failed, go to «Summary» (top left) → «HTML Report» → inspect the «Failures» column
run: |
cat kubo/test/sharness/test-results/summary.txt &&
grep 'failed\s*0' kubo/test/sharness/test-results/summary.txt
- name: Add aggregate results to the summary
if: failure() || success()
run: |
echo "# Summary" >> $GITHUB_STEP_SUMMARY
echo >> $GITHUB_STEP_SUMMARY
cat kubo/test/sharness/test-results/summary.txt >> $GITHUB_STEP_SUMMARY
- name: Upload one-page HTML report to S3
id: one-page
uses: pl-strflt/tf-aws-gh-runner/.github/actions/upload-artifact@main
if: fromJSON(needs.runner.outputs.config).aws && (failure() || success())
with:
source: kubo/test/sharness/test-results/sharness.html
destination: sharness.html
- name: Upload one-page HTML report
if: (! fromJSON(needs.runner.outputs.config).aws) && (failure() || success())
uses: actions/upload-artifact@v3
with:
name: sharness.html
path: kubo/test/sharness/test-results/sharness.html
- name: Upload full HTML report to S3
id: full
uses: pl-strflt/tf-aws-gh-runner/.github/actions/upload-artifact@main
if: fromJSON(needs.runner.outputs.config).aws && (failure() || success())
with:
source: kubo/test/sharness/test-results/sharness-html
destination: sharness-html/
- name: Upload full HTML report
if: (! fromJSON(needs.runner.outputs.config).aws) && (failure() || success())
uses: actions/upload-artifact@v3
with:
name: sharness-html
path: kubo/test/sharness/test-results/sharness-html
- name: Add S3 links to the summary
if: fromJSON(needs.runner.outputs.config).aws && (failure() || success())
run: echo "$MD" >> $GITHUB_STEP_SUMMARY
env:
MD: |
# HTML Reports

- View the [one page HTML report](${{ steps.one-page.outputs.url }})
- View the [full HTML report](${{ steps.full.outputs.url }}index.html)
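The same JUnit and HTML reports can be produced locally through the make targets this commit adds under test/sharness. A hedged sketch from the repository root; the parallelism and the set of environment toggles are illustrative, not prescribed by the diff:

    TEST_VERBOSE=1 TEST_EXPENSIVE=1 CONTINUE_ON_S_FAILURE=1 \
    make -O -j 4 \
      test_sharness \
      coverage/sharness_tests.coverprofile \
      test/sharness/test-results/sharness.xml \
      test/sharness/test-results/sharness.html \
      test/sharness/test-results/sharness-html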
Rules.mk (3 lines changed)
@@ -136,8 +136,7 @@ help:
@echo ' test_go_expensive - Run all go tests and compile on all platforms'
@echo ' test_go_race - Run go tests with the race detector enabled'
@echo ' test_go_lint - Run the `golangci-lint` vetting tool'
@echo ' test_sharness_short - Run short sharness tests'
@echo ' test_sharness_expensive - Run all sharness tests'
@echo ' test_sharness - Run sharness tests'
@echo ' coverage - Collects coverage info from unit tests and sharness'
@echo
.PHONY: help
@@ -14,7 +14,7 @@ environment:
GOPATH: c:\gopath
TEST_VERBOSE: 1
#TEST_NO_FUSE: 1
#TEST_SUITE: test_sharness_expensive
#TEST_SUITE: test_sharness
#GOFLAGS: -tags nofuse
global:
BASH: C:\cygwin\bin\bash
@@ -43,7 +43,7 @@ build_script:
- '%BASH% -lc "cd $APPVEYOR_BUILD_FOLDER; exec 0</dev/null; export PATH=$GOPATH/bin:$PATH; make nofuse"'

test_script:
- '%BASH% -lc "cd $APPVEYOR_BUILD_FOLDER; exec 0</dev/null; export PATH=$GOPATH/bin:$PATH; export GOFLAGS=''-tags nofuse''; export TEST_NO_FUSE=1; export TEST_VERBOSE=1; export TEST_SUITE=test_sharness_expensive; make $TEST_SUITE"'
- '%BASH% -lc "cd $APPVEYOR_BUILD_FOLDER; exec 0</dev/null; export PATH=$GOPATH/bin:$PATH; export GOFLAGS=''-tags nofuse''; export TEST_NO_FUSE=1; export TEST_VERBOSE=1; export TEST_EXPENSIVE=1; export TEST_SUITE=test_sharness; make $TEST_SUITE"'

#build:
#  parallel: true
@@ -4,6 +4,7 @@ codecov:
- "!travis-ci.org"
- "!ci.ipfs.team:8111"
- "!ci.ipfs.team"
- "!github.com"
notify:
require_ci_to_pass: no
after_n_builds: 2
@@ -46,7 +46,7 @@ endif
export IPFS_COVER_DIR:= $(realpath $(d))/sharnesscover/

$(d)/sharness_tests.coverprofile: export TEST_NO_PLUGIN=1
$(d)/sharness_tests.coverprofile: $(d)/ipfs cmd/ipfs/ipfs-test-cover $(d)/coverage_deps test_sharness_short
$(d)/sharness_tests.coverprofile: $(d)/ipfs cmd/ipfs/ipfs-test-cover $(d)/coverage_deps test_sharness
(cd $(@D)/sharnesscover && find . -type f | gocovmerge -list -) > $@
test/sharness/.gitignore (9 lines changed)
@@ -1,5 +1,14 @@
# symlinks to lib/sharness
/sharness.sh
/lib-sharness
# clone of sharness
lib/sharness/
# deps downloaded by lib/*.sh scripts
lib/dependencies/
# sharness files
test-results/
trash directory.*.sh/
# makefile files
plugins
# macos files
*.DS_Store
@@ -1,4 +1,4 @@
# ipfs whole tests using the [sharness framework](https://github.com/mlafeldt/sharness/)
# ipfs whole tests using the [sharness framework](https://github.com/pl-strflt/sharness/tree/feat/junit)

## Running all the tests
@@ -42,10 +42,20 @@ $(d)/aggregate: $(T_$(d))
@(cd $(@D) && ./lib/test-aggregate-results.sh)
.PHONY: $(d)/aggregate

$(d)/test-results/sharness.xml: export TEST_GENERATE_JUNIT=1
$(d)/test-results/sharness.xml: test_sharness_expensive
$(d)/test-results/sharness.xml: $(T_$(d))
@echo "*** $@ ***"
@(cd $(@D)/.. && ./lib/gen-junit-report.sh)
@(cd $(@D)/.. && ./lib/test-aggregate-junit-reports.sh)
.PHONY: $(d)/test-results/sharness.xml

$(d)/test-results/sharness-html: $(d)/test-results/sharness.xml
@echo "*** $@ ***"
@(cd $(@D)/.. && ./lib/test-generate-junit-html.sh frames)
.PHONY: $(d)/test-results/sharness-html

$(d)/test-results/sharness.html: $(d)/test-results/sharness.xml
@echo "*** $@ ***"
@(cd $(@D)/.. && ./lib/test-generate-junit-html.sh no-frames)
.PHONY: $(d)/test-results/sharness.html

$(d)/clean-test-results:
rm -rf $(@D)/test-results
@@ -62,16 +72,10 @@ $(d)/deps: $(SHARNESS_$(d)) $$(DEPS_$(d)) # use second expansion so coverage can
test_sharness_deps: $(d)/deps
.PHONY: test_sharness_deps

test_sharness_short: $(d)/aggregate
.PHONY: test_sharness_short
test_sharness: $(d)/aggregate
.PHONY: test_sharness

test_sharness_expensive: export TEST_EXPENSIVE=1
test_sharness_expensive: test_sharness_short
.PHONY: test_sharness_expensive

TEST += test_sharness_expensive
TEST_SHORT += test_sharness_short
TEST += test_sharness

include mk/footer.mk
@@ -1,250 +0,0 @@
From bc6bf844ef4e4cd468bc1ec96f2d6af738eb8d2f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Magiera?= <magik6k@gmail.com>
Date: Sat, 21 Apr 2018 22:01:45 +0200
Subject: [PATCH] Generate partial JUnit reports

---
sharness.sh | 114 +++++++++++++++++++++++++++++++++++++++++++++++++---
1 file changed, 108 insertions(+), 6 deletions(-)

diff --git a/sharness.sh b/sharness.sh
index 6750ff7..336e426 100644
--- a/sharness.sh
+++ b/sharness.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
#
# Copyright (c) 2011-2012 Mathias Lafeldt
# Copyright (c) 2005-2012 Git project
@@ -106,6 +106,10 @@ if test -n "$color"; then
test -n "$quiet" && return;;
esac
shift
+
+ if test -n "$TEST_GENERATE_JUNIT"; then
+ echo "$*" >> .junit/tout
+ fi
printf "%s" "$*"
tput sgr0
echo
@@ -115,6 +119,10 @@ else
say_color() {
test -z "$1" && test -n "$quiet" && return
shift
+
+ if test -n "$TEST_GENERATE_JUNIT"; then
+ echo "$*" >> .junit/tout
+ fi
printf "%s\n" "$*"
}
fi
@@ -129,6 +137,12 @@ say() {
say_color info "$*"
}

+esc=$(printf '\033')
+
+esc_xml() {
+ sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g; s/"/\&quot;/g; s/'"$esc"'//g; s///g;'
+}
+
test -n "$test_description" || error "Test script did not set test_description."

if test "$help" = "t"; then
@@ -251,30 +265,78 @@ test_have_prereq() {
test $total_prereq = $ok_prereq
}

+# junit_testcase generates a testcase xml file after each test
+
+junit_testcase() {
+ if test -z "$TEST_GENERATE_JUNIT"; then
+ return
+ fi
+
+ test_name=$1
+ tc_file=".junit/case-$(printf "%04d" $test_count)"
+ time_sec="$(cat .junit/time | xargs printf '%04d' | sed -e 's/\(...\)$/.\1/g')"
+
+ echo "$(expr $(cat .junit/time_total) + $(cat .junit/time) )" > .junit/time_total
+
+ shift
+ cat > "$tc_file" <<-EOF
+ <testcase name="$test_count - $(echo $test_name | esc_xml)" classname="sharness$(uname -s).${SHARNESS_TEST_NAME}" time="${time_sec}">
+ $@
+ EOF
+
+ if test -f .junit/tout; then
+ cat >> "$tc_file" <<-EOF
+ <system-out>
+ $(cat .junit/tout | esc_xml)
+ </system-out>
+ EOF
+ fi
+
+ if test -f .junit/terr; then
+ cat >> "$tc_file" <<-EOF
+ <system-err>
+ $(cat .junit/terr | esc_xml)
+ </system-err>
+ EOF
+ fi
+
+ echo "</testcase>" >> "$tc_file"
+ rm -f .junit/tout .junit/terr .junit/time
+}
+
# You are not expected to call test_ok_ and test_failure_ directly, use
# the text_expect_* functions instead.

test_ok_() {
test_success=$(($test_success + 1))
say_color "" "ok $test_count - $@"
+
+ junit_testcase "$@"
}

test_failure_() {
test_failure=$(($test_failure + 1))
say_color error "not ok $test_count - $1"
+ test_name=$1
shift
echo "$@" | sed -e 's/^/# /'
+ junit_testcase "$test_name" '<failure type="">'$(echo $@ | esc_xml)'</failure>'
+
test "$immediate" = "" || { EXIT_OK=t; exit 1; }
}

test_known_broken_ok_() {
test_fixed=$(($test_fixed + 1))
say_color error "ok $test_count - $@ # TODO known breakage vanished"
+
+ junit_testcase "$@" '<failure type="known breakage vanished"/>'
}

test_known_broken_failure_() {
test_broken=$(($test_broken + 1))
say_color warn "not ok $test_count - $@ # TODO known breakage"
+
+ junit_testcase "$@"
}

# Public: Execute commands in debug mode.
@@ -310,15 +372,25 @@ test_pause() {
test_eval_() {
# This is a separate function because some tests use
# "return" to end a test_expect_success block early.
- eval </dev/null >&3 2>&4 "$*"
+ if test -n "$TEST_GENERATE_JUNIT"; then
+ eval </dev/null > >(tee -a .junit/tout >&3) 2> >(tee -a .junit/terr >&4) "$*"
+ else
+ eval </dev/null >&3 2>&4 "$*"
+ fi
}

test_run_() {
test_cleanup=:
expecting_failure=$2
+
+ start_time_ms=$(date "+%s%3N");
test_eval_ "$1"
eval_ret=$?

+ if test -n "$TEST_GENERATE_JUNIT"; then
+ echo $(expr $(date "+%s%3N") - ${start_time_ms} ) > .junit/time;
+ fi
+
if test "$chain_lint" = "t"; then
test_eval_ "(exit 117) && $1"
if test "$?" != 117; then
@@ -355,8 +427,18 @@ test_skip_() {
of_prereq=" of $test_prereq"
fi

- say_color skip >&3 "skipping test: $@"
- say_color skip "ok $test_count # skip $1 (missing $missing_prereq${of_prereq})"
+ say_color skip >&3 "skipping test: $1"
+ say_color skip "ok $test_count # skip $1 (missing $missing_prereqm${of_prereq})"
+
+ if test -n "$TEST_GENERATE_JUNIT"; then
+ cat > ".junit/case-$(printf "%04d" $test_count)" <<-EOF
+ <testcase name="$test_count - $(echo $2 | esc_xml)" classname="sharness$(uname -s).${SHARNESS_TEST_NAME}" time="0">
+ <skipped>
+ skip $(echo $1 | esc_xml) (missing $missing_prereq${of_prereq})
+ </skipped>
+ </testcase>
+ EOF
+ fi
: true
;;
*)
@@ -403,7 +485,7 @@ test_expect_success() {
test "$#" = 3 && { test_prereq=$1; shift; } || test_prereq=
test "$#" = 2 || error "bug in the test script: not 2 or 3 parameters to test_expect_success"
export test_prereq
- if ! test_skip_ "$@"; then
+ if ! test_skip_ "$@" "$1"; then
say >&3 "expecting success: $2"
if test_run_ "$2"; then
test_ok_ "$1"
@@ -442,7 +524,7 @@ test_expect_failure() {
test "$#" = 3 && { test_prereq=$1; shift; } || test_prereq=
test "$#" = 2 || error "bug in the test script: not 2 or 3 parameters to test_expect_failure"
export test_prereq
- if ! test_skip_ "$@"; then
+ if ! test_skip_ "$@" "$1"; then
say >&3 "checking known breakage: $2"
if test_run_ "$2" expecting_failure; then
test_known_broken_ok_ "$1"
@@ -675,6 +757,7 @@ test_done() {
test_results_dir="$SHARNESS_TEST_DIRECTORY/test-results"
mkdir -p "$test_results_dir"
test_results_path="$test_results_dir/${SHARNESS_TEST_FILE%.$SHARNESS_TEST_EXTENSION}.$$.counts"
+ junit_results_path="$test_results_dir/${SHARNESS_TEST_FILE%.$SHARNESS_TEST_EXTENSION}.$$.xml.part"

cat >>"$test_results_path" <<-EOF
total $test_count
@@ -684,6 +767,16 @@ test_done() {
failed $test_failure

EOF
+
+ if test -n "$TEST_GENERATE_JUNIT"; then
+ time_sec="$(cat .junit/time_total | xargs printf "%04d" | sed -e 's/\(...\)$/.\1/g')"
+
+ cat >>"$junit_results_path" <<-EOF
+ <testsuite errors="$test_broken" failures="$((test_failure+test_fixed))" tests="$test_count" package="sharness$(uname -s).${SHARNESS_TEST_NAME}" time="${time_sec}">
+ $(find .junit -name 'case-*' | sort | xargs cat)
+ </testsuite>
+ EOF
+ fi
fi

if test "$test_fixed" != 0; then
@@ -745,6 +838,9 @@ export PATH SHARNESS_BUILD_DIRECTORY
SHARNESS_TEST_FILE="$0"
export SHARNESS_TEST_FILE

+SHARNESS_TEST_NAME=$(basename ${SHARNESS_TEST_FILE} ".sh")
+export SHARNESS_TEST_NAME
+
# Prepare test area.
test_dir="trash directory.$(basename "$SHARNESS_TEST_FILE" ".$SHARNESS_TEST_EXTENSION")"
test -n "$root" && test_dir="$root/$test_dir"
@@ -771,6 +867,12 @@ mkdir -p "$test_dir" || exit 1
# in subprocesses like git equals our $PWD (for pathname comparisons).
cd -P "$test_dir" || exit 1

+# Prepare JUnit report dir
+if test -n "$TEST_GENERATE_JUNIT"; then
+ mkdir -p .junit
+ echo 0 > .junit/time_total
+fi
+
this_test=${SHARNESS_TEST_FILE##*/}
this_test=${this_test%.$SHARNESS_TEST_EXTENSION}
for skp in $SKIP_TESTS; do
--
2.17.0
@@ -1,8 +0,0 @@
#!/usr/bin/env bash

cat > test-results/sharness.xml <<-EOF
<?xml version="1.1" encoding="UTF-8"?>
<testsuites name="sharness">
$(find test-results -name '*.xml.part' | sort | xargs cat)
</testsuites>
EOF
@@ -1,60 +1,50 @@
#!/bin/sh
# install sharness.sh
#
# Copyright (c) 2014 Juan Batiz-Benet
# Copyright (c) 2014, 2022 Juan Batiz-Benet, Piotr Galar
# MIT Licensed; see the LICENSE file in this repository.
#

# settings
version=5eee9b51b5621cec95a64018f0cc779963b230d2
patch_version=17
gitrepo=pl-strflt/sharness
githash=803df39d3cba16bb7d493dd6cd8bc5e29826da61

urlprefix=https://github.com/mlafeldt/sharness.git
if test ! -n "$clonedir" ; then
clonedir=lib
fi
sharnessdir=sharness

if test -f "$clonedir/$sharnessdir/SHARNESS_VERSION_${version}_p${patch_version}"
then
# There is the right version file. Great, we are done!
exit 0
fi
gitdir="$clonedir/$sharnessdir/.git"

die() {
echo >&2 "$@"
exit 1
}

apply_patches() {
git config --local user.email "noone@nowhere"
git config --local user.name "No One"
git am ../0001-Generate-partial-JUnit-reports.patch

touch "SHARNESS_VERSION_${version}_p${patch_version}" || die "Could not create 'SHARNESS_VERSION_${version}_p${patch_version}'"
}

checkout_version() {
git checkout "$version" || die "Could not checkout '$version'"
rm -f SHARNESS_VERSION_* || die "Could not remove 'SHARNESS_VERSION_*'"
echo "Sharness version $version is checked out!"

apply_patches
}

if test -d "$clonedir/$sharnessdir/.git"
then
# We need to update sharness!
cd "$clonedir/$sharnessdir" || die "Could not cd into '$clonedir/$sharnessdir' directory"
git fetch || die "Could not fetch to update sharness"
checkout_version
else
# We need to clone sharness!
mkdir -p "$clonedir" || die "Could not create '$clonedir' directory"
cd "$clonedir" || die "Could not cd into '$clonedir' directory"

git clone "$urlprefix" || die "Could not clone '$urlprefix'"
cd "$sharnessdir" || die "Could not cd into '$sharnessdir' directory"
checkout_version
if test -d "$clonedir/$sharnessdir"; then
giturl="git@github.com:${gitrepo}.git"
echo "Checking if $giturl is already cloned (and if its origin is correct)"
if ! test -d "$gitdir" || test "$(git --git-dir "$gitdir" remote get-url origin)" != "$giturl"; then
echo "Removing $clonedir/$sharnessdir"
rm -rf "$clonedir/$sharnessdir" || die "Could not remove $clonedir/$sharnessdir"
fi
fi

if ! test -d "$clonedir/$sharnessdir"; then
giturl="https://github.com/${gitrepo}.git"
echo "Cloning $giturl into $clonedir/$sharnessdir"
git clone "$giturl" "$clonedir/$sharnessdir" || die "Could not clone $giturl into $clonedir/$sharnessdir"
fi

echo "Changing directory to $clonedir/$sharnessdir"
cd "$clonedir/$sharnessdir" || die "Could not cd into '$clonedir/$sharnessdir' directory"

echo "Checking if $githash is already fetched"
if ! git show "$githash" >/dev/null 2>&1; then
echo "Fetching $githash"
git fetch origin "$githash" || die "Could not fetch $githash"
fi

echo "Resetting to $githash"
git reset --hard "$githash" || die "Could not reset to $githash"

exit 0
test/sharness/lib/test-aggregate-junit-reports.sh (new executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/bin/sh
#
# Script to aggregate results using Sharness
#
# Copyright (c) 2014, 2022 Christian Couder, Piotr Galar
# MIT Licensed; see the LICENSE file in this repository.
#

SHARNESS_AGGREGATE_JUNIT="lib/sharness/aggregate-junit-reports.sh"

test -f "$SHARNESS_AGGREGATE_JUNIT" || {
echo >&2 "Cannot find: $SHARNESS_AGGREGATE_JUNIT"
echo >&2 "Please check Sharness installation."
exit 1
}

ls test-results/t*-*.sh.*.xml.part | "$SHARNESS_AGGREGATE_JUNIT" > test-results/sharness.xml
test/sharness/lib/test-generate-junit-html.sh (new executable file, 58 lines)
@@ -0,0 +1,58 @@
#!/bin/bash

dependencies=(
"url=https://sourceforge.net/projects/saxon/files/Saxon-HE/11/Java/SaxonHE11-4J.zip;md5=8a4783d307c32c898f8995b8f337fd6b"
"url=https://raw.githubusercontent.com/pl-strflt/ant/c781f7d79b92cc55530245d9554682a47f46851e/src/etc/junit-frames-saxon.xsl;md5=6eb013566903a91e4959413f6ff144d0"
"url=https://raw.githubusercontent.com/pl-strflt/ant/c781f7d79b92cc55530245d9554682a47f46851e/src/etc/junit-noframes-saxon.xsl;md5=8d54882d5f9d32a7743ec675cc2e30ac"
)

dependenciesdir="lib/dependencies"
mkdir -p "$dependenciesdir"

get_md5() {
md5sum "$1" | cut -d ' ' -f 1
}

for dependency in "${dependencies[@]}"; do
url="$(echo "$dependency" | cut -d ';' -f 1 | cut -d '=' -f 2)"
md5="$(echo "$dependency" | cut -d ';' -f 2 | cut -d '=' -f 2)"
filename="$(basename "$url")"
if test -f "$dependenciesdir/$filename" && test "$(get_md5 "$dependenciesdir/$filename")" = "$md5"; then
echo "Using cached $filename"
else
echo "Downloading $filename"
curl -L --max-redirs 5 --retry 5 --no-progress-meter --output "$dependenciesdir/$filename" "$url"
actual_md5="$(get_md5 "$dependenciesdir/$filename")"
if test "$actual_md5" != "$md5"; then
echo "Downloaded $filename has wrong md5sum ('$actual_md5' != '$md5')"
exit 1
fi
dirname=${filename%.*}
extension=${filename#$dirname.}
if test "$extension" = "zip"; then
echo "Removing old $dependenciesdir/$dirname"
rm -rf "$dependenciesdir/$dirname"
echo "Unzipping $dependenciesdir/$filename"
unzip "$dependenciesdir/$filename" -d "$dependenciesdir/$dirname"
fi
fi
done

case "$1" in
"frames")
java -jar lib/dependencies/SaxonHE11-4J/saxon-he-11.4.jar \
-s:test-results/sharness.xml \
-xsl:lib/dependencies/junit-frames-saxon.xsl \
output.dir=$(pwd)/test-results/sharness-html
;;
"no-frames")
java -jar lib/dependencies/SaxonHE11-4J/saxon-he-11.4.jar \
-s:test-results/sharness.xml \
-xsl:lib/dependencies/junit-noframes-saxon.xsl \
-o:test-results/sharness.html
;;
*)
echo "Usage: $0 [frames|no-frames]"
exit 1
;;
esac
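The Makefile invokes this script from the test/sharness directory once the aggregated test-results/sharness.xml exists; it can be run by hand the same way, assuming java is on PATH for Saxon:

    cd test/sharness
    ./lib/test-generate-junit-html.sh no-frames   # one-page test-results/sharness.html
    ./lib/test-generate-junit-html.sh frames      # multi-page test-results/sharness-html/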
@@ -3,7 +3,7 @@
# Copyright (c) 2014 Christian Couder
# MIT Licensed; see the LICENSE file in this repository.
#
# We are using sharness (https://github.com/mlafeldt/sharness)
# We are using sharness (https://github.com/pl-strflt/sharness/tree/feat/junit)
# which was extracted from the Git test framework.

# use the ipfs tool to test against
@@ -27,14 +27,17 @@ fi
# to pass through in some cases.
test "$TEST_VERBOSE" = 1 && verbose=t
test "$TEST_IMMEDIATE" = 1 && immediate=t
test "$TEST_JUNIT" = 1 && junit=t
test "$TEST_NO_COLOR" = 1 && no_color=t
# source the common hashes first.
. lib/test-lib-hashes.sh

SHARNESS_LIB="lib/sharness/sharness.sh"
ln -sf lib/sharness/sharness.sh .
ln -sf lib/sharness/lib-sharness .

. "$SHARNESS_LIB" || {
echo >&2 "Cannot source: $SHARNESS_LIB"
. "sharness.sh" || {
echo >&2 "Cannot source: sharness.sh"
echo >&2 "Please check Sharness installation."
exit 1
}
@@ -106,10 +109,15 @@ expr "$TEST_OS" : "CYGWIN_NT" >/dev/null || test_set_prereq STD_ERR_MSG

if test "$TEST_VERBOSE" = 1; then
echo '# TEST_VERBOSE='"$TEST_VERBOSE"
echo '# TEST_IMMEDIATE='"$TEST_IMMEDIATE"
echo '# TEST_NO_FUSE='"$TEST_NO_FUSE"
echo '# TEST_NO_DOCKER='"$TEST_NO_DOCKER"
echo '# TEST_NO_PLUGIN='"$TEST_NO_PLUGIN"
echo '# TEST_EXPENSIVE='"$TEST_EXPENSIVE"
echo '# TEST_OS='"$TEST_OS"
echo '# TEST_JUNIT='"$TEST_JUNIT"
echo '# TEST_NO_COLOR='"$TEST_NO_COLOR"
echo '# TEST_ULIMIT_PRESET='"$TEST_ULIMIT_PRESET"
fi

# source our generic test lib
@@ -541,4 +549,3 @@ purge_blockstore() {
[[ -z "$( ipfs repo gc )" ]]
'
}
@@ -121,14 +121,14 @@ test_expect_success "GET invalid IPNS root returns 400 (Bad Request)" '
test_curl_resp_http_code "http://127.0.0.1:$port/ipns/QmInvalid/pleaseDontAddMe" "HTTP/1.1 400 Bad Request"
'

test_expect_failure "GET IPNS path succeeds" '
test_expect_success "GET IPNS path succeeds" '
ipfs name publish --allow-offline "$HASH" &&
PEERID=$(ipfs config Identity.PeerID) &&
test_check_peerid "$PEERID" &&
curl -sfo actual "http://127.0.0.1:$port/ipns/$PEERID"
'

test_expect_failure "GET IPNS path output looks good" '
test_expect_success "GET IPNS path output looks good" '
test_cmp expected actual
'
@@ -111,7 +111,7 @@ test_launch_ipfs_daemon_without_network
'

test_expect_success "GET for application/vnd.ipld.car with query filename includes Content-Disposition with custom filename" '
curl -svX GET -H "Accept: application/vnd.ipld.car" "http://127.0.0.1:$GWAY_PORT/ipfs/$ROOT_DIR_CID/subdir/ascii.txt?filename=foobar.car" > curl_output_filename 2>&1 &&
curl -svX GET -H "Accept: application/vnd.ipld.car" "http://127.0.0.1:$GWAY_PORT/ipfs/$ROOT_DIR_CID/subdir/ascii.txt?filename=foobar.car" >/dev/null 2>curl_output_filename &&
cat curl_output_filename &&
grep "< Content-Disposition: attachment\; filename=\"foobar.car\"" curl_output_filename
'
@@ -72,6 +72,12 @@ flaky_advanced_test() {
test_expect_success "shut down nodes" '
iptb stop && iptb_wait_stop
'

# NOTE: data transferred stats checks are flaky
# trying to debug them by printing out the stats hides the flakiness
# my theory is that the extra time cat calls take to print out the stats
# allow for proper cleanup to happen
go-sleep 1s
}

run_advanced_test() {
@@ -35,15 +35,6 @@ test_resolve_setup_name() {
'
}

test_resolve_setup_name_fail() {
local key="$1"
local ref="$2"

test_expect_failure "resolve: prepare $key" '
ipfs name publish --key="$key" --allow-offline "$ref"
'
}

test_resolve() {
src=$1
dst=$2
@@ -129,23 +120,7 @@ test_resolve_cmd_b32() {
'
}

#todo remove this once the online resolve is fixed
test_resolve_fail() {
src=$1
dst=$2

test_expect_failure "resolve succeeds: $src" '
ipfs resolve "$src" >actual
'

test_expect_failure "resolved correctly: $src -> $dst" '
printf "$dst" >expected &&
test_cmp expected actual
'
}

test_resolve_cmd_fail() {
test_resolve_cmd_success() {
test_resolve "/ipfs/$a_hash" "/ipfs/$a_hash"
test_resolve "/ipfs/$a_hash/b" "/ipfs/$b_hash"
test_resolve "/ipfs/$a_hash/b/c" "/ipfs/$c_hash"
@@ -155,23 +130,16 @@ test_resolve_cmd_fail() {
test_resolve "/ipld/$dag_hash/i/j" "/ipld/$dag_hash/i/j"
test_resolve "/ipld/$dag_hash/i" "/ipld/$dag_hash/i"

# At the moment, publishing _fails_ because we fail to put to the DHT.
# However, resolving succeeds because we resolve the record we put to our own
# node.
#
# We should find a nice way to truly support offline publishing. But this
# behavior isn't terrible.

test_resolve_setup_name_fail "self" "/ipfs/$a_hash"
test_resolve_setup_name "self" "/ipfs/$a_hash"
test_resolve "/ipns/$self_hash" "/ipfs/$a_hash"
test_resolve "/ipns/$self_hash/b" "/ipfs/$b_hash"
test_resolve "/ipns/$self_hash/b/c" "/ipfs/$c_hash"

test_resolve_setup_name_fail "self" "/ipfs/$b_hash"
test_resolve_setup_name "self" "/ipfs/$b_hash"
test_resolve "/ipns/$self_hash" "/ipfs/$b_hash"
test_resolve "/ipns/$self_hash/c" "/ipfs/$c_hash"

test_resolve_setup_name_fail "self" "/ipfs/$c_hash"
test_resolve_setup_name "self" "/ipfs/$c_hash"
test_resolve "/ipns/$self_hash" "/ipfs/$c_hash"
}

@@ -181,7 +149,7 @@ test_resolve_cmd_b32

# should work online
test_launch_ipfs_daemon
test_resolve_cmd_fail
test_resolve_cmd_success
test_kill_ipfs_daemon

test_done
@@ -79,9 +79,9 @@ test_expect_success "check that init script configs were applied" '
'

test_expect_success "simple ipfs add/cat can be run in docker container" '
expected="Hello Worlds" &&
HASH=$(docker_exec "$DOC_ID" "echo $(cat expected) | ipfs add | cut -d' ' -f2") &&
docker_exec "$DOC_ID" "ipfs cat $HASH" >actual &&
echo "Hello Worlds" | tr -d "[:cntrl:]" > expected &&
HASH=$(docker_exec "$DOC_ID" "echo $(cat expected) | ipfs add -q" | tr -d "[:cntrl:]") &&
docker_exec "$DOC_ID" "ipfs cat $HASH" | tr -d "[:cntrl:]" > actual &&
test_cmp expected actual
'

@@ -102,4 +102,3 @@ test_expect_success "stop docker container" '
docker_rm "$DOC_ID"
docker_rmi "$IMAGE_ID"
test_done