Drone: Delete migrated workflows (#106870)

* Drone: Remove verify_storybook pipeline

Already exists in GitHub Actions.

* Drone: Remove lint_backend pipeline

Already exists in GHA.

* Drone: Remove backend tests

These already exist in GitHub Actions.

* Drone: Remove shellcheck pipeline

* Drone: Remove unused images

* Drone: Remove lint_frontend pipeline

Already in GHA.

* Drone: Remove test_frontend pipeline

Already exists in GHA.

* Drone: Remove integration_benchmarks pipeline

This was last used in January. GHA does not have it, but it is relatively trivial to run locally.
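For context: the removed pipeline only wrapped a plain go test benchmark invocation (see the deleted integration_benchmarks_step further down in this diff). A rough local equivalent is sketched below; the package selection is only an example, and the postgres/mysql variants additionally need the GRAFANA_TEST_DB and host environment variables from the deleted benchmarks.star.

    # Sketch of a local run (sqlite variant); the package list is illustrative.
    export GO_PACKAGES=./pkg/services/ngalert/...
    # Same invocation the removed Drone step used; lower -count for a quicker run.
    go test -v -run='^$' -benchmem -timeout=1h -count=8 -bench=. $GO_PACKAGES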
Author: Mariell Hoversholm
Date: 2025-06-18 21:03:23 +02:00
Committed by: GitHub
Parent: 17ad7af3b8
Commit: 8598fa213a
18 changed files with 2 additions and 2918 deletions

View File

@@ -11,26 +11,10 @@ load(
"docs_pipelines",
"trigger_docs_main",
)
load(
"scripts/drone/pipelines/integration_tests.star",
"integration_tests",
)
load(
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/trigger_downstream.star",
"enterprise_downstream_pipeline",
)
load(
"scripts/drone/pipelines/verify_storybook.star",
"verify_storybook",
)
load(
"scripts/drone/utils/utils.star",
"failure_template",
@@ -60,20 +44,14 @@ def main_pipelines():
# Let's make an effort to reduce the amount of string constants in "depends_on" lists.
pipelines = [
docs_pipelines(ver_mode, trigger_docs_main()),
test_backend(trigger, ver_mode),
lint_backend_pipeline(trigger, ver_mode),
verify_storybook(trigger, ver_mode),
build_e2e(trigger, ver_mode),
integration_tests(trigger, prefix = ver_mode, ver_mode = ver_mode),
enterprise_downstream_pipeline(),
notify_pipeline(
name = "main-notify",
slack_channel = "grafana-ci-notifications",
trigger = dict(trigger, status = ["failure"]),
depends_on = [
"main-test-backend",
"main-build-e2e-publish",
"main-integration-tests",
],
template = failure_template,
secret = "slack_webhook",

View File

@@ -3,10 +3,6 @@ This module returns all pipelines used in the event of a pull request.
It also includes a function generating a PR trigger from a list of included and excluded paths.
"""
load(
"scripts/drone/pipelines/benchmarks.star",
"integration_benchmarks",
)
load(
"scripts/drone/pipelines/build.star",
"build_e2e",
@@ -16,18 +12,6 @@ load(
"docs_pipelines",
"trigger_docs_pr",
)
load(
"scripts/drone/pipelines/integration_tests.star",
"integration_tests",
)
load(
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/verify_drone.star",
"verify_drone",
@@ -36,10 +20,6 @@ load(
"scripts/drone/pipelines/verify_starlark.star",
"verify_starlark",
)
load(
"scripts/drone/pipelines/verify_storybook.star",
"verify_storybook",
)
ver_mode = "pr"
trigger = {
@@ -69,68 +49,8 @@ def pr_pipelines():
),
ver_mode,
),
verify_storybook(
get_pr_trigger(
include_paths = ["packages/grafana-ui/**"],
),
ver_mode,
),
test_backend(
get_pr_trigger(
include_paths = [
"Makefile",
"pkg/**",
"packaging/**",
".drone.yml",
"conf/**",
"go.sum",
"go.mod",
"public/app/plugins/**/plugin.json",
"docs/sources/setup-grafana/configure-grafana/feature-toggles/**",
"devenv/**",
"apps/**",
],
),
ver_mode,
),
lint_backend_pipeline(
get_pr_trigger(
include_paths = [
".golangci.toml",
"Makefile",
"pkg/**",
"packaging/**",
".drone.yml",
"conf/**",
"go.sum",
"go.mod",
"public/app/plugins/**/plugin.json",
"devenv/**",
".bingo/**",
"apps/**",
],
),
ver_mode,
),
build_e2e(trigger, ver_mode),
integration_tests(
get_pr_trigger(
include_paths = [
"pkg/**",
"packaging/**",
".drone.yml",
"conf/**",
"go.sum",
"go.mod",
"public/app/plugins/**/plugin.json",
],
),
prefix = ver_mode,
),
docs_pipelines(ver_mode, trigger_docs_pr()),
integration_benchmarks(
prefix = ver_mode,
),
]
def get_pr_trigger(include_paths = None, exclude_paths = None):

View File

@@ -2,11 +2,6 @@
This module returns all the pipelines used in the event of a release along with supporting functions.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
@@ -16,19 +11,9 @@ load(
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"download_grabpl_step",
"identify_runner_step",
"memcached_integration_tests_steps",
"mysql_integration_tests_steps",
"postgres_integration_tests_steps",
"publish_grafanacom_step",
"publish_linux_packages_step",
"redis_integration_tests_steps",
"remote_alertmanager_integration_tests_steps",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"verify_grafanacom_step",
"wire_install_step",
"yarn_install_step",
)
load(
@@ -255,47 +240,6 @@ def publish_npm_pipelines():
),
]
def integration_test_pipelines():
"""
Trigger integration tests on release builds
These pipelines should be triggered when we have a release that does a lot of
cherry-picking and we still want to have all the integration tests run on that
particular build.
Returns:
List of Drone pipelines
"""
trigger = {
"event": ["promote"],
"target": "integration-tests",
}
pipelines = []
volumes = integration_test_services_volumes()
integration_test_steps = postgres_integration_tests_steps() + \
mysql_integration_tests_steps("mysql80", "8.0") + \
redis_integration_tests_steps() + \
memcached_integration_tests_steps() + \
remote_alertmanager_integration_tests_steps()
pipelines.append(pipeline(
name = "integration-tests",
trigger = trigger,
services = integration_test_services(),
steps = [
download_grabpl_step(),
identify_runner_step(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
] +
integration_test_steps,
environment = {"EDITION": "oss"},
volumes = volumes,
))
return pipelines
def verify_release_pipeline(
name = "verify-prerelease-assets",
bucket = from_secret(prerelease_bucket),

View File

@@ -2,26 +2,6 @@
This module returns all the pipelines used in the event of pushes to an RRC branch.
"""
load(
"scripts/drone/pipelines/integration_tests.star",
"integration_tests",
)
load(
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
)
load(
"scripts/drone/pipelines/lint_frontend.star",
"lint_frontend_pipeline",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/steps/lib.star",
"enterprise_downstream_step",
@@ -48,11 +28,6 @@ trigger = {
def rrc_patch_pipelines():
pipelines = [
test_frontend(trigger, ver_mode),
lint_frontend_pipeline(trigger, ver_mode),
test_backend(trigger, ver_mode),
lint_backend_pipeline(trigger, ver_mode),
integration_tests(trigger, prefix = ver_mode, ver_mode = ver_mode),
rrc_enterprise_downstream_pipeline(trigger = trigger),
]
@@ -68,6 +43,5 @@ def rrc_enterprise_downstream_pipeline(trigger):
name = "rrc-trigger-downstream",
trigger = trigger,
steps = steps,
depends_on = ["rrc-integration-tests"],
environment = environment,
)

View File

@@ -1,84 +0,0 @@
"""
This module returns the pipeline used for integration benchmarks.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
"github_app_pipeline_volumes",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"enterprise_setup_step",
"integration_benchmarks_step",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def integration_benchmarks(prefix):
"""Generate a pipeline for integration tests.
Args:
prefix: used in the naming of the pipeline.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
services = integration_test_services()
volumes = integration_test_services_volumes() + github_app_pipeline_volumes()
# In pull requests, attempt to clone grafana enterprise.
init_steps = [
github_app_generate_token_step(),
enterprise_setup_step(isPromote = True),
]
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
# Ensure that verify_gen_cue happens after we clone enterprise
# At the time of writing this, verify_gen_cue is depended on by the wire step, which is what everything else depends on.
verify_step["depends_on"].append("clone-enterprise")
verify_jsonnet_step["depends_on"].append("clone-enterprise")
init_steps += [
compile_build_cmd(),
verify_step,
verify_jsonnet_step,
wire_install_step(),
]
benchmark_steps = integration_benchmarks_step("sqlite") + \
integration_benchmarks_step("postgres", {
"PGPASSWORD": "grafanatest",
"GRAFANA_TEST_DB": "postgres",
"POSTGRES_HOST": "postgres",
}) + \
integration_benchmarks_step("mysql-8.0", {
"GRAFANA_TEST_DB": "mysql",
"MYSQL_HOST": "mysql80",
})
return pipeline(
name = "{}-integration-benchmarks".format(prefix),
trigger = {
"event": ["promote"],
"target": ["gobenchmarks"],
},
environment = environment,
services = services,
volumes = volumes,
steps = init_steps + benchmark_steps,
)

View File

@@ -1,86 +0,0 @@
"""
This module returns the pipeline used for integration tests.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
"github_app_pipeline_volumes",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"download_grabpl_step",
"enterprise_setup_step",
"identify_runner_step",
"memcached_integration_tests_steps",
"mysql_integration_tests_steps",
"postgres_integration_tests_steps",
"redis_integration_tests_steps",
"remote_alertmanager_integration_tests_steps",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def integration_tests(trigger, prefix, ver_mode = "pr"):
"""Generate a pipeline for integration tests.
Args:
trigger: controls which events can trigger the pipeline execution.
prefix: used in the naming of the pipeline.
ver_mode: defines the event / origin of this build. In this function, if it is set to pr, then it will attempt to clone grafana-enterprise. Otherwise it has no effect.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
services = integration_test_services()
volumes = integration_test_services_volumes()
init_steps = []
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
init_steps.append(github_app_generate_token_step())
init_steps.append(enterprise_setup_step())
volumes += github_app_pipeline_volumes()
init_steps += [
download_grabpl_step(),
compile_build_cmd(),
identify_runner_step(),
verify_step,
verify_jsonnet_step,
wire_install_step(),
]
# test_steps = [a, b] + [c, d] + [e, f]...
test_steps = postgres_integration_tests_steps() + \
mysql_integration_tests_steps("mysql80", "8.0") + \
redis_integration_tests_steps() + \
memcached_integration_tests_steps() + \
remote_alertmanager_integration_tests_steps()
return pipeline(
name = "{}-integration-tests".format(prefix),
trigger = trigger,
environment = environment,
services = services,
volumes = volumes,
steps = init_steps + test_steps,
)

View File

@@ -1,71 +0,0 @@
"""
This module returns the pipeline used for linting backend code.
"""
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
"github_app_pipeline_volumes",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"enterprise_setup_step",
"identify_runner_step",
"lint_drone_step",
"validate_modfile_step",
"validate_openapi_spec_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def lint_backend_pipeline(trigger, ver_mode):
"""Generates the pipelines used linting backend code.
Args:
trigger: controls which events can trigger the pipeline execution.
ver_mode: used in the naming of the pipeline.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
wire_step = wire_install_step()
wire_step.update({"depends_on": []})
init_steps = [
identify_runner_step(),
compile_build_cmd(),
]
volumes = []
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
init_steps.append(github_app_generate_token_step())
init_steps.append(enterprise_setup_step())
volumes += github_app_pipeline_volumes()
init_steps.append(wire_step)
test_steps = [
validate_modfile_step(),
validate_openapi_spec_step(),
]
if ver_mode == "main":
test_steps.append(lint_drone_step())
return pipeline(
name = "{}-lint-backend".format(ver_mode),
trigger = trigger,
services = [],
steps = init_steps + test_steps,
environment = environment,
volumes = volumes,
)

View File

@@ -1,69 +0,0 @@
"""
This module returns the pipeline used for linting frontend code.
"""
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
"github_app_pipeline_volumes",
)
load(
"scripts/drone/steps/lib.star",
"enterprise_setup_step",
"identify_runner_step",
"lint_frontend_step",
"verify_api_clients_step",
"verify_i18n_step",
"yarn_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def lint_frontend_pipeline(trigger, ver_mode):
"""Generates the pipelines used linting frontend code.
Args:
trigger: controls which events can trigger the pipeline execution.
ver_mode: used in the naming of the pipeline.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
init_steps = []
lint_step = lint_frontend_step()
i18n_step = verify_i18n_step()
api_clients_step = verify_api_clients_step()
volumes = []
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
init_steps = [
github_app_generate_token_step(),
enterprise_setup_step(),
]
volumes += github_app_pipeline_volumes()
init_steps += [
identify_runner_step(),
yarn_install_step(),
]
test_steps = [
lint_step,
i18n_step,
api_clients_step,
]
return pipeline(
name = "{}-lint-frontend".format(ver_mode),
trigger = trigger,
services = [],
steps = init_steps + test_steps,
environment = environment,
volumes = volumes,
)

View File

@@ -1,66 +0,0 @@
"""
This module returns the pipeline used for testing backend code.
"""
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
"github_app_pipeline_volumes",
)
load(
"scripts/drone/steps/lib.star",
"enterprise_setup_step",
"identify_runner_step",
"test_backend_integration_step",
"test_backend_step",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def test_backend(trigger, ver_mode):
"""Generates the pipeline used for testing OSS backend code.
Args:
trigger: a Drone trigger for the pipeline.
ver_mode: affects the pipeline name.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
steps = []
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
volumes = []
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
steps.append(github_app_generate_token_step())
steps.append(enterprise_setup_step())
volumes += github_app_pipeline_volumes()
steps += [
identify_runner_step(),
verify_step,
verify_jsonnet_step,
wire_install_step(),
test_backend_step(),
test_backend_integration_step(),
]
return pipeline(
name = "{}-test-backend".format(ver_mode),
trigger = trigger,
steps = steps,
environment = environment,
volumes = volumes,
)

View File

@@ -1,60 +0,0 @@
"""
This module returns the pipeline used for testing frontend code.
"""
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
"github_app_pipeline_volumes",
)
load(
"scripts/drone/steps/lib.star",
"betterer_frontend_step",
"enterprise_setup_step",
"identify_runner_step",
"test_frontend_step",
"yarn_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def test_frontend(trigger, ver_mode):
"""Generates the pipeline used for testing frontend code.
Args:
trigger: a Drone trigger for the pipeline
ver_mode: indirectly controls which revision of enterprise code to use.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
steps = [
identify_runner_step(),
yarn_install_step(),
betterer_frontend_step(),
]
test_step = test_frontend_step()
volumes = []
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
steps.append(github_app_generate_token_step())
steps.append(enterprise_setup_step())
volumes += github_app_pipeline_volumes()
steps.append(test_step)
return pipeline(
name = "{}-test-frontend".format(ver_mode),
trigger = trigger,
steps = steps,
environment = environment,
volumes = volumes,
)

View File

@@ -35,7 +35,6 @@ def enterprise_downstream_pipeline():
]
deps = [
"main-build-e2e-publish",
"main-integration-tests",
]
return pipeline(
name = "main-trigger-downstream",

View File

@@ -1,41 +0,0 @@
"""
This module returns the pipeline used for verifying the storybook build.
"""
load(
"scripts/drone/steps/lib.star",
"e2e_storybook_step",
"identify_runner_step",
"start_storybook_step",
"yarn_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def verify_storybook(trigger, ver_mode):
"""Generates the pipeline used for verifying the storybook build.
Args:
trigger: a Drone trigger for the pipeline
ver_mode: used in the naming of the pipeline.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
steps = [
identify_runner_step(),
yarn_install_step(),
start_storybook_step(),
e2e_storybook_step(),
]
return pipeline(
name = "{}-verify-storybook".format(ver_mode),
trigger = trigger,
steps = steps,
environment = environment,
)

View File

@@ -10,14 +10,6 @@ load(
"scripts/drone/events/release.star",
"verify_release_pipeline",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/steps/github.star",
"github_app_generate_token_step",
@@ -215,7 +207,6 @@ def rgm_main():
name = "rgm-main-prerelease",
trigger = main_trigger,
steps = rgm_run("rgm-build", "drone_build_main.sh"),
depends_on = ["main-test-backend"],
)
def rgm_tag():
@@ -252,7 +243,6 @@ def rgm_nightly_build():
name = "rgm-nightly-build",
trigger = nightly_trigger,
steps = rgm_run("rgm-build", "drone_build_nightly_grafana.sh") + copy_steps,
depends_on = ["nightly-test-backend", "nightly-test-frontend"],
)
def rgm_nightly_publish():
@@ -278,8 +268,6 @@ def rgm_nightly_publish():
def rgm_nightly_pipeline():
return [
test_frontend(nightly_trigger, "nightly"),
test_backend(nightly_trigger, "nightly"),
rgm_nightly_build(),
rgm_nightly_publish(),
]

View File

@@ -1,72 +0,0 @@
"""
This module has functions for Drone services to be used in pipelines.
"""
load(
"scripts/drone/utils/images.star",
"images",
)
def integration_test_services_volumes():
return [
{"name": "postgres", "temp": {"medium": "memory"}},
{"name": "mysql80", "temp": {"medium": "memory"}},
]
def integration_test_services():
services = [
{
"name": "postgres",
"image": images["postgres_alpine"],
"environment": {
"POSTGRES_USER": "grafanatest",
"POSTGRES_PASSWORD": "grafanatest",
"POSTGRES_DB": "grafanatest",
"PGDATA": "/var/lib/postgresql/data/pgdata",
},
"volumes": [
{"name": "postgres", "path": "/var/lib/postgresql/data/pgdata"},
],
},
{
"name": "mysql80",
"image": images["mysql8"],
"environment": {
"MYSQL_ROOT_PASSWORD": "rootpass",
"MYSQL_DATABASE": "grafana_tests",
"MYSQL_USER": "grafana",
"MYSQL_PASSWORD": "password",
},
"volumes": [{"name": "mysql80", "path": "/var/lib/mysql"}],
"commands": ["docker-entrypoint.sh mysqld --default-authentication-plugin=mysql_native_password"],
},
{
"name": "mimir_backend",
"image": images["mimir"],
"environment": {},
"commands": ["/bin/mimir -target=backend -alertmanager.grafana-alertmanager-compatibility-enabled -alertmanager.utf8-strict-mode-enabled"],
},
{
"name": "redis",
"image": images["redis_alpine"],
"environment": {},
},
{
"name": "memcached",
"image": images["memcached_alpine"],
"environment": {},
},
]
return services
def ldap_service():
return {
"name": "ldap",
"image": images["openldap"],
"environment": {
"LDAP_ADMIN_PASSWORD": "grafana",
"LDAP_DOMAIN": "grafana.org",
"SLAPD_ADDITIONAL_MODULES": "memberof",
},
}

View File

@@ -568,34 +568,6 @@ def build_plugins_step(ver_mode):
],
}
def test_backend_step():
return {
"name": "test-backend",
"image": images["go"],
"depends_on": [
"wire-install",
],
"commands": [
# shared-mime-info and shared-mime-info-lang are used for exactly 1 test for the
# mime.TypeByExtension function.
"apk add --update build-base shared-mime-info shared-mime-info-lang",
"go list -f '{{.Dir}}/...' -m | xargs go test -short -covermode=atomic -timeout=5m",
],
}
def test_backend_integration_step():
return {
"name": "test-backend-integration",
"image": images["go"],
"depends_on": [
"wire-install",
],
"commands": [
"apk add --update build-base",
"go test -count=1 -covermode=atomic -timeout=5m -run '^TestIntegration' $(find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\\(.*\\)/' | sort -u)",
],
}
def betterer_frontend_step():
"""Run betterer on frontend code.
@@ -615,44 +587,6 @@ def betterer_frontend_step():
],
}
def test_frontend_step():
"""Runs tests on frontend code.
Returns:
Drone step.
"""
return {
"name": "test-frontend",
"image": images["node"],
"environment": {
"TEST_MAX_WORKERS": "50%",
},
"depends_on": [
"yarn-install",
],
"commands": [
"yarn run ci:test-frontend",
],
}
def lint_frontend_step():
return {
"name": "lint-frontend",
"image": images["node"],
"environment": {
"TEST_MAX_WORKERS": "50%",
},
"depends_on": [
"yarn-install",
],
"commands": [
"yarn run prettier:check",
"yarn run lint",
"yarn run typecheck",
],
}
def verify_i18n_step():
extract_error_message = "\nExtraction failed. Make sure that you have no dynamic translation phrases, such as 't(\\`preferences.theme.\\$${themeID}\\`, themeName)' and that no translation key is used twice. Search the output for '[warning]' to find the offending file."
uncommited_error_message = "\nTranslation extraction has not been committed. Please run 'make i18n-extract', commit the changes and push again."
@@ -835,23 +769,6 @@ def start_storybook_step():
"detach": True,
}
def e2e_storybook_step():
return {
"name": "end-to-end-tests-storybook-suite",
"image": images["cypress"],
"depends_on": [
"start-storybook",
],
"environment": {
"HOST": "start-storybook",
"PORT": "9001",
},
"commands": [
"npx wait-on@7.2.0 -t 1m http://$HOST:$PORT",
"yarn e2e:storybook",
],
}
def cloud_plugins_e2e_tests_step(suite, cloud, trigger = None):
"""Run cloud plugins end-to-end tests.
@@ -1014,129 +931,6 @@ def publish_images_step(ver_mode, docker_repo, trigger = None, depends_on = ["rg
return step
def integration_tests_steps(name, cmds, hostname = None, port = None, environment = None, canFail = False):
"""Integration test steps
Args:
name: the name of the step.
cmds: the commands to run to perform the integration tests.
hostname: the hostname where the remote server is available.
port: the port where the remote server is available.
environment: Any extra environment variables needed to run the integration tests.
canFail: controls whether the step can fail.
Returns:
A list of Drone steps. If a hostname / port were provided, then a step to wait for the remote server to be
available is also returned.
"""
dockerize_name = "wait-for-{}".format(name)
depends = [
"wire-install",
]
step = {
"name": "{}-integration-tests".format(name),
"image": images["go"],
"depends_on": depends,
"commands": [
"apk add --update build-base",
] + cmds,
}
if canFail:
step["failure"] = "ignore"
if environment:
step["environment"] = environment
if hostname == None:
return [step]
depends.append(dockerize_name)
return [
dockerize_step(dockerize_name, hostname, port),
step,
]
def integration_benchmarks_step(name, environment = None):
cmds = [
"if [ -z ${GO_PACKAGES} ]; then echo 'missing GO_PACKAGES'; false; fi",
"go test -v -run=^$ -benchmem -timeout=1h -count=8 -bench=. ${GO_PACKAGES}",
]
return integration_tests_steps("{}-benchmark".format(name), cmds, environment = environment)
def postgres_integration_tests_steps():
cmds = [
"apk add --update postgresql-client",
"psql -p 5432 -h postgres -U grafanatest -d grafanatest -f " +
"devenv/docker/blocks/postgres_tests/setup.sql",
"go clean -testcache",
"go test -p=1 -count=1 -covermode=atomic -timeout=5m -run '^TestIntegration' $(find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\\(.*\\)/' | sort -u)",
]
environment = {
"PGPASSWORD": "grafanatest",
"GRAFANA_TEST_DB": "postgres",
"POSTGRES_HOST": "postgres",
}
return integration_tests_steps("postgres", cmds, "postgres", "5432", environment)
def mysql_integration_tests_steps(hostname, version):
cmds = [
"apk add --update mariadb-client", # alpine doesn't package mysql anymore; more info: https://wiki.alpinelinux.org/wiki/MySQL
"cat devenv/docker/blocks/mysql_tests/setup.sql | mariadb -h {} -P 3306 -u root -prootpass --disable-ssl-verify-server-cert".format(hostname),
"go clean -testcache",
"go test -p=1 -count=1 -covermode=atomic -timeout=5m -run '^TestIntegration' $(find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\\(.*\\)/' | sort -u)",
]
environment = {
"GRAFANA_TEST_DB": "mysql",
"MYSQL_HOST": hostname,
}
return integration_tests_steps("mysql-{}".format(version), cmds, hostname, "3306", environment)
def redis_integration_tests_steps():
cmds = [
"go clean -testcache",
"go list -f '{{.Dir}}/...' -m | xargs go test -run IntegrationRedis -covermode=atomic -timeout=2m",
]
environment = {
"REDIS_URL": "redis://redis:6379/0",
}
return integration_tests_steps("redis", cmds, "redis", "6379", environment = environment)
def remote_alertmanager_integration_tests_steps():
cmds = [
"go clean -testcache",
"go test -run TestIntegrationRemoteAlertmanager -covermode=atomic -timeout=2m ./pkg/services/ngalert/...",
]
environment = {
"AM_TENANT_ID": "test",
"AM_URL": "http://mimir_backend:8080",
}
return integration_tests_steps("remote-alertmanager", cmds, "mimir_backend", "8080", environment = environment)
def memcached_integration_tests_steps():
cmds = [
"go clean -testcache",
"go list -f '{{.Dir}}/...' -m | xargs go test -run IntegrationMemcached -covermode=atomic -timeout=2m",
]
environment = {
"MEMCACHED_HOSTS": "memcached:11211",
}
return integration_tests_steps("memcached", cmds, "memcached", "11211", environment)
def release_canary_npm_packages_step(trigger = None):
"""Releases canary NPM packages.

View File

@@ -20,20 +20,11 @@ images = {
"ubuntu": "ubuntu:22.04",
"curl": "byrnedo/alpine-curl:0.1.8",
"plugins_slack": "plugins/slack",
"python": "python:3.8",
"postgres_alpine": "postgres:12.3-alpine",
"mimir": "grafana/mimir-alpine:r316-55f47f8",
"mysql8": "mysql:8.0.32",
"redis_alpine": "redis:6.2.11-alpine",
"memcached_alpine": "memcached:1.6.9-alpine",
"package_publish": "us.gcr.io/kubernetes-dev/package-publish:latest",
"openldap": "osixia/openldap:1.4.0",
"drone_downstream": "grafana/drone-downstream",
"docker_puppeteer": "grafana/docker-puppeteer:1.1.0",
"docs": "grafana/docs-base:latest",
"cypress": "cypress/included:14.3.2",
"dockerize": "jwilder/dockerize:0.6.1",
"rocky": "rockylinux:9",
"wine": "scottyhardy/docker-wine:stable-9.0",
"github_app_secret_writer": "us-docker.pkg.dev/grafanalabs-global/docker-deployment-tools-prod/github-app-secret-writer:2024-11-05-v11688112090.1-83920c59",
}