vault/Makefile

416 lines
15 KiB
Makefile
Raw Normal View History

2017-10-23 12:38:30 -04:00
# Determine this makefile's path.
# Be sure to place this BEFORE `include` directives, if any.
THIS_FILE := $(lastword $(MAKEFILE_LIST))
# Package lists use deferred shell expansion ($$(...)): the `go list` runs in
# the shell when a recipe references the variable, not at Makefile parse time.
# Vendored packages are always excluded.
MAIN_PACKAGES=$$($(GO_CMD) list ./... | grep -v vendor/ )
# sdk/ and api/ are separate Go modules, so list their packages from within
# their own directories.
SDK_PACKAGES=$$(cd $(CURDIR)/sdk && $(GO_CMD) list ./... | grep -v vendor/ )
API_PACKAGES=$$(cd $(CURDIR)/api && $(GO_CMD) list ./... | grep -v vendor/ )
ALL_PACKAGES=$(MAIN_PACKAGES) $(SDK_PACKAGES) $(API_PACKAGES)
# TEST: every package except integration-test packages (integ/).
# Overridable on the command line to target a specific package.
TEST=$$(echo $(ALL_PACKAGES) | grep -v integ/ )
# Test timeouts: the default is user-overridable; longer ones are used for
# acceptance/race and integration test targets.
TEST_TIMEOUT?=45m
EXTENDED_TEST_TIMEOUT=60m
INTEG_TEST_TIMEOUT=120m
2015-03-04 02:14:18 -05:00
# Checks passed to the (legacy) `go tool vet` invocation.
VETARGS?=-asmdecl -atomic -bool -buildtags -copylocks -methods -nilfunc -printf -rangeloops -shift -structtags -unsafeptr
# Go files subject to gofmt: skip generated protobuf output (*.pb.go) and
# vendored code.
GOFMT_FILES?=$$(find . -name '*.go' | grep -v pb.go | grep -v vendor)
# Prefer GNU sed (gsed, e.g. installed via Homebrew on macOS) and fall back
# to the system sed.
SED?=$(shell command -v gsed || command -v sed)
# Minimum Go version, read from .go-version when a recipe expands this.
GO_VERSION_MIN=$$(cat $(CURDIR)/.go-version)
GO_CMD?=go
# Static (cgo-free) builds by default; targets that need cgo override this.
CGO_ENABLED?=0
2018-07-16 10:18:09 -04:00
# The FoundationDB storage backend requires cgo and its own build tag.
ifneq ($(FDB_ENABLED), )
CGO_ENABLED=1
BUILD_TAGS+=foundationdb
endif
# Set BUILD_MINIMAL to a non-empty value to build a minimal version of Vault with only core features.
BUILD_MINIMAL ?=
ifneq ($(strip $(BUILD_MINIMAL)),)
BUILD_TAGS+=minimal
endif
2017-02-05 20:30:40 -05:00
# Running `make` with no arguments produces a local dev build.
default: dev
2015-03-04 02:14:18 -05:00
2018-03-20 14:54:10 -04:00
# bin generates the releasable binaries for Vault
2017-10-23 12:38:30 -04:00
# The 'ui' build tag bundles the compiled web UI assets into the binary;
# the actual cross-compilation logic lives in scripts/build.sh.
bin: prep
	@CGO_ENABLED=$(CGO_ENABLED) BUILD_TAGS='$(BUILD_TAGS) ui' sh -c "'$(CURDIR)/scripts/build.sh'"
2015-03-04 02:14:18 -05:00
# dev creates binaries for testing Vault locally. These are put
2018-04-09 17:36:05 -04:00
# into ./bin/ as well as $GOPATH/bin
2023-05-02 08:46:13 -04:00
# dev: local development build. The target-specific 'testonly' tag enables
# test-only code paths; VAULT_DEV_BUILD=1 tells build.sh to build for the
# local platform only.
dev: BUILD_TAGS+=testonly
dev: prep
	@CGO_ENABLED=$(CGO_ENABLED) BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
2023-05-02 08:46:13 -04:00
# dev-ui: dev build that also bundles the web UI ('ui' tag); assetcheck
# verifies the UI assets exist before building.
dev-ui: BUILD_TAGS+=testonly
dev-ui: assetcheck prep
	@CGO_ENABLED=$(CGO_ENABLED) BUILD_TAGS='$(BUILD_TAGS) ui' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
2023-05-02 08:46:13 -04:00
# dev-dynamic: dev build with cgo forced on (dynamically linked binary).
dev-dynamic: BUILD_TAGS+=testonly
dev-dynamic: prep
	@CGO_ENABLED=1 BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
2024-11-12 12:38:59 -05:00
# quickdev creates binaries for testing Vault locally like dev, but skips
# the prep step.
quickdev: BUILD_TAGS+=testonly
quickdev:
@CGO_ENABLED=$(CGO_ENABLED) BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
# *-mem variants will enable memory profiling which will write snapshots of heap usage
# to $TMP/vaultprof every 5 minutes. These can be analyzed using `$ go tool pprof <profile_file>`.
# Note that any build can have profiling added via: `$ BUILD_TAGS=memprofiler make ...`
# Each *-mem target works by appending the 'memprofiler' tag as a
# target-specific variable (which propagates to its prerequisites) and then
# delegating to the corresponding base build target.
dev-mem: BUILD_TAGS+=memprofiler
dev-mem: dev
dev-ui-mem: BUILD_TAGS+=memprofiler
dev-ui-mem: assetcheck dev-ui
dev-dynamic-mem: BUILD_TAGS+=memprofiler
dev-dynamic-mem: dev-dynamic
# Creates a Docker image by adding the compiled linux/amd64 binary found in ./bin.
# The resulting image is tagged "vault:dev".
2023-05-02 08:46:13 -04:00
# docker-dev: build the "vault:dev" image; the Go version and build tags are
# passed to the Dockerfile as build args.
docker-dev: BUILD_TAGS+=testonly
docker-dev: prep
docker build --build-arg VERSION=$(GO_VERSION_MIN) --build-arg BUILD_TAGS="$(BUILD_TAGS)" -f scripts/docker/Dockerfile -t vault:dev .
2023-05-02 08:46:13 -04:00
# docker-dev-ui: like docker-dev, but uses the UI Dockerfile and tags the
# image "vault:dev-ui".
docker-dev-ui: BUILD_TAGS+=testonly
docker-dev-ui: prep
docker build --build-arg VERSION=$(GO_VERSION_MIN) --build-arg BUILD_TAGS="$(BUILD_TAGS)" -f scripts/docker/Dockerfile.ui -t vault:dev-ui .
2015-03-04 02:14:18 -05:00
# test runs the unit tests and vets the code
2023-05-02 08:46:13 -04:00
# Clear all VAULT_* environment variables so ambient local configuration
# cannot leak into the tests; an empty VAULT_ACC keeps acceptance tests off.
test: BUILD_TAGS+=testonly
test: prep
	@CGO_ENABLED=$(CGO_ENABLED) \
	VAULT_ADDR= \
	VAULT_TOKEN= \
	VAULT_DEV_ROOT_TOKEN_ID= \
	VAULT_ACC= \
	$(GO_CMD) test -tags='$(BUILD_TAGS)' $(TEST) $(TESTARGS) -timeout=$(TEST_TIMEOUT) -parallel=20
2015-03-04 02:14:18 -05:00
2023-05-02 08:46:13 -04:00
# testcompile compiles (-c) the tests for every package without running them,
# catching build breakage in _test.go files quickly.
testcompile: BUILD_TAGS+=testonly
testcompile: prep
@for pkg in $(TEST) ; do \
$(GO_CMD) test -v -c -tags='$(BUILD_TAGS)' $$pkg -parallel=4 ; \
done
2015-03-20 12:59:48 -04:00
# testacc runs acceptance tests
2023-05-02 08:46:13 -04:00
# Acceptance tests are slow and may touch real infrastructure, so refuse to
# run them across the whole tree: TEST must name a specific package.
# VAULT_ACC=1 is what gates the acceptance tests on.
testacc: BUILD_TAGS+=testonly
testacc: prep
	@if [ "$(TEST)" = "./..." ]; then \
		echo "ERROR: Set TEST to a specific package"; \
		exit 1; \
	fi
	VAULT_ACC=1 $(GO_CMD) test -tags='$(BUILD_TAGS)' $(TEST) -v $(TESTARGS) -timeout=$(EXTENDED_TEST_TIMEOUT)
2015-03-20 12:59:48 -04:00
2015-03-04 02:14:18 -05:00
# testrace runs the race checker
2023-05-02 08:46:13 -04:00
# testrace runs the unit tests under the Go race detector. CGO_ENABLED=1 is
# forced here because -race requires cgo; the VAULT_* variables are cleared
# for the same reason as in the `test` target.
testrace: BUILD_TAGS+=testonly
testrace: prep
@CGO_ENABLED=1 \
VAULT_ADDR= \
VAULT_TOKEN= \
VAULT_DEV_ROOT_TOKEN_ID= \
VAULT_ACC= \
$(GO_CMD) test -tags='$(BUILD_TAGS)' -race $(TEST) $(TESTARGS) -timeout=$(EXTENDED_TEST_TIMEOUT) -parallel=20
2015-03-04 02:14:18 -05:00
# cover generates an HTML test-coverage report via the helper script.
cover:
./scripts/coverage.sh --html
2015-03-04 02:14:18 -05:00
# vet runs the Go source code static analysis tool `vet` to find
# any common errors.
# NOTE: findings are reported but do not fail the target — the `if` only
# prints a warning banner when vet exits non-zero. The root package is
# excluded from the list along with vendor/.
vet:
@$(GO_CMD) list -f '{{.Dir}}' ./... | grep -v /vendor/ \
| grep -v '.*github.com/hashicorp/vault$$' \
| xargs $(GO_CMD) vet ; if [ $$? -eq 1 ]; then \
echo ""; \
echo "Vet found suspicious constructs. Please check the reported constructs"; \
echo "and fix them if necessary before submitting the code for reviewal."; \
fi
2015-03-04 02:14:18 -05:00
[QT-506] Use enos scenario samples for testing (#22641) Replace our prior implementation of Enos test groups with the new Enos sampling feature. With this feature we're able to describe which scenarios and variant combinations are valid for a given artifact and allow enos to create a valid sample field (a matrix of all compatible scenarios) and take an observation (select some to run) for us. This ensures that every valid scenario and variant combination will now be a candidate for testing in the pipeline. See QT-504[0] for further details on the Enos sampling capabilities. Our prior implementation only tested the amd64 and arm64 zip artifacts, as well as the Docker container. We now include the following new artifacts in the test matrix: * CE Amd64 Debian package * CE Amd64 RPM package * CE Arm64 Debian package * CE Arm64 RPM package Each artifact includes a sample definition for both pre-merge/post-merge (build) and release testing. Changes: * Remove the hand crafted `enos-run-matrices` ci matrix targets and replace them with per-artifact samples. * Use enos sampling to generate different sample groups on all pull requests. * Update the enos scenario matrices to handle HSM and FIPS packages. * Simplify enos scenarios by using shared globals instead of cargo-culted locals. Note: This will require coordination with vault-enterprise to ensure a smooth migration to the new system. Integrating new scenarios or modifying existing scenarios/variants should be much smoother after this initial migration. [0] https://github.com/hashicorp/enos/pull/102 Signed-off-by: Ryan Cragun <me@ryan.ec>
2023-09-08 14:46:32 -04:00
# deprecations runs staticcheck tool to look for deprecations. Checks entire code to see if it
# has deprecated function, variable, constant or field
# The empty argument makes the script scan everything rather than a diff base.
deprecations: bootstrap prep
@BUILD_TAGS='$(BUILD_TAGS)' ./scripts/deprecations-checker.sh ""
# ci-deprecations runs staticcheck tool to look for deprecations. All output gets piped to revgrep
# which will only return an error if changes that is not on main has deprecated function, variable, constant or field
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# ci-deprecations: CI variant — the "main" argument makes the checker report
# only deprecated usages introduced relative to the main branch.
ci-deprecations: prep check-tools-external
@BUILD_TAGS='$(BUILD_TAGS)' ./scripts/deprecations-checker.sh main
# vet-codechecker runs our custom linters on the test functions. All output gets
# piped to revgrep which will only return an error if new piece of code violates
[QT-506] Use enos scenario samples for testing (#22641) Replace our prior implementation of Enos test groups with the new Enos sampling feature. With this feature we're able to describe which scenarios and variant combinations are valid for a given artifact and allow enos to create a valid sample field (a matrix of all compatible scenarios) and take an observation (select some to run) for us. This ensures that every valid scenario and variant combination will now be a candidate for testing in the pipeline. See QT-504[0] for further details on the Enos sampling capabilities. Our prior implementation only tested the amd64 and arm64 zip artifacts, as well as the Docker container. We now include the following new artifacts in the test matrix: * CE Amd64 Debian package * CE Amd64 RPM package * CE Arm64 Debian package * CE Arm64 RPM package Each artifact includes a sample definition for both pre-merge/post-merge (build) and release testing. Changes: * Remove the hand crafted `enos-run-matrices` ci matrix targets and replace them with per-artifact samples. * Use enos sampling to generate different sample groups on all pull requests. * Update the enos scenario matrices to handle HSM and FIPS packages. * Simplify enos scenarios by using shared globals instead of cargo-culted locals. Note: This will require coordination with vault-enterprise to ensure a smooth migration to the new system. Integrating new scenarios or modifying existing scenarios/variants should be much smoother after this initial migration. [0] https://github.com/hashicorp/enos/pull/102 Signed-off-by: Ryan Cragun <me@ryan.ec>
2023-09-08 14:46:32 -04:00
# the check
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# vet-codechecker: run the in-repo custom vet tool (./tools/codechecker) via
# -vettool; piping through revgrep filters the output to new findings.
vet-codechecker: check-tools-internal
@echo "==> Running go vet with ./tools/codechecker..."
@$(GO_CMD) vet -vettool=$$(which codechecker) -tags=$(BUILD_TAGS) ./... 2>&1 | revgrep
# vet-codechecker runs our custom linters on the test functions. All output gets
[QT-506] Use enos scenario samples for testing (#22641) Replace our prior implementation of Enos test groups with the new Enos sampling feature. With this feature we're able to describe which scenarios and variant combinations are valid for a given artifact and allow enos to create a valid sample field (a matrix of all compatible scenarios) and take an observation (select some to run) for us. This ensures that every valid scenario and variant combination will now be a candidate for testing in the pipeline. See QT-504[0] for further details on the Enos sampling capabilities. Our prior implementation only tested the amd64 and arm64 zip artifacts, as well as the Docker container. We now include the following new artifacts in the test matrix: * CE Amd64 Debian package * CE Amd64 RPM package * CE Arm64 Debian package * CE Arm64 RPM package Each artifact includes a sample definition for both pre-merge/post-merge (build) and release testing. Changes: * Remove the hand crafted `enos-run-matrices` ci matrix targets and replace them with per-artifact samples. * Use enos sampling to generate different sample groups on all pull requests. * Update the enos scenario matrices to handle HSM and FIPS packages. * Simplify enos scenarios by using shared globals instead of cargo-culted locals. Note: This will require coordination with vault-enterprise to ensure a smooth migration to the new system. Integrating new scenarios or modifying existing scenarios/variants should be much smoother after this initial migration. [0] https://github.com/hashicorp/enos/pull/102 Signed-off-by: Ryan Cragun <me@ryan.ec>
2023-09-08 14:46:32 -04:00
# piped to revgrep which will only return an error if new piece of code that is
# not on main violates the check
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# ci-vet-codechecker: CI variant of vet-codechecker — builds the internal
# tools first and has revgrep compare against origin/main so only findings
# introduced by the change fail.
ci-vet-codechecker: tools-internal check-tools-external
@echo "==> Running go vet with ./tools/codechecker..."
@$(GO_CMD) vet -vettool=$$(which codechecker) -tags=$(BUILD_TAGS) ./... 2>&1 | revgrep origin/main
# lint runs vet plus a number of other checkers, it is more comprehensive, but louder
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# lint: run golangci-lint over every non-vendored package.
# NOTE: like `vet`, findings print a warning banner but do not fail the
# target (the `if` only echoes on a non-zero exit).
lint: check-tools-external
@$(GO_CMD) list -f '{{.Dir}}' ./... | grep -v /vendor/ \
| xargs golangci-lint run; if [ $$? -eq 1 ]; then \
echo ""; \
echo "Lint found suspicious constructs. Please check the reported constructs"; \
echo "and fix them if necessary before submitting the code for reviewal."; \
fi
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# for ci jobs, runs lint against the changed packages in the commit
# (everything newer than the parent commit, per --new-from-rev=HEAD~).
# `--deadline` was deprecated and later removed from golangci-lint;
# `--timeout` is the supported replacement.
ci-lint: check-tools-external
	@golangci-lint run --timeout 10m --new-from-rev=HEAD~
# Lint protobuf files with buf. Depends on prep (generated sources) and
# check-tools-external (verifies the external `buf` binary is installed).
protolint: prep check-tools-external
	@echo "==> Linting protobufs..."
	@buf lint
2017-09-04 19:16:11 -04:00
# prep runs `go generate` to build the dynamically generated
# source files.
#
# n.b.: prep used to depend on fmtcheck, but since fmtcheck is
# now run as a pre-commit hook (and there's little value in
# making every build run the formatter), we've removed that
# dependency.
#
# GOARCH=/GOOS= are cleared so generate always runs host-native even when
# cross-compiling. The empty assignments must prefix the `go generate`
# command itself: in `GOARCH= GOOS= cd api && go generate`, the shell
# applies the assignments only to `cd`, leaving the generate step with the
# caller's GOARCH/GOOS — which was the bug here.
prep: check-go-version clean
	@echo "==> Running go generate..."
	@GOARCH= GOOS= $(GO_CMD) generate $(MAIN_PACKAGES)
	@cd api && GOARCH= GOOS= $(GO_CMD) generate $(API_PACKAGES)
	@cd sdk && GOARCH= GOOS= $(GO_CMD) generate $(SDK_PACKAGES)
# Git doesn't allow us to store shared hooks in .git. Instead, we make sure they're up-to-date
# whenever a make target is invoked.
# Copies every tracked hook from .hooks/ into the local .git/hooks/ directory.
# The [ -d .git/hooks ] guard makes this a no-op in checkouts that are not
# git work trees (e.g. source tarballs), so builds there don't fail.
.PHONY: hooks
hooks:
	@if [ -d .git/hooks ]; then cp .hooks/* .git/hooks/; fi
2015-03-04 02:14:18 -05:00
# GNU make attempts to (re)build any included makefile before reading it, so
# `-include hooks` forces the phony `hooks` target above to run on every make
# invocation. No file named "hooks" is ever produced; the leading `-` silences
# the resulting "cannot read" error.
-include hooks # Make sure they're always up-to-date
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# bootstrap the build by generating any necessary code and downloading additional tools that may
# be used by devs.
# NOTE(review): `tools` is defined elsewhere in this Makefile — presumably it
# installs the internal/external developer tools; confirm against the full file.
bootstrap: tools prep
# Note: if you have plugins in GOPATH you can update all of them via something like:
# for i in $(ls | grep vault-plugin-); do cd $i; git remote update; git reset --hard origin/master; dep ensure -update; git add .; git commit; git push; cd ..; done
#
# update-plugins bumps every vault-plugin-* module listed in go.mod.
# Fixes:
#  - `$P` must be `$$P` in a make recipe (make itself consumes a single `$`,
#    so `$P` expanded to the empty make variable P and `go get -v ""` ran).
#  - awk '{print $$1}' strips the leading tab that go.mod require lines carry,
#    which `cut -d ' ' -f 1` preserved.
#  - use $(GO_CMD) for consistency with the rest of this Makefile.
update-plugins:
	grep vault-plugin- go.mod | awk '{print $$1}' | while read -r P; do echo "Updating $$P..."; $(GO_CMD) get -v "$$P"; done
2018-04-10 02:32:41 -04:00
# static-assets-dir ensures the output directory for compiled web UI assets
# exists (mkdir -p is idempotent, so this is safe to run repeatedly).
static-assets-dir:
	@mkdir -p ./http/web_ui
2018-04-03 10:46:45 -04:00
# install-ui-dependencies installs the web UI's JavaScript packages with pnpm
# from the ui/ directory.
install-ui-dependencies:
	@echo "==> Installing JavaScript assets"
	@cd ui && pnpm i
# test-ember runs the web UI (Ember) test suite via the ui package's
# `test:oss` pnpm script; UI dependencies are installed first.
test-ember: install-ui-dependencies
	@echo "==> Running ember tests"
	@cd ui && pnpm run test:oss
2018-04-03 10:46:45 -04:00
test-ember-enos: install-ui-dependencies
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# NOTE(review): these two recipe lines belong to a UI test target whose header
# is not visible in this view — confirm against the full Makefile. They appear
# to run the ember test suite against a real (presumably enos-provisioned)
# Vault backend via the ui package's test:enos script.
@echo "==> Running ember tests with a real backend"
@cd ui && pnpm run test:enos
# ember-dist builds the production UI assets.
# node-sass ships platform-specific native bindings, so it is rebuilt first to
# match the local platform before the Ember build runs. The trailing rm clears
# Ember's on-disk async cache, which is known to go stale between builds.
# (This span also removes git-blame prose that had been interleaved with the
# recipe lines and would break Makefile parsing.)
ember-dist: install-ui-dependencies
	@cd ui && npm rebuild node-sass
	@echo "==> Building Ember application"
	@cd ui && pnpm run build
	@rm -rf ui/if-you-need-to-delete-this-open-an-issue-async-disk-cache
# ember-dist-dev builds the UI assets with development settings (build:dev).
# As with ember-dist, node-sass is rebuilt first so its native bindings match
# the local platform. (This span also removes git-blame prose that had been
# interleaved with the recipe lines and would break Makefile parsing.)
ember-dist-dev: install-ui-dependencies
	@cd ui && npm rebuild node-sass
	@echo "==> Building Ember application"
	@cd ui && pnpm run build:dev
# static-dist / static-dist-dev are prerequisite-only aliases that ensure the
# UI assets are built (production or development flavor respectively) before
# they are embedded into the Vault binary.
static-dist: ember-dist
static-dist-dev: ember-dist-dev
2018-04-03 10:46:45 -04:00
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# proto regenerates the Go protobuf bindings with buf, then post-processes the
# generated files:
#   1. sed rewrites legacy initialisms (Id -> ID, Url -> URL, ...) in a fixed
#      set of files, kept only for backwards compatibility (see note below);
#   2. protoc-go-inject-tag injects the sentinel struct tags declared in the
#      .proto sources into the generated identity types.
# Depends on check-tools-external so missing tools (buf, protoc-go-inject-tag,
# the GNU-compatible $(SED)) fail fast with a clear message.
proto: check-tools-external
@echo "==> Generating Go code from protobufs..."
buf generate
# No additional sed expressions should be added to this list. Going forward
# we should just use the variable names chosen by protobuf. These are left
# here for backwards compatibility, namely for SDK compilation.
$(SED) -i -e 's/Id/ID/' -e 's/SPDX-License-IDentifier/SPDX-License-Identifier/' vault/request_forwarding_service.pb.go
$(SED) -i -e 's/Idp/IDP/' -e 's/Url/URL/' -e 's/Id/ID/' -e 's/IDentity/Identity/' -e 's/EntityId/EntityID/' -e 's/Api/API/' -e 's/Qr/QR/' -e 's/Totp/TOTP/' -e 's/Mfa/MFA/' -e 's/Pingid/PingID/' -e 's/namespaceId/namespaceID/' -e 's/Ttl/TTL/' -e 's/BoundCidrs/BoundCIDRs/' -e 's/SPDX-License-IDentifier/SPDX-License-Identifier/' helper/identity/types.pb.go helper/identity/mfa/types.pb.go helper/storagepacker/types.pb.go sdk/plugin/pb/backend.pb.go sdk/logical/identity.pb.go vault/activity/activity_log.pb.go
# This will inject the sentinel struct tags as decorated in the proto files.
protoc-go-inject-tag -input=./helper/identity/types.pb.go
protoc-go-inject-tag -input=./helper/identity/mfa/types.pb.go
2016-10-20 12:39:19 -04:00
# importfmt rewrites Go import ordering/grouping across the repo with
# gosimports. Generated protobuf files (pb.go) and vendored code are skipped.
# Declared .PHONY to match the file's convention for command-style targets and
# so a stray file named "importfmt" cannot mask the rule.
.PHONY: importfmt
importfmt: check-tools-external
	find . -name '*.go' | grep -v pb.go | grep -v vendor | xargs gosimports -w
# fmt formats all Go code: first import grouping via importfmt, then gofumpt
# (a stricter superset of gofmt). pb.go and vendored files are skipped.
# (This span also removes git-blame prose that had been interleaved between
# the target header and its recipe and would break Makefile parsing.)
.PHONY: fmt
fmt: importfmt
	find . -name '*.go' | grep -v pb.go | grep -v vendor | xargs gofumpt -w
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
fmtcheck: check-go-fmt
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
.PHONY: go-mod-download
# go-mod-download downloads the Go modules for every go.mod in the project via
# the shared scripts/go-helper.sh helper.
go-mod-download:
@$(CURDIR)/scripts/go-helper.sh mod-download
.PHONY: go-mod-tidy
# go-mod-tidy runs `go mod tidy` across every go.mod in the project via the
# shared scripts/go-helper.sh helper.
go-mod-tidy:
@$(CURDIR)/scripts/go-helper.sh mod-tidy
# protofmt rewrites all protobuf definitions in place with buf's canonical
# formatter. Declared .PHONY and given the check-tools-external prerequisite
# for consistency with the other buf-based targets (proto, check-proto-fmt),
# so a missing buf binary fails fast with a clear message.
.PHONY: protofmt
protofmt: check-tools-external
	buf format -w
# semgrep runs the full rule set in tools/semgrep (autofix mode, -a) against
# the Go sources, excluding vendored code. Declared .PHONY and given the
# check-tools-external prerequisite for consistency with check-sempgrep, so a
# missing semgrep binary fails fast with a clear message.
.PHONY: semgrep
semgrep: check-tools-external
	semgrep --include '*.go' --exclude 'vendor' -a -f tools/semgrep .
# assetcheck verifies that the compiled UI assets are present and usable by
# delegating to scripts/assetcheck.sh; dev-ui depends on this before building.
assetcheck:
@echo "==> Checking compiled UI assets..."
@sh -c "'$(CURDIR)/scripts/assetcheck.sh'"
# spellcheck runs misspell over the website sources; -error makes any finding
# fail the target. Assumes the misspell binary is already on PATH — TODO
# confirm it is installed by the tools.sh bootstrap like the other externals.
spellcheck:
@echo "==> Spell checking website..."
@misspell -error -source=text website/source
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# check-go-fmt verifies (without rewriting) that all Go code is formatted,
# via scripts/go-helper.sh check-fmt.
# FIX: the original read ".PHONY check-go-fmt:" — a space instead of a colon —
# which defines an empty rule for the literal targets ".PHONY" and
# "check-go-fmt" rather than marking the target phony.
.PHONY: check-go-fmt
check-go-fmt:
	@$(CURDIR)/scripts/go-helper.sh check-fmt
# check-go-version verifies the installed Go toolchain is at least
# GO_VERSION_MIN (read from .go-version), via scripts/go-helper.sh.
# FIX: the original read ".PHONY check-go-version:" — a space instead of a
# colon — which defines an empty rule for the literal targets ".PHONY" and
# "check-go-version" rather than marking the target phony.
.PHONY: check-go-version
check-go-version:
	@$(CURDIR)/scripts/go-helper.sh check-version $(GO_VERSION_MIN)
.PHONY: check-proto-fmt
# check-proto-fmt verifies protobuf formatting with buf without rewriting:
# --exit-code makes any diff fail the target, and
# --error-format github-actions emits annotations CI can surface inline.
check-proto-fmt:
buf format -d --error-format github-actions --exit-code
.PHONY: check-proto-delta
# check-proto-delta proves the committed *.pb.go files match their .proto
# sources: delete every generated binding, regenerate via `make proto`, then
# fail if `git diff` reports any change in the working tree.
# FIXES: the second echo said "*.pg.go" while the find below deletes "*.pb.go"
# (message typo), and `-print0` emitted NUL-separated names into the log where
# human-readable `-print` output was clearly intended.
check-proto-delta: prep
	@echo "==> Checking for a delta in proto generated Go files..."
	@echo "==> Deleting all *.pb.go files..."
	find . -type f -name '*.pb.go' -delete -print
	@$(MAKE) -f $(THIS_FILE) proto
	@if ! git diff --exit-code; then echo "Go protobuf bindings need to be regenerated. Run 'make proto' to fix them." && exit 1; fi
# check-sempgrep runs the CI semgrep ruleset (tools/semgrep/ci) in check-only
# mode; --error makes findings fail the target.
# NOTE: the target name is misspelled ("sempgrep") but is kept because
# existing CI workflows invoke it by this name; the correctly spelled
# check-semgrep alias is provided for new callers.
.PHONY: check-sempgrep check-semgrep
check-semgrep: check-sempgrep
check-sempgrep: check-tools-external
	@echo "==> Checking semgrep..."
	@semgrep --error --include '*.go' --exclude 'vendor' -f tools/semgrep/ci .
.PHONY: check-tools
# check-tools verifies that both the internal (built-from-repo) and external
# (pre-built binary) developer tools are installed, via tools/tools.sh.
check-tools:
@$(CURDIR)/tools/tools.sh check
.PHONY: check-tools-external
# check-tools-external verifies that the external developer tools (buf,
# gofumpt, gosimports, semgrep, ...) are available, via tools/tools.sh.
check-tools-external:
@$(CURDIR)/tools/tools.sh check-external
.PHONY: check-tools-internal
# check-tools-internal verifies that the tools built from this repository's
# own sources are available, via tools/tools.sh.
check-tools-internal:
@$(CURDIR)/tools/tools.sh check-internal
VAULT-31181: Add `pipeline` tool to Vault (#28536) As the Vault pipeline and release processes evolve over time, so too must the tooling that drives them. Historically we've utilized a combination of CI features and shell scripts that are wrapped into make targets to drive our CI. While this approach has worked, it requires careful consideration of what features to use (bash in CI almost never matches bash in developer machines, etc.) and often requires a deep understanding of several CLI tools (jq, etc). `make` itself also has limitations in user experience, e.g. passing flags. As we're all in on Github Actions as our pipeline coordinator, continuing to utilize and build CLI tools to perform our pipeline tasks makes sense. This PR adds a new CLI tool called `pipeline` which we can use to build new isolated tasks that we can string together in Github Actions. We intend to use this utility as the interface for future release automation work, see VAULT-27514. For the first task in this new `pipeline` tool, I've chosen to build two small sub-commands: * `pipeline releases list-versions` - Allows us to list Vault versions between a range. The range is configurable either by setting `--upper` and/or `--lower` bounds, or by using the `--nminus` to set the N-X to go back from the current branches version. As CE and ENT do not have version parity we also consider the `--edition`, as well as none-to-many `--skip` flags to exclude specific versions. * `pipeline generate enos-dynamic-config` - Which creates dynamic enos configuration based on the branch and the current list of release versions. It takes largely the same flags as the `release list-versions` command, however it also expects a `--dir` for the enos directory and a `--file` where the dynamic configuration will be written. This allows us to dynamically update and feed the latest versions into our sampling algorithm to get coverage over all supported prior versions. 
We then integrate these new tools into the pipeline itself and cache the dynamic config on a weekly basis. We also cache the pipeline tool itself as it will likely become a repository for pipeline specific tooling. The caching strategy for the `pipeline` tool itself will make most workflows that require it super fast. Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-10-23 17:31:24 -04:00
# check-tools-pipeline verifies that the `pipeline` tooling is available,
# via tools/tools.sh check-pipeline.
.PHONY: check-tools-pipeline
check-tools-pipeline:
	@$(CURDIR)/tools/tools.sh check-pipeline
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# check-vault-in-path fails unless a `vault` binary is found on PATH and is
# executable, then prints its location and the output of `vault version`.
# The whole recipe is one shell invocation (backslash continuations) so the
# VAULT_BIN shell variable survives across the three steps.
# (Declared phony in the aggregate .PHONY list at the bottom of this file.)
check-vault-in-path:
	@VAULT_BIN=$$(command -v vault) || { echo "vault command not found"; exit 1; }; \
	[ -x "$$VAULT_BIN" ] || { echo "$$VAULT_BIN not executable"; exit 1; }; \
	printf "Using Vault at %s:\n\$$ vault version\n%s\n" "$$VAULT_BIN" "$$(vault version)"
# tools installs all developer tools (internal and external) via tools/tools.sh.
.PHONY: tools
tools:
	@$(CURDIR)/tools/tools.sh install
# tools-external installs only the external tools (tools.sh install-external).
.PHONY: tools-external
tools-external:
	@$(CURDIR)/tools/tools.sh install-external
# tools-internal installs only the internal tools (tools.sh install-internal).
.PHONY: tools-internal
tools-internal:
	@$(CURDIR)/tools/tools.sh install-internal
VAULT-31181: Add `pipeline` tool to Vault (#28536) As the Vault pipeline and release processes evolve over time, so too must the tooling that drives them. Historically we've utilized a combination of CI features and shell scripts that are wrapped into make targets to drive our CI. While this approach has worked, it requires careful consideration of what features to use (bash in CI almost never matches bash in developer machines, etc.) and often requires a deep understanding of several CLI tools (jq, etc). `make` itself also has limitations in user experience, e.g. passing flags. As we're all in on Github Actions as our pipeline coordinator, continuing to utilize and build CLI tools to perform our pipeline tasks makes sense. This PR adds a new CLI tool called `pipeline` which we can use to build new isolated tasks that we can string together in Github Actions. We intend to use this utility as the interface for future release automation work, see VAULT-27514. For the first task in this new `pipeline` tool, I've chosen to build two small sub-commands: * `pipeline releases list-versions` - Allows us to list Vault versions between a range. The range is configurable either by setting `--upper` and/or `--lower` bounds, or by using the `--nminus` to set the N-X to go back from the current branches version. As CE and ENT do not have version parity we also consider the `--edition`, as well as none-to-many `--skip` flags to exclude specific versions. * `pipeline generate enos-dynamic-config` - Which creates dynamic enos configuration based on the branch and the current list of release versions. It takes largely the same flags as the `release list-versions` command, however it also expects a `--dir` for the enos directory and a `--file` where the dynamic configuration will be written. This allows us to dynamically update and feed the latest versions into our sampling algorithm to get coverage over all supported prior versions. 
We then integrate these new tools into the pipeline itself and cache the dynamic config on a weekly basis. We also cache the pipeline tool itself as it will likely become a repository for pipeline specific tooling. The caching strategy for the `pipeline` tool itself will make most workflows that require it super fast. Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-10-23 17:31:24 -04:00
# tools-pipeline installs the `pipeline` tooling via tools/tools.sh install-pipeline.
.PHONY: tools-pipeline
tools-pipeline:
	@$(CURDIR)/tools/tools.sh install-pipeline
# The *-database-plugin targets build the standalone database secrets-engine
# plugin binaries into ./bin with cgo disabled. The target names are not the
# output paths (bin/...), so they are declared phony in the aggregate .PHONY
# list at the bottom of this file and always rebuild.
mysql-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/mysql-database-plugin ./plugins/database/mysql/mysql-database-plugin
mysql-legacy-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/mysql-legacy-database-plugin ./plugins/database/mysql/mysql-legacy-database-plugin
cassandra-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/cassandra-database-plugin ./plugins/database/cassandra/cassandra-database-plugin
2019-01-17 20:14:57 -05:00
# influxdb-database-plugin builds the InfluxDB database plugin binary into ./bin
# with cgo disabled (phony; see the aggregate .PHONY list below).
influxdb-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/influxdb-database-plugin ./plugins/database/influxdb/influxdb-database-plugin
2019-01-17 20:14:57 -05:00
# Remaining database plugin binaries, built into ./bin with cgo disabled
# (all phony; see the aggregate .PHONY list below).
postgresql-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/postgresql-database-plugin ./plugins/database/postgresql/postgresql-database-plugin
mssql-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/mssql-database-plugin ./plugins/database/mssql/mssql-database-plugin
hana-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/hana-database-plugin ./plugins/database/hana/hana-database-plugin
mongodb-database-plugin:
	@CGO_ENABLED=0 $(GO_CMD) build -o bin/mongodb-database-plugin ./plugins/database/mongodb/mongodb-database-plugin
[QT-436] Pseudo random artifact test scenarios (#18056) Introducing a new approach to testing Vault artifacts before merge and after merge/notorization/signing. Rather than run a few static scenarios across the artifacts, we now have the ability to run a pseudo random sample of scenarios across many different build artifacts. We've added 20 possible scenarios for the AMD64 and ARM64 binary bundles, which we've broken into five test groups. On any given push to a pull request branch, we will now choose a random test group and execute its corresponding scenarios against the resulting build artifacts. This gives us greater test coverage but lets us split the verification across many different pull requests. The post-merge release testing pipeline behaves in a similar fashion, however, the artifacts that we use for testing have been notarized and signed prior to testing. We've also reduce the number of groups so that we run more scenarios after merge to a release branch. We intend to take what we've learned building this in Github Actions and roll it into an easier to use feature that is native to Enos. Until then, we'll have to manually add scenarios to each matrix file and manually number the test group. It's important to note that Github requires every matrix to include at least one vector, so every artifact that is being tested must include a single scenario in order for all workflows to pass and thus satisfy branch merge requirements. * Add support for different artifact types to enos-run * Add support for different runner type to enos-run * Add arm64 scenarios to build matrix * Expand build matrices to include different variants * Update Consul versions in Enos scenarios and matrices * Refactor enos-run environment * Add minimum version filtering support to enos-run. This allows us to automatically exclude scenarios that require a more recent version of Vault * Add maximum version filtering support to enos-run. 
This allows us to automatically exclude scenarios that require an older version of Vault * Fix Node 12 deprecation warnings * Rename enos-verify-stable to enos-release-testing-oss * Convert artifactory matrix into enos-release-testing-oss matrices * Add all Vault editions to Enos scenario matrices * Fix verify version with complex Vault edition metadata * Rename the crt-builder to ci-helper * Add more version helpers to ci-helper and Makefile * Update CODEOWNERS for quality team * Add support for filtering matrices by group and version constraints * Add support for pseudo random test scenario execution Signed-off-by: Ryan Cragun <me@ryan.ec>
2022-12-12 15:46:04 -05:00
# These ci targets are used for building and testing in Github Actions
# workflows and for Enos scenarios. They all delegate to scripts/ci-helper.sh.
# ci-build builds the Vault binary (ci-helper.sh build).
.PHONY: ci-build
ci-build:
	@$(CURDIR)/scripts/ci-helper.sh build
# ci-build-ui builds the web UI assets (ci-helper.sh build-ui).
.PHONY: ci-build-ui
ci-build-ui:
	@$(CURDIR)/scripts/ci-helper.sh build-ui
[QT-506] Use enos scenario samples for testing (#22641) Replace our prior implementation of Enos test groups with the new Enos sampling feature. With this feature we're able to describe which scenarios and variant combinations are valid for a given artifact and allow enos to create a valid sample field (a matrix of all compatible scenarios) and take an observation (select some to run) for us. This ensures that every valid scenario and variant combination will now be a candidate for testing in the pipeline. See QT-504[0] for further details on the Enos sampling capabilities. Our prior implementation only tested the amd64 and arm64 zip artifacts, as well as the Docker container. We now include the following new artifacts in the test matrix: * CE Amd64 Debian package * CE Amd64 RPM package * CE Arm64 Debian package * CE Arm64 RPM package Each artifact includes a sample definition for both pre-merge/post-merge (build) and release testing. Changes: * Remove the hand crafted `enos-run-matrices` ci matrix targets and replace them with per-artifact samples. * Use enos sampling to generate different sample groups on all pull requests. * Update the enos scenario matrices to handle HSM and FIPS packages. * Simplify enos scenarios by using shared globals instead of cargo-culted locals. Note: This will require coordination with vault-enterprise to ensure a smooth migration to the new system. Integrating new scenarios or modifying existing scenarios/variants should be much smoother after this initial migration. [0] https://github.com/hashicorp/enos/pull/102 Signed-off-by: Ryan Cragun <me@ryan.ec>
2023-09-08 14:46:32 -04:00
# ci-bundle packages the build output via ci-helper.sh bundle.
.PHONY: ci-bundle
ci-bundle:
	@$(CURDIR)/scripts/ci-helper.sh bundle
# ci-copy-binary copies the built binary via ci-helper.sh copy-binary.
.PHONY: ci-copy-binary
ci-copy-binary:
	@$(CURDIR)/scripts/ci-helper.sh copy-binary
[QT-506] Use enos scenario samples for testing (#22641) Replace our prior implementation of Enos test groups with the new Enos sampling feature. With this feature we're able to describe which scenarios and variant combinations are valid for a given artifact and allow enos to create a valid sample field (a matrix of all compatible scenarios) and take an observation (select some to run) for us. This ensures that every valid scenario and variant combination will now be a candidate for testing in the pipeline. See QT-504[0] for further details on the Enos sampling capabilities. Our prior implementation only tested the amd64 and arm64 zip artifacts, as well as the Docker container. We now include the following new artifacts in the test matrix: * CE Amd64 Debian package * CE Amd64 RPM package * CE Arm64 Debian package * CE Arm64 RPM package Each artifact includes a sample definition for both pre-merge/post-merge (build) and release testing. Changes: * Remove the hand crafted `enos-run-matrices` ci matrix targets and replace them with per-artifact samples. * Use enos sampling to generate different sample groups on all pull requests. * Update the enos scenario matrices to handle HSM and FIPS packages. * Simplify enos scenarios by using shared globals instead of cargo-culted locals. Note: This will require coordination with vault-enterprise to ensure a smooth migration to the new system. Integrating new scenarios or modifying existing scenarios/variants should be much smoother after this initial migration. [0] https://github.com/hashicorp/enos/pull/102 Signed-off-by: Ryan Cragun <me@ryan.ec>
2023-09-08 14:46:32 -04:00
# ci-get-artifact-basename prints the release artifact basename
# (ci-helper.sh artifact-basename) for use by CI workflows.
.PHONY: ci-get-artifact-basename
ci-get-artifact-basename:
	@$(CURDIR)/scripts/ci-helper.sh artifact-basename
[QT-436] Pseudo random artifact test scenarios (#18056) Introducing a new approach to testing Vault artifacts before merge and after merge/notorization/signing. Rather than run a few static scenarios across the artifacts, we now have the ability to run a pseudo random sample of scenarios across many different build artifacts. We've added 20 possible scenarios for the AMD64 and ARM64 binary bundles, which we've broken into five test groups. On any given push to a pull request branch, we will now choose a random test group and execute its corresponding scenarios against the resulting build artifacts. This gives us greater test coverage but lets us split the verification across many different pull requests. The post-merge release testing pipeline behaves in a similar fashion, however, the artifacts that we use for testing have been notarized and signed prior to testing. We've also reduce the number of groups so that we run more scenarios after merge to a release branch. We intend to take what we've learned building this in Github Actions and roll it into an easier to use feature that is native to Enos. Until then, we'll have to manually add scenarios to each matrix file and manually number the test group. It's important to note that Github requires every matrix to include at least one vector, so every artifact that is being tested must include a single scenario in order for all workflows to pass and thus satisfy branch merge requirements. * Add support for different artifact types to enos-run * Add support for different runner type to enos-run * Add arm64 scenarios to build matrix * Expand build matrices to include different variants * Update Consul versions in Enos scenarios and matrices * Refactor enos-run environment * Add minimum version filtering support to enos-run. This allows us to automatically exclude scenarios that require a more recent version of Vault * Add maximum version filtering support to enos-run. 
This allows us to automatically exclude scenarios that require an older version of Vault * Fix Node 12 deprecation warnings * Rename enos-verify-stable to enos-release-testing-oss * Convert artifactory matrix into enos-release-testing-oss matrices * Add all Vault editions to Enos scenario matrices * Fix verify version with complex Vault edition metadata * Rename the crt-builder to ci-helper * Add more version helpers to ci-helper and Makefile * Update CODEOWNERS for quality team * Add support for filtering matrices by group and version constraints * Add support for pseudo random test scenario execution Signed-off-by: Ryan Cragun <me@ryan.ec>
2022-12-12 15:46:04 -05:00
# ci-get-date prints the build date (ci-helper.sh date).
.PHONY: ci-get-date
ci-get-date:
	@$(CURDIR)/scripts/ci-helper.sh date
# ci-get-revision prints the source revision (ci-helper.sh revision).
.PHONY: ci-get-revision
ci-get-revision:
	@$(CURDIR)/scripts/ci-helper.sh revision
# ci-get-version-package prints the version package (ci-helper.sh version-package).
.PHONY: ci-get-version-package
ci-get-version-package:
	@$(CURDIR)/scripts/ci-helper.sh version-package
# ci-prepare-ent-legal prepares Enterprise legal/licensing files
# (ci-helper.sh prepare-ent-legal).
.PHONY: ci-prepare-ent-legal
ci-prepare-ent-legal:
	@$(CURDIR)/scripts/ci-helper.sh prepare-ent-legal
# ci-prepare-ce-legal prepares Community Edition legal/licensing files
# (ci-helper.sh prepare-ce-legal).
.PHONY: ci-prepare-ce-legal
ci-prepare-ce-legal:
	@$(CURDIR)/scripts/ci-helper.sh prepare-ce-legal
# ci-copywriteheaders runs `copywrite headers --plan` over the repository, then
# applies per-directory exceptions for the api, sdk, and shamir modules (which
# carry MPL headers) via scripts/copywrite-exceptions.sh. Each recipe line runs
# in its own shell, so each `cd X && ...` is self-contained.
.PHONY: ci-copywriteheaders
ci-copywriteheaders:
	copywrite headers --plan
	# Special case for MPL headers in /api, /sdk, and /shamir
	cd api && $(CURDIR)/scripts/copywrite-exceptions.sh
	cd sdk && $(CURDIR)/scripts/copywrite-exceptions.sh
	cd shamir && $(CURDIR)/scripts/copywrite-exceptions.sh
[QT-645] Restructure dev tools (#24559) We're on a quest to reduce our pipeline execution time to both enhance our developer productivity but also to reduce the overall cost of the CI pipeline. The strategy we use here reduces workflow execution time and network I/O cost by reducing our module cache size and using binary external tools when possible. We no longer download modules and build many of the external tools thousands of times a day. Our previous process of installing internal and external developer tools was scattered and inconsistent. Some tools were installed via `go generate -tags tools ./tools/...`, others via various `make` targets, and some only in Github Actions workflows. This process led to some undesirable side effects: * The modules of some dev and test tools were included with those of the Vault project. This leads to us having to manage our own Go modules with those of external tools. Prior to Go 1.16 this was the recommended way to handle external tools, but now `go install tool@version` is the recommended way to handle external tools that need to be build from source as it supports specific versions but does not modify the go.mod. * Due to Github cache constraints we combine our build and test Go module caches together, but having our developer tools as deps in our module results in a larger cache which is downloaded on every build and test workflow runner. Removing the external tools that were included in our go.mod reduced the expanded module cache by size by ~300MB, thus saving time and network I/O costs when downloading the module cache. * Not all of our developer tools were included in our modules. Some were being installed with `go install` or `go run`, so they didn't take advantage of a single module cache. This resulted in us downloading Go modules on every CI and Build runner in order to build our external tools. * Building our developer tools from source in CI is slow. 
Where possible we can prefer to use pre-built binaries in CI workflows. No more module download or tool compiles if we can avoid them. I've refactored how we define internal and external build tools in our Makefile and added several new targets to handle both building the developer tools locally for development and verifying that they are available. This allows for an easy developer bootstrap while also supporting installation of many of the external developer tools from pre-build binaries in CI. This reduces our network IO and run time across nearly all of our actions runners. While working on this I caught and resolved a few unrelated issue: * Both our Go and Proto format checks we're being run incorrectly. In CI they we're writing changes but not failing if changes were detected. The Go was less of a problem as we have git hooks that are intended to enforce formatting, however we drifted over time. * Our Git hooks couldn't handle removing a Go file without failing. I moved the diff check into the new Go helper and updated it to handle removing files. * I combined a few separate scripts and into helpers and added a few new capabilities. * I refactored how we install Go modules to make it easier to download and tidy all of the projects go.mod's. * Refactor our internal and external tool installation and verification into a tools.sh helper. * Combined more complex Go verification into `scripts/go-helper.sh` and utilize it in the `Makefile` and git commit hooks. * Add `Makefile` targets for executing our various tools.sh helpers. * Update our existing `make` targets to use new tool targets. * Normalize our various scripts and targets output to have a consistent output format. * In CI, install many of our external dependencies as binaries wherever possible. When not possible we'll build them from scratch but not mess with the shared module cache. * [QT-641] Remove our external build tools from our project Go modules. 
* [QT-641] Remove extraneous `go list`'s from our `set-up-to` composite action. * Fix formatting and regen our protos Signed-off-by: Ryan Cragun <me@ryan.ec>
2024-01-09 12:50:46 -05:00
# Aggregate phony declaration for command-style targets, so a stray file with a
# matching name can never mask them. NOTE(review): this list appears to have
# drifted — e.g. `semgrep`, `semgrep-ci`, and `build-ci` are listed but no such
# targets are visible here; confirm against the full file before pruning.
.PHONY: all bin default prep test vet bootstrap fmt fmtcheck mysql-database-plugin mysql-legacy-database-plugin cassandra-database-plugin influxdb-database-plugin postgresql-database-plugin mssql-database-plugin hana-database-plugin mongodb-database-plugin ember-dist ember-dist-dev static-dist static-dist-dev assetcheck check-vault-in-path packages build build-ci semgrep semgrep-ci vet-codechecker ci-vet-codechecker clean dev
# The two ember asset builds must not run concurrently under `make -j`.
.NOTPARALLEL: ember-dist ember-dist-dev
# all-packages prints every Go package in the main, sdk, and api modules,
# one import path per line. printf applies the format to each word produced
# by the shell expansion of $(ALL_PACKAGES), which yields the same
# one-per-line output as piping echo through tr.
.PHONY: all-packages
all-packages:
	@printf '%s\n' $(ALL_PACKAGES)
2024-03-26 06:30:30 -04:00
.PHONY: clean
clean:
@echo "==> Cleaning..."