diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..81fd6345a --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,16 @@ + + +### Requires + + +### Supports + diff --git a/.github/workflows/golangci_lint.yml b/.github/workflows/golangci_lint.yml index 2b787841e..81b4839fe 100644 --- a/.github/workflows/golangci_lint.yml +++ b/.github/workflows/golangci_lint.yml @@ -5,23 +5,13 @@ on: [pull_request] jobs: golangci-lint: runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + actions: read steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Set up Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: "go.mod" - - name: Build binary - shell: bash - run: go build ./... - name: golangci-lint - uses: golangci/golangci-lint-action@aaa42aa0628b4ae2578232a66b541047968fac86 # v6.1.0 + uses: smartcontractkit/.github/actions/ci-lint-go@2ac9d97a83a5edded09af7fcf4ea5bce7a4473a4 # v0.2.6 with: - version: v1.60.1 - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: true - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - - name: Print lint report artifact - if: failure() - shell: bash - run: cat ./golangci-lint-report.xml + golangci-lint-version: v1.62.2 + \ No newline at end of file diff --git a/.github/workflows/llm-action-error-reporter.yml b/.github/workflows/llm-action-error-reporter.yml new file mode 100644 index 000000000..98622317a --- /dev/null +++ b/.github/workflows/llm-action-error-reporter.yml @@ -0,0 +1,23 @@ +name: LLM Action Error Reporter +on: + workflow_run: + workflows: ["PKG Build and Test"] # As soon as one of the listed worfklows is completed, reporter is triggered + types: + - completed + +jobs: + analyze_logs: + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + 
repository-projects: read + actions: read + steps: + - name: Analyze logs + uses: smartcontractkit/.github/actions/llm-action-error-reporter@d125ca9fe5e3b410de7c6db4a4ce3ed7a0728cd6 # v0.3.0 + with: + parent-workflow-conclusion: ${{ github.event.workflow_run.conclusion }} + skip-on-success: true # Skip posting comment if no errors are found + gh-token: ${{ github.token }} + openai-api-key: ${{ secrets.OPENAI_API_KEY }} \ No newline at end of file diff --git a/.github/workflows/observability.yml b/.github/workflows/observability.yml index 7c9653f1e..f70124109 100644 --- a/.github/workflows/observability.yml +++ b/.github/workflows/observability.yml @@ -21,7 +21,7 @@ jobs: go-version-file: "go.mod" - name: Build - run: go build -v ./... + run: make build - name: Unit Tests - run: go test -v ./... + run: make test diff --git a/.github/workflows/pkg.yml b/.github/workflows/pkg.yml index 4140ba01a..fbe228286 100644 --- a/.github/workflows/pkg.yml +++ b/.github/workflows/pkg.yml @@ -18,7 +18,7 @@ jobs: run: go build -v ./... - name: Unit Tests - run: GORACE="log_path=$PWD/race" go test -race ./... -coverpkg=./... -coverprofile=pkg_coverage.out + run: GORACE="log_path=$PWD/race" go test -race ./... -coverpkg=./... -coverprofile=coverage.txt - name: Print Races if: failure() @@ -42,20 +42,13 @@ jobs: if: failure() run: find . 
-type f|fgrep '/testdata/fuzz/'|while read f; do echo $f; cat $f; done - - name: Upload Fuzz Tests Failing Inputs - if: failure() - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - with: - name: failing-fuzz-inputs - path: "**/testdata/fuzz/**" - - name: Upload Go test results if: always() uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: go-test-results path: | - ./pkg_coverage.out + ./coverage.txt ./race.* check-tidy: @@ -70,10 +63,12 @@ jobs: - name: Ensure "make gomodtidy" has been run run: | make gomodtidy - git diff --minimal --exit-code + git add --all + git diff --minimal --cached --exit-code - name: Ensure "make generate" has been run run: | make rm-mocked make rm-builders make generate - git diff --stat --exit-code + git add --all + git diff --stat --cached --exit-code diff --git a/.github/workflows/sonar-scan.yml b/.github/workflows/sonar-scan.yml index a1c2b77fb..8361b2eb2 100644 --- a/.github/workflows/sonar-scan.yml +++ b/.github/workflows/sonar-scan.yml @@ -14,11 +14,11 @@ jobs: ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - name: Wait for workflows - uses: smartcontractkit/chainlink-github-actions/utils/wait-for-workflows@e29366cdecfe6befff9ab8c3cfe4825218505d58 # v2.3.16 + uses: smartcontractkit/.github/actions/wait-for-workflows@dca9ab89d734e82738b8aa52bd25d09b205ec6ee # v0.1.1 with: - max-timeout: "900" + max-timeout: "1200" polling-interval: "30" - exclude-workflow-names: "" + exclude-workflow-names: "Build External Repositories, Observability Lib Checks, Run Benchmarks, LLM Action Error Reporter" exclude-workflow-ids: "" github-token: ${{ secrets.GITHUB_TOKEN }} env: @@ -30,46 +30,12 @@ jobs: runs-on: ubuntu-latest if: always() steps: - - name: Checkout the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - with: - fetch-depth: 0 # fetches all history for all tags and branches to provide more metadata 
for sonar reports - - - name: Download Golangci-lint report - if: always() - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 - with: - workflow: golangci_lint.yml - workflow_conclusion: "" - name_is_regexp: true - name: golangci-lint-report - if_no_artifact_found: warn - - - name: Download Go PKG test reports - if: always() - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 - with: - workflow: pkg.yml - workflow_conclusion: "" - name_is_regexp: true - name: go-test-results - if_no_artifact_found: warn - - - name: Set SonarQube Report Paths - if: always() - id: sonarqube_report_paths - shell: bash - run: | - echo "sonarqube_coverage_report_paths=$(find -type f -name '*coverage.out' -printf "%p,")" >> $GITHUB_OUTPUT - echo "sonarqube_golangci_report_paths=$(find -type f -name 'golangci-lint-report.xml' -printf "%p,")" >> $GITHUB_OUTPUT - - name: SonarQube Scan - if: always() - uses: sonarsource/sonarqube-scan-action@53c3e3207fe4b8d52e2f1ac9d6eb1d2506f626c0 # v2.0.2 + uses: smartcontractkit/.github/actions/ci-sonarqube-go@5f4a9c9c3407dd499a1ebbc658a45b9beb9bf675 # v0.3.0 with: - args: > - -Dsonar.go.coverage.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_coverage_report_paths }} - -Dsonar.go.golangci-lint.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_golangci_report_paths }} - env: - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} + # sonarqube inputs + include-lint: "true" + test-report-workflow: pkg.yml + lint-report-workflow: golangci_lint.yml + sonar-token: ${{ secrets.SONAR_TOKEN }} + sonar-host-url: ${{ secrets.SONAR_HOST_URL }} diff --git a/.gitignore b/.gitignore index 6d1bc4012..493ae60ed 100644 --- a/.gitignore +++ b/.gitignore @@ -19,11 +19,13 @@ **/testdata/fuzz/* # Dependency directories (remove the comment below to include it) -# vendor/ +vendor/ # IntelliJ IDE .idea -vendor/ +# Visual Studio Code +.vscode +# 
Generated files *.wasm diff --git a/.golangci.yml b/.golangci.yml index 8c1a4b166..8b4070989 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -2,23 +2,29 @@ run: timeout: 15m0s linters: enable: + - containedctx + - depguard + - errname + - errorlint - exhaustive - exportloopref - - revive + - fatcontext + - ginkgolinter - goimports - gosec + - loggercheck + - mirror - misspell + - noctx + - perfsprint + - prealloc + - revive - rowserrcheck - - errorlint - - unconvert + - spancheck - sqlclosecheck - - noctx + - testifylint + - unconvert - whitespace - - depguard - - containedctx - - fatcontext - - mirror - - loggercheck linters-settings: exhaustive: default-signifies-exhaustive: true diff --git a/.tool-versions b/.tool-versions index b82d197d7..167471122 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,5 +1,5 @@ -golang 1.22.7 +golang 1.23.3 protoc 25.1 protoc-gen-go-grpc 1.3.0 -golangci-lint 1.55.2 +golangci-lint 1.62.2 mockery 2.43.2 diff --git a/Makefile b/Makefile index 8955675da..06a572d67 100644 --- a/Makefile +++ b/Makefile @@ -38,7 +38,7 @@ generate: mockery install-protoc gomods mockery .PHONY: lint-workspace lint -GOLANGCI_LINT_VERSION := 1.60.1 +GOLANGCI_LINT_VERSION := 1.62.2 GOLANGCI_LINT_COMMON_OPTS := --max-issues-per-linter 0 --max-same-issues 0 GOLANGCI_LINT_DIRECTORY := ./golangci-lint diff --git a/go.mod b/go.mod index 062c250d4..7bd59cbd5 100644 --- a/go.mod +++ b/go.mod @@ -1,11 +1,9 @@ module github.com/smartcontractkit/chainlink-common -go 1.22.0 - -toolchain go1.22.7 +go 1.23.3 require ( - github.com/andybalholm/brotli v1.1.0 + github.com/andybalholm/brotli v1.1.1 github.com/atombender/go-jsonschema v0.16.1-0.20240916205339-a74cd4e2851c github.com/bytecodealliance/wasmtime-go/v23 v23.0.0 github.com/confluentinc/confluent-kafka-go/v2 v2.3.0 @@ -19,48 +17,56 @@ require ( github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.0.1 github.com/hashicorp/consul/sdk v0.16.0 github.com/hashicorp/go-hclog v1.5.0 - 
github.com/hashicorp/go-plugin v1.6.2-0.20240829161738-06afb6d7ae99 + github.com/hashicorp/go-plugin v1.6.2 github.com/iancoleman/strcase v0.3.0 github.com/invopop/jsonschema v0.12.0 + github.com/jackc/pgx/v4 v4.18.3 github.com/jmoiron/sqlx v1.4.0 github.com/jonboulle/clockwork v0.4.0 github.com/jpillora/backoff v1.0.0 github.com/lib/pq v1.10.9 github.com/linkedin/goavro/v2 v2.12.0 + github.com/marcboeker/go-duckdb v1.8.3 github.com/pelletier/go-toml/v2 v2.2.0 github.com/prometheus/client_golang v1.17.0 github.com/riferrei/srclient v0.5.4 github.com/santhosh-tekuri/jsonschema/v5 v5.2.0 + github.com/scylladb/go-reflectx v1.0.1 github.com/shopspring/decimal v1.4.0 github.com/smartcontractkit/grpc-proxy v0.0.0-20240830132753-a7e17fec5ab7 github.com/smartcontractkit/libocr v0.0.0-20241007185508-adbe57025f12 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 - go.opentelemetry.io/otel v1.28.0 + go.opentelemetry.io/otel v1.30.0 go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9 + go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0 go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0 go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0 - go.opentelemetry.io/otel/log v0.4.0 - go.opentelemetry.io/otel/metric v1.28.0 - go.opentelemetry.io/otel/sdk v1.28.0 - go.opentelemetry.io/otel/sdk/log v0.4.0 - go.opentelemetry.io/otel/sdk/metric v1.28.0 - go.opentelemetry.io/otel/trace v1.28.0 + go.opentelemetry.io/otel/log v0.6.0 + go.opentelemetry.io/otel/metric v1.30.0 + 
go.opentelemetry.io/otel/sdk v1.30.0 + go.opentelemetry.io/otel/sdk/log v0.6.0 + go.opentelemetry.io/otel/sdk/metric v1.30.0 + go.opentelemetry.io/otel/trace v1.30.0 go.uber.org/goleak v1.3.0 + go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 - golang.org/x/crypto v0.27.0 + golang.org/x/crypto v0.28.0 golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 - golang.org/x/tools v0.25.0 - gonum.org/v1/gonum v0.15.0 - google.golang.org/grpc v1.65.0 - google.golang.org/protobuf v1.34.2 + golang.org/x/tools v0.26.0 + gonum.org/v1/gonum v0.15.1 + google.golang.org/grpc v1.67.1 + google.golang.org/protobuf v1.35.1 sigs.k8s.io/yaml v1.4.0 ) require ( + github.com/apache/arrow-go/v18 v18.0.0 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/buger/jsonparser v1.1.1 // indirect @@ -72,20 +78,33 @@ require ( github.com/go-logr/stdr v1.2.2 // indirect github.com/go-playground/locales v0.13.0 // indirect github.com/go-playground/universal-translator v0.17.0 // indirect + github.com/goccy/go-json v0.10.3 // indirect github.com/goccy/go-yaml v1.12.0 // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect; indirec + github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect + github.com/google/flatbuffers v24.3.25+incompatible // indirect github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect github.com/hashicorp/yamux v0.1.1 // indirect + github.com/jackc/chunkreader/v2 v2.0.1 // indirect + github.com/jackc/pgconn v1.14.3 // indirect + github.com/jackc/pgio v1.0.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgproto3/v2 v2.3.3 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/pgtype v1.14.0 // indirect + github.com/klauspost/compress v1.17.11 // indirect + 
github.com/klauspost/cpuid/v2 v2.2.8 // indirect github.com/leodido/go-urn v1.2.0 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mr-tron/base58 v1.2.0 // indirect github.com/oklog/run v1.0.0 // indirect + github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.4.1-0.20230718164431-9a2bf3000d16 // indirect @@ -95,16 +114,16 @@ require ( github.com/stretchr/objx v0.5.2 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/x448/float16 v0.8.4 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 // indirect + github.com/zeebo/xxh3 v1.0.2 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect - go.uber.org/multierr v1.11.0 // indirect golang.org/x/mod v0.21.0 // indirect - golang.org/x/net v0.29.0 // indirect + golang.org/x/net v0.30.0 // indirect golang.org/x/sync v0.8.0 // indirect - golang.org/x/sys v0.25.0 // indirect - golang.org/x/text v0.18.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.19.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum 
b/go.sum index aa0c53162..623f00962 100644 --- a/go.sum +++ b/go.sum @@ -4,12 +4,18 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4 github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/hcsshim v0.9.4 h1:mnUj0ivWy6UzbB1uLFqKR6F+ZyiDc7j4iGgHTpO+5+I= github.com/Microsoft/hcsshim v0.9.4/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= -github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= -github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/apache/arrow-go/v18 v18.0.0 h1:1dBDaSbH3LtulTyOVYaBCHO3yVRwjV+TZaqn3g6V7ZM= +github.com/apache/arrow-go/v18 v18.0.0/go.mod h1:t6+cWRSmKgdQ6HsxisQjok+jBpKGhRDiqcf3p0p/F+A= +github.com/apache/thrift v0.21.0 h1:tdPmh/ptjE1IJnhbhrcl2++TauVjy242rkV/UzJChnE= +github.com/apache/thrift v0.21.0/go.mod h1:W1H8aR/QRtYNvrPeFXBtobyRkd0/YVhTc6i07XIAgDw= github.com/atombender/go-jsonschema v0.16.1-0.20240916205339-a74cd4e2851c h1:cxQVoh6kY+c4b0HUchHjGWBI8288VhH50qxKG3hdEg0= github.com/atombender/go-jsonschema v0.16.1-0.20240916205339-a74cd4e2851c/go.mod h1:3XzxudkrYVUvbduN/uI2fl4lSrMSzU0+3RCu2mpnfx8= 
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= @@ -29,12 +35,17 @@ github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UF github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/confluentinc/confluent-kafka-go/v2 v2.3.0 h1:icCHutJouWlQREayFwCc7lxDAhws08td+W3/gdqgZts= github.com/confluentinc/confluent-kafka-go/v2 v2.3.0/go.mod h1:/VTy8iEpe6mD9pkCH5BhijlUl8ulUXymKv1Qig5Rgb8= github.com/containerd/cgroups v1.0.4 h1:jN/mbWBEaz+T1pi5OFtnkQ+8qnmEbAr1Oo1FRm5B0dA= github.com/containerd/cgroups v1.0.4/go.mod h1:nLNQtsF7Sl2HxNebu77i1R0oDlhiTG+kO4JTrUzo6IA= github.com/containerd/containerd v1.6.8 h1:h4dOFDwzHmqFEP754PgfgTeVXFnLiRc6kiqC7tplDJs= github.com/containerd/containerd v1.6.8/go.mod h1:By6p5KqPK0/7/CgO/A6t/Gz+CUYUu2zf1hUaaymVXB0= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -61,6 +72,8 @@ github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADi github.com/fxamacker/cbor/v2 v2.5.0/go.mod 
h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= github.com/go-json-experiment/json v0.0.0-20231102232822-2e55bd4e08b0 h1:ymLjT4f35nQbASLnvxEde4XOBL+Sn7rFuV+FOJqkljg= github.com/go-json-experiment/json v0.0.0-20231102232822-2e55bd4e08b0/go.mod h1:6daplAwHHGbUGib4990V3Il26O0OC4aRyvewaaAihaA= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -76,10 +89,15 @@ github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7a github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/goccy/go-yaml v1.12.0 h1:/1WHjnMsI1dlIBQutrvSMGZRQufVO3asrHfTwfACoPM= github.com/goccy/go-yaml v1.12.0/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= 
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -103,6 +121,8 @@ github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6 github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/flatbuffers v24.3.25+incompatible h1:CX395cjN9Kke9mmalRoL3d81AtFUxJM+yDthflgJGkI= +github.com/google/flatbuffers v24.3.25+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -112,6 +132,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -125,14 +146,61 @@ github.com/hashicorp/consul/sdk v0.16.0 h1:SE9m0W6DEfgIVCJX7xU+iv/hUl4m/nxqMTnCd github.com/hashicorp/consul/sdk v0.16.0/go.mod h1:7pxqqhqoaPqnBnzXD1StKed62LqJeClzVsUEy85Zr0A= 
github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= -github.com/hashicorp/go-plugin v1.6.2-0.20240829161738-06afb6d7ae99 h1:OSQYEsRT3tRttZkk6zyC3aAaliwd7Loi/KgXgXxGtwA= -github.com/hashicorp/go-plugin v1.6.2-0.20240829161738-06afb6d7ae99/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= +github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog= +github.com/hashicorp/go-plugin v1.6.2/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q= github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod 
h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= +github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= +github.com/jackc/pgproto3/v2 
v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw= +github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= +github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod 
h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= @@ -143,12 +211,28 @@ github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFF github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= +github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= +github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= +github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= +github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= 
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/linkedin/goavro/v2 v2.9.7/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA= @@ -158,10 +242,16 @@ github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamh github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/marcboeker/go-duckdb v1.8.3 h1:ZkYwiIZhbYsT6MmJsZ3UPTHrTZccDdM4ztoqSlEMXiQ= +github.com/marcboeker/go-duckdb v1.8.3/go.mod h1:C9bYRE1dPYb1hhfu/SSomm78B0FXmNgRvv6YBW/Hooc= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.5/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -171,8 +261,14 @@ github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= +github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= +github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= +github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/moby/sys/mount v0.3.3 h1:fX1SVkXFJ47XWDoeFW4Sq7PdQJnV2QIDZAqjNqgEjUs= github.com/moby/sys/mount v0.3.3/go.mod h1:PBaEorSNTLG5t/+4EgukEQVlAvVEc6ZjTySwKdqp5K0= github.com/moby/sys/mountinfo v0.6.2 h1:BzJjoreD5BMFNmD9Rus6gdd1pLuecOFPt8wC+Vygl78= @@ -193,6 +289,9 @@ github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b 
github.com/opencontainers/runc v1.1.3/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo= github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= +github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -210,15 +309,26 @@ github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwa github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY= github.com/riferrei/srclient v0.5.4 h1:dfwyR5u23QF7beuVl2WemUY2KXh5+Sc4DHKyPXBNYuc= github.com/riferrei/srclient v0.5.4/go.mod h1:vbkLmWcgYa7JgfPvuy/+K8fTS0p1bApqadxrxi/S1MI= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo= github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U= github.com/santhosh-tekuri/jsonschema/v5 v5.0.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= 
github.com/santhosh-tekuri/jsonschema/v5 v5.2.0 h1:WCcC4vZDS1tYNxjWlwRJZQy28r8CMoggKnxNzxsVDMQ= github.com/santhosh-tekuri/jsonschema/v5 v5.2.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/scylladb/go-reflectx v1.0.1 h1:b917wZM7189pZdlND9PbIJ6NQxfDPfBvUaQ7cjj1iZQ= +github.com/scylladb/go-reflectx v1.0.1/go.mod h1:rWnOfDIRWBGN0miMLIcoPt/Dhi2doCMZqwMCJ3KupFc= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartcontractkit/grpc-proxy v0.0.0-20240830132753-a7e17fec5ab7 h1:12ijqMM9tvYVEm+nR826WsrNi6zCKpwBhuApq127wHs= @@ -226,11 +336,14 @@ github.com/smartcontractkit/grpc-proxy v0.0.0-20240830132753-a7e17fec5ab7/go.mod github.com/smartcontractkit/libocr v0.0.0-20241007185508-adbe57025f12 h1:NzZGjaqez21I3DU7objl3xExTH4fxYvzTqar8DC6360= github.com/smartcontractkit/libocr v0.0.0-20241007185508-adbe57025f12/go.mod h1:fb1ZDVXACvu4frX3APHZaEBp0xi1DIm34DcA0CwTsZM= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod 
h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= @@ -248,59 +361,91 @@ github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/ github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= +github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= +github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= +github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= 
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 h1:vS1Ao/R55RNV4O7TA2Qopok8yN+X0LIP6RVWLFkprck= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0/go.mod h1:BMsdeOxN04K0L5FNUBfjFdvwWGNe/rkmSwH4Aelu/X0= -go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= -go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= +go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9 h1:UiRNKd1OgqsLbFwE+wkAWTdiAxXtCBqKIHeBIse4FUA= go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9/go.mod h1:eqZlW3pJWhjyexnDPrdQxix1pn0wwhI4AO4GKpP/bMI= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 h1:QSKmLBzbFULSyHzOdO9JsN9lpE4zkrz1byYGmJecdVE= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0/go.mod h1:sTQ/NH8Yrirf0sJ5rWqVu+oT82i4zL9FaF6rWcqnptM= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 h1:U2guen0GhqH8o/G2un8f/aG/y++OuW6MyCo6hT9prXk= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0/go.mod h1:yeGZANgEcpdx/WK0IvvRFC+2oLiMS2u4L/0Rj2M2Qr0= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 h1:VrMAbeJz4gnVDg2zEzjHG4dEH86j4jO6VYB+NgtGD8s= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0/go.mod h1:qqN/uFdpeitTvm+JDqqnjm517pmQRYxTORbETHq5tOc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 h1:lsInsfvhVIfOI6qHVyysXMNDnjO9Npvl7tlDPJFBVd4= 
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0/go.mod h1:KQsVNh4OjgjTG0G6EiNi1jVpnaeeKsKMRwbLN+f1+8M= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 h1:R3X6ZXmNPRR8ul6i3WgFURCHzaXjHdm0karRG/+dj3s= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 h1:umZgi92IyxfXd/l4kaDhnKgY8rnN/cZcF1LKc6I8OQ8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0/go.mod h1:4lVs6obhSVRb1EW5FhOuBTyiQhtRtAnnva9vD3yRfq8= go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0 h1:0MH3f8lZrflbUWXVxyBg/zviDFdGE062uKh5+fu8Vv0= go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0/go.mod h1:Vh68vYiHY5mPdekTr0ox0sALsqjoVy0w3Os278yX5SQ= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0 h1:BJee2iLkfRfl9lc7aFmBwkWxY/RI1RDdXepSF6y8TPE= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0/go.mod h1:DIzlHs3DRscCIBU3Y9YSzPfScwnYnzfnCd4g8zA7bZc= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0 h1:EVSnY9JbEEW92bEkIYOVMw4q1WJxIAGoFTrtYOzWuRQ= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0/go.mod h1:Ea1N1QQryNXpCD0I1fdLibBAIpQuBkznMmkdKrapk1Y= -go.opentelemetry.io/otel/log v0.4.0 h1:/vZ+3Utqh18e8TPjuc3ecg284078KWrR8BRz+PQAj3o= -go.opentelemetry.io/otel/log v0.4.0/go.mod h1:DhGnQvky7pHy82MIRV43iXh3FlKN8UUKftn0KbLOq6I= -go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= -go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= -go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= -go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= -go.opentelemetry.io/otel/sdk/log v0.4.0 h1:1mMI22L82zLqf6KtkjrRy5BbagOTWdJsqMY/HSqILAA= -go.opentelemetry.io/otel/sdk/log v0.4.0/go.mod 
h1:AYJ9FVF0hNOgAVzUG/ybg/QttnXhUePWAupmCqtdESo= -go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08= -go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg= -go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= -go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/otel/log v0.6.0 h1:nH66tr+dmEgW5y+F9LanGJUBYPrRgP4g2EkmPE3LeK8= +go.opentelemetry.io/otel/log v0.6.0/go.mod h1:KdySypjQHhP069JX0z/t26VHwa8vSwzgaKmXtIB3fJM= +go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= +go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ= +go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE= +go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg= +go.opentelemetry.io/otel/sdk/log v0.6.0 h1:4J8BwXY4EeDE9Mowg+CyhWVBhTSLXVXodiXxS/+PGqI= +go.opentelemetry.io/otel/sdk/log v0.6.0/go.mod h1:L1DN8RMAduKkrwRAFDEX3E3TLOq46+XMGSbUfHU/+vE= +go.opentelemetry.io/otel/sdk/metric v1.30.0 h1:QJLT8Pe11jyHBHfSAgYH7kEmT24eX792jZO1bo4BXkM= +go.opentelemetry.io/otel/sdk/metric v1.30.0/go.mod h1:waS6P3YqFNzeP01kuo/MBBYqaoBJl7efRQHOaydhy1Y= +go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc= +go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o= go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod 
h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= -golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= +golang.org/x/crypto 
v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= @@ -311,11 +456,13 @@ golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= -golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= +golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -326,8 +473,14 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -337,56 +490,68 @@ golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210331175145-43e1dd70ce54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= -golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= -golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod 
h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.25.0 h1:oFU9pkj/iJgs+0DT+VMHrx+oBKs/LJMV+Uvg78sl+fE= -golang.org/x/tools v0.25.0/go.mod h1:/vtpO8WL1N9cQC3FN5zPqb//fRXskFHbLKk4OW1Q7rg= +golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= +golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= 
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= -gonum.org/v1/gonum v0.15.0 h1:2lYxjRbTYyxkJxlhC+LvJIx3SsANPdRybu1tGj9/OrQ= -gonum.org/v1/gonum v0.15.0/go.mod h1:xzZVBJBtS+Mz4q0Yl2LJTk+OxOg4jiXZ7qBoM0uISGo= +gonum.org/v1/gonum v0.15.1 h1:FNy7N6OUZVUaWG9pTiD+jlhdQ3lMP+/LcTpJ6+a8sQ0= +gonum.org/v1/gonum v0.15.1/go.mod h1:eZTZuRFrzu5pcyjN5wJhcIhnUdNijYxX1T2IcrOGY0o= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20210401141331-865547bb08e2/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd h1:BBOTEWLuuEGQy9n1y9MhVJ9Qt0BDu21X8qZs71/uPZo= -google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:fO8wJzT2zbQbAjbIoos1285VfEIYKDDY+Dt+WpTkh6g= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd h1:6TEm2ZxXoQmFWFlt1vNxvVOa1Q0dXFQD1m/rYjXmS0E= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 h1:hjSy6tcFQZ171igDaN5QHOw2n6vx40juYbC/x67CEhc= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= -google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= +google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -398,17 +563,21 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= -google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/observability-lib/Makefile b/observability-lib/Makefile index ad7795deb..be7def023 100644 --- a/observability-lib/Makefile +++ b/observability-lib/Makefile @@ -12,4 +12,8 @@ lint: .PHONY: test test: - go test -v ./... \ No newline at end of file + go test ./... + +.PHONY: update +update: + go test ./dashboards/... 
-update=1 diff --git a/observability-lib/README.md b/observability-lib/README.md index 27b0dad0f..8c4e4d249 100644 --- a/observability-lib/README.md +++ b/observability-lib/README.md @@ -15,8 +15,7 @@ The observability-lib is structured as follows: ```shell observability-lib/ api/ # Grafana HTTP API Client to interact with resources - cmd/ # CLI to interact deploy or generateJSON from dashboards defined in folder below - dashboards/ # Dashboards definitions + cmd/ # CLI grafana/ # grafana-foundations-sdk abstraction to manipulate grafana resources ``` @@ -89,43 +88,56 @@ func main() { ``` -More advanced examples can be found in the [dashboards](./dashboards) folder : -- [DON OCR](./dashboards/atlas-don/component.go) -- [Capabilities](./dashboards/capabilities/component.go) -- [Node General](./dashboards/core-node/component.go) -- [Node Components](./dashboards/core-node-components/component.go) -- [Kubernetes Resources](./dashboards/k8s-resources/component.go) -- [NOP OCR Health](./dashboards/nop-ocr/component.go) - ## Cmd Usage -The CLI can be used to : -- Deploy dashboards and alerts to grafana -- Generate JSON from dashboards defined in the `dashboards` folder +CLI to manipulate grafana resources + +### Contact Point -`func NewDashboard(props *Props)` in each [dashboards](./dashboards) packages is called from [cmd](./cmd/builder.go) to deploy or generate JSON from the dashboard. 
+#### List -Example to deploy a dashboard to grafana instance using URL and token: ```shell -make build -./observability-lib deploy \ - --dashboard-name DashboardName \ - --dashboard-folder FolderName \ - --grafana-url $GRAFANA_URL \ - --grafana-token $GRAFANA_TOKEN \ - --type core-node \ - --platform kubernetes \ - --metrics-datasource Prometheus +./observability-lib api contact-point list \ + --grafana-url http://localhost:3000 \ + --grafana-token ``` -To see how to get a grafana token you can check this [page](https://grafana.com/docs/grafana/latest/administration/service-accounts/) -Example to generate JSON from a dashboard defined in the `dashboards` folder: +#### Delete + ```shell -make build -./observability-lib generate \ - --dashboard-name DashboardName \ - --type core-node-components \ - --platform kubernetes +./observability-lib api contact-point delete \ + --grafana-url http://localhost:3000 \ + --grafana-token +``` + +### Dashboard + +#### Delete + +```shell +./observability-lib api dashboard delete \ + --grafana-url http://localhost:3000 \ + --grafana-token +``` + +### Notification Policy + +#### List + +```shell +./observability-lib api notification-policy list \ + --grafana-url http://localhost:3000 \ + --grafana-token +``` + +#### Delete + +```shell +./observability-lib api notification-policy delete \ + --grafana-url http://localhost:3000 \ + --grafana-token \ + --matchers key,=,value \ + --matchers key2,=,value2 ``` ## Makefile Usage diff --git a/observability-lib/api/contact-point.go b/observability-lib/api/contact-point.go index b31f5bc0a..331a561d6 100644 --- a/observability-lib/api/contact-point.go +++ b/observability-lib/api/contact-point.go @@ -80,7 +80,7 @@ func (c *Client) DeleteContactPoint(uid string) (DeleteContactPointResponse, *re } statusCode := resp.StatusCode() - if statusCode != 204 { + if statusCode != 202 { return DeleteContactPointResponse{}, resp, fmt.Errorf("error deleting contact point, received unexpected status code %d: 
%s", statusCode, resp.String()) } @@ -95,6 +95,7 @@ func (c *Client) PostContactPoint(contactPoint alerting.ContactPoint) (PostConta resp, err := c.resty.R(). SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). SetBody(contactPoint). SetResult(&grafanaResp). Post("/api/v1/provisioning/contact-points") @@ -119,6 +120,7 @@ func (c *Client) PutContactPoint(uid string, contactPoint alerting.ContactPoint) resp, err := c.resty.R(). SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). SetBody(contactPoint). SetResult(&grafanaResp). Put(fmt.Sprintf("/api/v1/provisioning/contact-points/%s", uid)) diff --git a/observability-lib/api/datasource.go b/observability-lib/api/datasource.go index 1deefebfb..ee67a9be6 100644 --- a/observability-lib/api/datasource.go +++ b/observability-lib/api/datasource.go @@ -27,7 +27,7 @@ func (c *Client) GetDataSourceByName(name string) (*Datasource, *resty.Response, statusCode := resp.StatusCode() if statusCode != 200 { - return nil, resp, fmt.Errorf("error fetching datasource, received unexpected status code %d: %s", statusCode, resp.String()) + return nil, resp, fmt.Errorf("error fetching datasource %s, received unexpected status code %d: %s", name, statusCode, resp.String()) } return &grafanaResp, resp, nil } diff --git a/observability-lib/api/notification-policy.go b/observability-lib/api/notification-policy.go index 3a96f9b17..5812e81d2 100644 --- a/observability-lib/api/notification-policy.go +++ b/observability-lib/api/notification-policy.go @@ -8,31 +8,139 @@ import ( "github.com/grafana/grafana-foundation-sdk/go/alerting" ) -// AddNestedPolicy Add Nested Policy to Notification Policy Tree -func (c *Client) AddNestedPolicy(newNotificationPolicy alerting.NotificationPolicy) error { - notificationPolicyTree, _, err := c.GetNotificationPolicy() - if err != nil { - return err +func objectMatchersEqual(a alerting.ObjectMatchers, b alerting.ObjectMatchers) bool { + 
if len(a) != len(b) { + return false } - updatedNotificationPolicy := notificationPolicyTree - tagsEqual := false - for key, notificationPolicy := range updatedNotificationPolicy.Routes { - if notificationPolicy.ObjectMatchers != nil { - tagsEqual = reflect.DeepEqual(notificationPolicy.ObjectMatchers, newNotificationPolicy.ObjectMatchers) - if tagsEqual { - updatedNotificationPolicy.Routes[key] = newNotificationPolicy + + for i := range a { + foundMatch := false + for j := range b { + if reflect.DeepEqual(a[i], b[j]) { + foundMatch = true + break } } + if !foundMatch { + return false + } + } + + return true +} + +func PrintPolicyTree(policy alerting.NotificationPolicy, depth int) { + if depth == 0 { + fmt.Printf("| Root Policy | Receiver: %s\n", *policy.Receiver) + } + + for _, notificationPolicy := range policy.Routes { + for i := 0; i < depth; i++ { + fmt.Print("--") + } + fmt.Printf("| Matchers %s | Receiver: %s\n", *notificationPolicy.ObjectMatchers, *notificationPolicy.Receiver) + + if notificationPolicy.Routes != nil { + PrintPolicyTree(notificationPolicy, depth+1) + } + } +} + +func policyExist(parent alerting.NotificationPolicy, newNotificationPolicy alerting.NotificationPolicy) bool { + for _, notificationPolicy := range parent.Routes { + matchersEqual := false + if notificationPolicy.ObjectMatchers != nil { + matchersEqual = objectMatchersEqual(*notificationPolicy.ObjectMatchers, *newNotificationPolicy.ObjectMatchers) + } + receiversEqual := reflect.DeepEqual(notificationPolicy.Receiver, newNotificationPolicy.Receiver) + if matchersEqual && receiversEqual { + return true + } + if notificationPolicy.Routes != nil { + return policyExist(notificationPolicy, newNotificationPolicy) + } + } + return false +} + +func updateInPlace(parent *alerting.NotificationPolicy, newNotificationPolicy alerting.NotificationPolicy) bool { + for key, notificationPolicy := range parent.Routes { + matchersEqual := false + if notificationPolicy.ObjectMatchers != nil { + 
matchersEqual = objectMatchersEqual(*notificationPolicy.ObjectMatchers, *newNotificationPolicy.ObjectMatchers) + } + receiversEqual := reflect.DeepEqual(notificationPolicy.Receiver, newNotificationPolicy.Receiver) + if matchersEqual && receiversEqual { + parent.Routes[key] = newNotificationPolicy + return true + } + if notificationPolicy.Routes != nil { + return updateInPlace(&parent.Routes[key], newNotificationPolicy) + } } - if !tagsEqual { - updatedNotificationPolicy.Routes = append(updatedNotificationPolicy.Routes, newNotificationPolicy) + return false +} + +func deleteInPlace(parent *alerting.NotificationPolicy, newNotificationPolicy alerting.NotificationPolicy) bool { + for key, notificationPolicy := range parent.Routes { + matchersEqual := false + if notificationPolicy.ObjectMatchers != nil { + matchersEqual = objectMatchersEqual(*notificationPolicy.ObjectMatchers, *newNotificationPolicy.ObjectMatchers) + } + receiversEqual := reflect.DeepEqual(notificationPolicy.Receiver, newNotificationPolicy.Receiver) + if matchersEqual && receiversEqual { + if len(parent.Routes) == 1 { + parent.Routes = nil + return true + } else if len(parent.Routes) > 1 { + parent.Routes = append(parent.Routes[:key], parent.Routes[key+1:]...) 
+ return true + } else { + return false + } + } + if notificationPolicy.Routes != nil { + return deleteInPlace(&parent.Routes[key], newNotificationPolicy) + } } + return false +} - _, _, errPutNotificationPolicy := c.PutNotificationPolicy(alerting.NotificationPolicy(updatedNotificationPolicy)) +// DeleteNestedPolicy Delete Nested Policy from Notification Policy Tree +func (c *Client) DeleteNestedPolicy(newNotificationPolicy alerting.NotificationPolicy) error { + notificationPolicyTreeResponse, _, err := c.GetNotificationPolicy() + if err != nil { + return err + } + notificationPolicyTree := alerting.NotificationPolicy(notificationPolicyTreeResponse) + if !policyExist(notificationPolicyTree, newNotificationPolicy) { + return fmt.Errorf("notification policy not found") + } + deleteInPlace(¬ificationPolicyTree, newNotificationPolicy) + _, _, errPutNotificationPolicy := c.PutNotificationPolicy(notificationPolicyTree) if errPutNotificationPolicy != nil { return errPutNotificationPolicy } + return nil +} +// AddNestedPolicy Add Nested Policy to Notification Policy Tree +func (c *Client) AddNestedPolicy(newNotificationPolicy alerting.NotificationPolicy) error { + notificationPolicyTreeResponse, _, err := c.GetNotificationPolicy() + notificationPolicyTree := alerting.NotificationPolicy(notificationPolicyTreeResponse) + + if err != nil { + return err + } + if !policyExist(notificationPolicyTree, newNotificationPolicy) { + notificationPolicyTree.Routes = append(notificationPolicyTree.Routes, newNotificationPolicy) + } else { + updateInPlace(¬ificationPolicyTree, newNotificationPolicy) + } + _, _, errPutNotificationPolicy := c.PutNotificationPolicy(notificationPolicyTree) + if errPutNotificationPolicy != nil { + return errPutNotificationPolicy + } return nil } diff --git a/observability-lib/api/notification-policy_test.go b/observability-lib/api/notification-policy_test.go new file mode 100644 index 000000000..3891a0458 --- /dev/null +++ 
b/observability-lib/api/notification-policy_test.go @@ -0,0 +1,265 @@ +package api + +import ( + "testing" + + "github.com/grafana/grafana-foundation-sdk/go/alerting" + "github.com/stretchr/testify/require" +) + +func Pointer[T any](d T) *T { + return &d +} + +func TestObjectMatchersEqual(t *testing.T) { + t.Run("returns true if the two object matchers are equal", func(t *testing.T) { + a := alerting.ObjectMatchers{{"team", "=", "chainlink"}} + b := alerting.ObjectMatchers{{"team", "=", "chainlink"}} + + result := objectMatchersEqual(a, b) + require.True(t, result) + }) + + t.Run("returns true if the two object matchers with multiple matches are equal", func(t *testing.T) { + a := alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + {"severity", "=", "critical"}, + } + b := alerting.ObjectMatchers{ + {"severity", "=", "critical"}, + {"team", "=", "chainlink"}, + } + + result := objectMatchersEqual(a, b) + require.True(t, result) + }) + + t.Run("returns false if the two object matchers with multiple matches are different", func(t *testing.T) { + a := alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + {"severity", "=", "critical"}, + } + b := alerting.ObjectMatchers{ + {"severity", "=", "warning"}, + {"team", "=", "chainlink"}, + } + + result := objectMatchersEqual(a, b) + require.False(t, result) + }) +} + +func TestPolicyExists(t *testing.T) { + t.Run("policyExists return true if policy exists", func(t *testing.T) { + notificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + }, + }, + }, + }, + } + + newNotificationPolicy := alerting.NotificationPolicy{ + Receiver: Pointer("pagerduty"), + ObjectMatchers: 
&alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + } + result := policyExist(*notificationPolicyTree, newNotificationPolicy) + require.True(t, result) + }) + + t.Run("policyExists return false if policy does not exists", func(t *testing.T) { + notificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + }, + }, + }, + }, + } + + newNotificationPolicy := alerting.NotificationPolicy{ + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"key", "=", "value"}, + }, + } + result := policyExist(*notificationPolicyTree, newNotificationPolicy) + require.False(t, result) + }) + + t.Run("updateInPlace should update notification policy if already exists", func(t *testing.T) { + notificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + }, + }, + }, + }, + } + + newNotificationPolicy := alerting.NotificationPolicy{ + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + Continue: Pointer(true), + } + + expectedNotificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + Routes: 
[]alerting.NotificationPolicy{ + { + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + Continue: Pointer(true), + }, + }, + }, + }, + } + + updateInPlace(notificationPolicyTree, newNotificationPolicy) + require.Equal(t, expectedNotificationPolicyTree, notificationPolicyTree) + }) + + t.Run("deleteInPlace should delete notification policy if exists", func(t *testing.T) { + notificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + }, + { + Receiver: Pointer("slack2"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink2"}, + }, + }, + { + Receiver: Pointer("slack3"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink3"}, + }, + }, + }, + } + + newNotificationPolicy := alerting.NotificationPolicy{ + Receiver: Pointer("slack2"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink2"}, + }, + } + + expectedNotificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + }, + { + Receiver: Pointer("slack3"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink3"}, + }, + }, + }, + } + deleteInPlace(notificationPolicyTree, newNotificationPolicy) + require.Equal(t, expectedNotificationPolicyTree, notificationPolicyTree) + }) + + t.Run("deleteInPlace should delete notification policy if exists", func(t *testing.T) { + notificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", 
"chainlink"}, + }, + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + }, + }, + }, + }, + } + + newNotificationPolicy := alerting.NotificationPolicy{ + Receiver: Pointer("pagerduty"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"env", "=", "production"}, + }, + } + + expectedNotificationPolicyTree := &alerting.NotificationPolicy{ + Receiver: Pointer("grafana-default-email"), + Routes: []alerting.NotificationPolicy{ + { + Receiver: Pointer("slack"), + ObjectMatchers: &alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + }, + }, + }, + } + deleteInPlace(notificationPolicyTree, newNotificationPolicy) + require.Equal(t, expectedNotificationPolicyTree, notificationPolicyTree) + }) + +} diff --git a/observability-lib/api/rule-group.go b/observability-lib/api/rule-group.go new file mode 100644 index 000000000..1d4bde5f3 --- /dev/null +++ b/observability-lib/api/rule-group.go @@ -0,0 +1,33 @@ +package api + +import ( + "fmt" + + "github.com/go-resty/resty/v2" + "github.com/grafana/grafana-foundation-sdk/go/alerting" +) + +type UpdateAlertRuleGroupResponse struct{} + +// UpdateAlertRuleGroup Update a specific alert rule group +func (c *Client) UpdateAlertRuleGroup(folderUID string, alertRuleGroup alerting.RuleGroup) (UpdateAlertRuleGroupResponse, *resty.Response, error) { + var grafanaResp UpdateAlertRuleGroupResponse + + resp, err := c.resty.R(). + SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). + SetBody(alertRuleGroup). + SetResult(&grafanaResp). 
+ Put(fmt.Sprintf("/api/v1/provisioning/folder/%s/rule-groups/%s", folderUID, *alertRuleGroup.Title)) + + if err != nil { + return UpdateAlertRuleGroupResponse{}, resp, fmt.Errorf("error making API request: %w", err) + } + + statusCode := resp.StatusCode() + if statusCode != 200 { + return UpdateAlertRuleGroupResponse{}, resp, fmt.Errorf("error updating alert rule group, received unexpected status code %d: %s", statusCode, resp.String()) + } + + return grafanaResp, resp, nil +} diff --git a/observability-lib/api/rule.go b/observability-lib/api/rule.go index e004df6d7..ddbf9e718 100644 --- a/observability-lib/api/rule.go +++ b/observability-lib/api/rule.go @@ -25,6 +25,22 @@ func (c *Client) GetAlertRulesByDashboardUID(dashboardUID string) (GetAllAlertRu return alerts, nil } +// GetAlertRulesByFolderUIDAndGroupName Get alert rules by folder UID and GroupName +func (c *Client) GetAlertRulesByFolderUIDAndGroupName(folderUID string, ruleGroupName string) (GetAllAlertRulesResponse, error) { + var alerts []alerting.Rule + + alertsRule, _, err := c.GetAlertRules() + if err != nil { + return nil, err + } + for _, rule := range alertsRule { + if rule.FolderUID != "" && (rule.FolderUID == folderUID) && (rule.RuleGroup == ruleGroupName) { + alerts = append(alerts, rule) + } + } + return alerts, nil +} + // GetAlertRules Get all alert rules func (c *Client) GetAlertRules() (GetAllAlertRulesResponse, *resty.Response, error) { var grafanaResp GetAllAlertRulesResponse @@ -70,6 +86,31 @@ func (c *Client) PostAlertRule(alertRule alerting.Rule) (PostAlertRuleResponse, return grafanaResp, resp, nil } +type UpdateAlertRuleResponse struct{} + +// UpdateAlertRule Update a specific alert rule by UID +func (c *Client) UpdateAlertRule(uid string, alertRule alerting.Rule) (UpdateAlertRuleResponse, *resty.Response, error) { + var grafanaResp UpdateAlertRuleResponse + + resp, err := c.resty.R(). + SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). 
+ SetBody(alertRule). + SetResult(&grafanaResp). + Put(fmt.Sprintf("/api/v1/provisioning/alert-rules/%s", uid)) + + if err != nil { + return UpdateAlertRuleResponse{}, resp, fmt.Errorf("error making API request: %w", err) + } + + statusCode := resp.StatusCode() + if statusCode != 200 { + return UpdateAlertRuleResponse{}, resp, fmt.Errorf("error updating alert rule, received unexpected status code %d: %s", statusCode, resp.String()) + } + + return grafanaResp, resp, nil +} + type DeleteAlertRuleResponse struct{} // DeleteAlertRule Delete a specific alert rule by UID diff --git a/observability-lib/cmd/api/api.go b/observability-lib/cmd/api/api.go new file mode 100644 index 000000000..d6c407f76 --- /dev/null +++ b/observability-lib/cmd/api/api.go @@ -0,0 +1,31 @@ +package api + +import ( + "github.com/smartcontractkit/chainlink-common/observability-lib/cmd/api/contact_point" + "github.com/smartcontractkit/chainlink-common/observability-lib/cmd/api/dashboard" + "github.com/smartcontractkit/chainlink-common/observability-lib/cmd/api/notification_policy" + "github.com/spf13/cobra" +) + +var Cmd = &cobra.Command{ + Use: "api [resources]", + Short: "Select resources to perform actions", +} + +func init() { + Cmd.AddCommand(contact_point.Cmd) + Cmd.AddCommand(dashboard.Cmd) + Cmd.AddCommand(notification_policy.Cmd) + + Cmd.PersistentFlags().String("grafana-url", "", "Grafana URL") + errURL := Cmd.MarkPersistentFlagRequired("grafana-url") + if errURL != nil { + panic(errURL) + } + + Cmd.PersistentFlags().String("grafana-token", "", "Grafana API token") + errToken := Cmd.MarkPersistentFlagRequired("grafana-token") + if errToken != nil { + panic(errToken) + } +} diff --git a/observability-lib/cmd/api/contact_point/contact-point.go b/observability-lib/cmd/api/contact_point/contact-point.go new file mode 100644 index 000000000..e32f202c6 --- /dev/null +++ b/observability-lib/cmd/api/contact_point/contact-point.go @@ -0,0 +1,14 @@ +package contact_point + +import ( + 
"github.com/spf13/cobra" +) + +var Cmd = &cobra.Command{ + Use: "contact-point [actions]", + Short: "Perform actions on contact point", +} + +func init() { + Cmd.AddCommand(listCmd, deleteCmd) +} diff --git a/observability-lib/cmd/api/contact_point/delete.go b/observability-lib/cmd/api/contact_point/delete.go new file mode 100644 index 000000000..01af1940e --- /dev/null +++ b/observability-lib/cmd/api/contact_point/delete.go @@ -0,0 +1,35 @@ +package contact_point + +import ( + "errors" + + "github.com/smartcontractkit/chainlink-common/observability-lib/api" + "github.com/spf13/cobra" +) + +var deleteCmd = &cobra.Command{ + Use: "delete [name]", + Short: "Delete contact point by name", + RunE: func(cmd *cobra.Command, args []string) error { + grafanaClient := api.NewClient( + cmd.Flag("grafana-url").Value.String(), + cmd.Flag("grafana-token").Value.String(), + ) + + contactPoint, err := grafanaClient.GetContactPointByName(args[0]) + if err != nil { + return err + } + + if contactPoint == nil { + return errors.New("contact point not found") + } + + _, _, errDelete := grafanaClient.DeleteContactPoint(*contactPoint.Uid) + if errDelete != nil { + return errDelete + } + + return nil + }, +} diff --git a/observability-lib/cmd/api/contact_point/list.go b/observability-lib/cmd/api/contact_point/list.go new file mode 100644 index 000000000..ca62b79f7 --- /dev/null +++ b/observability-lib/cmd/api/contact_point/list.go @@ -0,0 +1,28 @@ +package contact_point + +import ( + "github.com/smartcontractkit/chainlink-common/observability-lib/api" + "github.com/spf13/cobra" +) + +var listCmd = &cobra.Command{ + Use: "list", + Short: "List contact point", + RunE: func(cmd *cobra.Command, args []string) error { + grafanaClient := api.NewClient( + cmd.Flag("grafana-url").Value.String(), + cmd.Flag("grafana-token").Value.String(), + ) + + contactPoints, _, err := grafanaClient.GetContactPoints() + if err != nil { + return err + } + + for _, contactPoint := range contactPoints { + 
cmd.Printf("| Name: %s | UID: %s\n", *contactPoint.Name, *contactPoint.Uid) + } + + return nil + }, +} diff --git a/observability-lib/cmd/api/dashboard/dashboard.go b/observability-lib/cmd/api/dashboard/dashboard.go new file mode 100644 index 000000000..8c46d0abe --- /dev/null +++ b/observability-lib/cmd/api/dashboard/dashboard.go @@ -0,0 +1,14 @@ +package dashboard + +import ( + "github.com/spf13/cobra" +) + +var Cmd = &cobra.Command{ + Use: "dashboard [actions]", + Short: "Perform actions on dashboard", +} + +func init() { + Cmd.AddCommand(deleteCmd) +} diff --git a/observability-lib/cmd/api/dashboard/delete.go b/observability-lib/cmd/api/dashboard/delete.go new file mode 100644 index 000000000..5b40f41a5 --- /dev/null +++ b/observability-lib/cmd/api/dashboard/delete.go @@ -0,0 +1,35 @@ +package dashboard + +import ( + "errors" + + "github.com/smartcontractkit/chainlink-common/observability-lib/api" + "github.com/spf13/cobra" +) + +var deleteCmd = &cobra.Command{ + Use: "delete [name]", + Short: "Delete dashboard by name", + RunE: func(cmd *cobra.Command, args []string) error { + grafanaClient := api.NewClient( + cmd.Flag("grafana-url").Value.String(), + cmd.Flag("grafana-token").Value.String(), + ) + + delDashboard, _, err := grafanaClient.GetDashboardByName(args[0]) + if err != nil { + return err + } + + if delDashboard.UID == nil { + return errors.New("contact point not found") + } + + _, errDelete := grafanaClient.DeleteDashboardByUID(*delDashboard.UID) + if errDelete != nil { + return errDelete + } + + return nil + }, +} diff --git a/observability-lib/cmd/api/notification_policy/delete.go b/observability-lib/cmd/api/notification_policy/delete.go new file mode 100644 index 000000000..918a1d5dd --- /dev/null +++ b/observability-lib/cmd/api/notification_policy/delete.go @@ -0,0 +1,61 @@ +package notification_policy + +import ( + "errors" + "strings" + + "github.com/grafana/grafana-foundation-sdk/go/alerting" + 
"github.com/smartcontractkit/chainlink-common/observability-lib/api" + "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" + "github.com/spf13/cobra" +) + +var deleteCmd = &cobra.Command{ + Use: "delete [receiver]", + Short: "Delete notification policy", + RunE: func(cmd *cobra.Command, args []string) error { + grafanaClient := api.NewClient( + cmd.Flag("grafana-url").Value.String(), + cmd.Flag("grafana-token").Value.String(), + ) + + if len(args) != 1 { + return errors.New("receiver argument missing") + } + + matchers, err := cmd.Flags().GetStringArray("matchers") + if err != nil { + return err + } + if matchers != nil && len(matchers) > 0 { + objectMatchers := alerting.ObjectMatchers{} + notificationPolicy := alerting.NotificationPolicy{ + Receiver: grafana.Pointer(args[0]), + } + for _, matcher := range matchers { + objectMatcher := strings.Split(matcher, ",") + if len(objectMatcher) != 3 { + return errors.New("invalid matcher format must be key,operator,value") + } + + objectMatchers = append(objectMatchers, objectMatcher) + } + notificationPolicy.ObjectMatchers = &objectMatchers + errDelete := grafanaClient.DeleteNestedPolicy(notificationPolicy) + + if errDelete != nil { + return errDelete + } + } + + return nil + }, +} + +func init() { + deleteCmd.Flags().StringArray("matchers", []string{}, "Object matchers, in the form of key,operator,value e.g. 
'key,=,value'") + errMatchers := deleteCmd.MarkFlagRequired("matchers") + if errMatchers != nil { + panic(errMatchers) + } +} diff --git a/observability-lib/cmd/api/notification_policy/list.go b/observability-lib/cmd/api/notification_policy/list.go new file mode 100644 index 000000000..d20971561 --- /dev/null +++ b/observability-lib/cmd/api/notification_policy/list.go @@ -0,0 +1,26 @@ +package notification_policy + +import ( + "github.com/grafana/grafana-foundation-sdk/go/alerting" + "github.com/smartcontractkit/chainlink-common/observability-lib/api" + "github.com/spf13/cobra" +) + +var listCmd = &cobra.Command{ + Use: "list", + Short: "List notification policy", + RunE: func(cmd *cobra.Command, args []string) error { + grafanaClient := api.NewClient( + cmd.Flag("grafana-url").Value.String(), + cmd.Flag("grafana-token").Value.String(), + ) + + notificationPolicyTree, _, err := grafanaClient.GetNotificationPolicy() + if err != nil { + return err + } + + api.PrintPolicyTree(alerting.NotificationPolicy(notificationPolicyTree), 0) + return nil + }, +} diff --git a/observability-lib/cmd/api/notification_policy/notification-policy.go b/observability-lib/cmd/api/notification_policy/notification-policy.go new file mode 100644 index 000000000..8361c3a2a --- /dev/null +++ b/observability-lib/cmd/api/notification_policy/notification-policy.go @@ -0,0 +1,14 @@ +package notification_policy + +import ( + "github.com/spf13/cobra" +) + +var Cmd = &cobra.Command{ + Use: "notification-policy [actions]", + Short: "Perform actions on notification policy", +} + +func init() { + Cmd.AddCommand(listCmd, deleteCmd) +} diff --git a/observability-lib/cmd/builder.go b/observability-lib/cmd/builder.go deleted file mode 100644 index fdd849075..000000000 --- a/observability-lib/cmd/builder.go +++ /dev/null @@ -1,114 +0,0 @@ -package cmd - -import ( - "errors" - - atlasdon "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/atlas-don" - 
"github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/capabilities" - corenode "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/core-node" - corenodecomponents "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/core-node-components" - k8sresources "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/k8s-resources" - nopocr "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/nop-ocr" - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -type TypeDashboard string - -const ( - TypeDashboardCoreNode TypeDashboard = "core-node" - TypeDashboardCoreNodeComponents TypeDashboard = "core-node-components" - TypeDashboardCoreNodeResources TypeDashboard = "core-node-resources" - TypeDashboardDONOCR TypeDashboard = "don-ocr" - TypeDashboardDONOCR2 TypeDashboard = "don-ocr2" - TypeDashboardDONOCR3 TypeDashboard = "don-ocr3" - TypeDashboardNOPOCR2 TypeDashboard = "nop-ocr2" - TypeDashboardNOPOCR3 TypeDashboard = "nop-ocr3" - TypeDashboardCapabilities TypeDashboard = "capabilities" -) - -type OCRVersion string - -const ( - OCRVersionOCR OCRVersion = "ocr" - OCRVersionOCR2 OCRVersion = "ocr2" - OCRVersionOCR3 OCRVersion = "ocr3" -) - -type BuildOptions struct { - Name string - Platform grafana.TypePlatform - TypeDashboard TypeDashboard - MetricsDataSource *grafana.DataSource - LogsDataSource *grafana.DataSource - SlackChannel string - SlackWebhookURL string - AlertsTags map[string]string - AlertsFilters string -} - -func BuildDashboardWithType(options *BuildOptions) (*grafana.Dashboard, error) { - switch options.TypeDashboard { - case TypeDashboardCoreNode: - return corenode.NewDashboard(&corenode.Props{ - Name: options.Name, - Platform: options.Platform, - MetricsDataSource: options.MetricsDataSource, - SlackChannel: options.SlackChannel, - SlackWebhookURL: options.SlackWebhookURL, - AlertsTags: options.AlertsTags, - AlertsFilters: 
options.AlertsFilters, - }) - case TypeDashboardCoreNodeComponents: - return corenodecomponents.NewDashboard(&corenodecomponents.Props{ - Name: options.Name, - Platform: options.Platform, - MetricsDataSource: options.MetricsDataSource, - LogsDataSource: options.LogsDataSource, - }) - case TypeDashboardCoreNodeResources: - if options.Platform != grafana.TypePlatformKubernetes { - return nil, errors.New("core-node-resources dashboard is only available for kubernetes") - } - return k8sresources.NewDashboard(&k8sresources.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - }) - case TypeDashboardDONOCR: - return atlasdon.NewDashboard(&atlasdon.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - OCRVersion: string(OCRVersionOCR), - }) - case TypeDashboardDONOCR2: - return atlasdon.NewDashboard(&atlasdon.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - OCRVersion: string(OCRVersionOCR2), - }) - case TypeDashboardDONOCR3: - return atlasdon.NewDashboard(&atlasdon.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - OCRVersion: string(OCRVersionOCR3), - }) - case TypeDashboardNOPOCR2: - return nopocr.NewDashboard(&nopocr.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - OCRVersion: string(OCRVersionOCR2), - }) - case TypeDashboardNOPOCR3: - return nopocr.NewDashboard(&nopocr.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - OCRVersion: string(OCRVersionOCR3), - }) - case TypeDashboardCapabilities: - return capabilities.NewDashboard(&capabilities.Props{ - Name: options.Name, - MetricsDataSource: options.MetricsDataSource, - }) - default: - return nil, errors.New("invalid dashboard type") - } -} diff --git a/observability-lib/cmd/delete.go b/observability-lib/cmd/delete.go deleted file mode 100644 index f60783c5e..000000000 --- a/observability-lib/cmd/delete.go +++ /dev/null @@ -1,36 +0,0 @@ -package cmd - 
-import ( - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - "github.com/spf13/cobra" -) - -var DeleteCmd = &cobra.Command{ - Use: "delete", - Short: "Delete Grafana Dashboard", - RunE: func(cmd *cobra.Command, args []string) error { - return grafana.DeleteDashboard(&grafana.DeleteOptions{ - GrafanaURL: cmd.Flag("grafana-url").Value.String(), - GrafanaToken: cmd.Flag("grafana-token").Value.String(), - Name: cmd.Flag("dashboard-name").Value.String(), - }) - }, -} - -func init() { - DeleteCmd.Flags().String("dashboard-name", "", "Name of the dashboard to deploy") - errName := DeleteCmd.MarkFlagRequired("dashboard-name") - if errName != nil { - panic(errName) - } - DeleteCmd.Flags().String("grafana-url", "", "Grafana URL") - errURL := DeleteCmd.MarkFlagRequired("grafana-url") - if errURL != nil { - panic(errURL) - } - DeleteCmd.Flags().String("grafana-token", "", "Grafana API token") - errToken := DeleteCmd.MarkFlagRequired("grafana-token") - if errToken != nil { - panic(errToken) - } -} diff --git a/observability-lib/cmd/deploy.go b/observability-lib/cmd/deploy.go deleted file mode 100644 index daa63fd3f..000000000 --- a/observability-lib/cmd/deploy.go +++ /dev/null @@ -1,105 +0,0 @@ -package cmd - -import ( - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - "github.com/spf13/cobra" -) - -var DeployCmd = &cobra.Command{ - Use: "deploy", - Short: "Deploy Grafana dashboard and associated alerts", - RunE: func(cmd *cobra.Command, args []string) error { - alertsTags, errAlertsTags := cmd.Flags().GetStringToString("alerts-tags") - if errAlertsTags != nil { - return errAlertsTags - } - - metricsDataSource, errMetricsDataSource := grafana.GetDataSourceFromGrafana( - cmd.Flag("metrics-datasource").Value.String(), - cmd.Flag("grafana-url").Value.String(), - cmd.Flag("grafana-token").Value.String(), - ) - - if errMetricsDataSource != nil { - return errMetricsDataSource - } - - var logsDataSource *grafana.DataSource - if 
cmd.Flag("logs-datasource").Value.String() != "" { - var errLogsDataSource error - logsDataSource, errLogsDataSource = grafana.GetDataSourceFromGrafana( - cmd.Flag("logs-datasource").Value.String(), - cmd.Flag("grafana-url").Value.String(), - cmd.Flag("grafana-token").Value.String(), - ) - - if errLogsDataSource != nil { - return errLogsDataSource - } - } - - dashboard, err := BuildDashboardWithType(&BuildOptions{ - Name: cmd.Flag("dashboard-name").Value.String(), - Platform: grafana.TypePlatform(cmd.Flag("platform").Value.String()), - TypeDashboard: TypeDashboard(cmd.Flag("type").Value.String()), - MetricsDataSource: metricsDataSource, - LogsDataSource: logsDataSource, - SlackChannel: cmd.Flag("slack-channel").Value.String(), - SlackWebhookURL: cmd.Flag("slack-webhook").Value.String(), - AlertsTags: alertsTags, - AlertsFilters: cmd.Flag("alerts-filters").Value.String(), - }) - if err != nil { - return err - } - - errDeploy := dashboard.DeployToGrafana(&grafana.DeployOptions{ - GrafanaURL: cmd.Flag("grafana-url").Value.String(), - GrafanaToken: cmd.Flag("grafana-token").Value.String(), - FolderName: cmd.Flag("dashboard-folder").Value.String(), - EnableAlerts: cmd.Flag("enable-alerts").Value.String() == "true", - NotificationTemplates: cmd.Flag("notification-templates").Value.String(), - }) - if errDeploy != nil { - return errDeploy - } - - return nil - }, -} - -func init() { - DeployCmd.Flags().String("dashboard-name", "", "Name of the dashboard to deploy") - errName := DeployCmd.MarkFlagRequired("dashboard-name") - if errName != nil { - panic(errName) - } - DeployCmd.Flags().String("dashboard-folder", "", "Dashboard folder") - errFolder := DeployCmd.MarkFlagRequired("dashboard-folder") - if errFolder != nil { - panic(errFolder) - } - DeployCmd.Flags().String("grafana-url", "", "Grafana URL") - errURL := DeployCmd.MarkFlagRequired("grafana-url") - if errURL != nil { - panic(errURL) - } - DeployCmd.Flags().String("grafana-token", "", "Grafana API token") - errToken 
:= DeployCmd.MarkFlagRequired("grafana-token") - if errToken != nil { - panic(errToken) - } - DeployCmd.Flags().String("metrics-datasource", "Prometheus", "Metrics datasource name") - DeployCmd.Flags().String("logs-datasource", "", "Logs datasource name") - DeployCmd.Flags().String("platform", "docker", "Platform where the dashboard is deployed (docker or kubernetes)") - DeployCmd.Flags().String("type", "core-node", "Dashboard type can be either core-node | core-node-components | core-node-resources | don-ocr | don-ocr2 | don-ocr3 | nop-ocr2 | nop-ocr3") - DeployCmd.Flags().Bool("enable-alerts", false, "Deploy alerts") - DeployCmd.Flags().StringToString("alerts-tags", map[string]string{ - "team": "chainlink-team", - }, "Alerts tags") - DeployCmd.Flags().String("notification-templates", "", "Filepath in yaml format, will create notification templates depending on key-value pairs in the yaml file") - DeployCmd.Flags().String("slack-channel", "", "Slack channel, required when setting up slack contact points") - DeployCmd.Flags().String("slack-webhook", "", "Slack webhook URL, required when setting up slack contact points") - DeployCmd.Flags().String("slack-token", "", "Slack token, required when setting up slack contact points and slack webhook is not provided") - DeployCmd.Flags().String("alerts-filters", "", "Alerts Filters applied to the queries") -} diff --git a/observability-lib/cmd/generate.go b/observability-lib/cmd/generate.go deleted file mode 100644 index 821c3ee90..000000000 --- a/observability-lib/cmd/generate.go +++ /dev/null @@ -1,46 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - "github.com/spf13/cobra" -) - -var GenerateCmd = &cobra.Command{ - Use: "generate", - Short: "Generate Grafana Dashboard JSON", - RunE: func(cmd *cobra.Command, args []string) error { - dashboard, err := BuildDashboardWithType(&BuildOptions{ - Name: cmd.Flag("dashboard-name").Value.String(), - Platform: 
grafana.TypePlatform(cmd.Flag("platform").Value.String()), - TypeDashboard: TypeDashboard(cmd.Flag("type").Value.String()), - MetricsDataSource: grafana.NewDataSource(cmd.Flag("metrics-datasource").Value.String(), ""), - LogsDataSource: grafana.NewDataSource(cmd.Flag("logs-datasource").Value.String(), ""), - }) - if err != nil { - return err - } - - dashboardJSON, errDashboardJSON := dashboard.GenerateJSON() - if errDashboardJSON != nil { - return errDashboardJSON - } - - fmt.Print(string(dashboardJSON)) - - return nil - }, -} - -func init() { - GenerateCmd.Flags().String("dashboard-name", "", "Name of the dashboard to deploy") - errName := GenerateCmd.MarkFlagRequired("dashboard-name") - if errName != nil { - panic(errName) - } - GenerateCmd.Flags().String("metrics-datasource", "Prometheus", "Metrics datasource name") - GenerateCmd.Flags().String("logs-datasource", "", "Logs datasource name") - GenerateCmd.Flags().String("platform", "docker", "Platform where the dashboard is deployed (docker or kubernetes)") - GenerateCmd.Flags().String("type", "core-node", "Dashboard type can be either core-node | core-node-components | core-node-resources | don-ocr | don-ocr2 | don-ocr3") -} diff --git a/observability-lib/cmd/log.go b/observability-lib/cmd/log.go deleted file mode 100644 index 8e643f1f9..000000000 --- a/observability-lib/cmd/log.go +++ /dev/null @@ -1,28 +0,0 @@ -package cmd - -import ( - "os" - - "github.com/rs/zerolog" - "github.com/rs/zerolog/log" -) - -const ( - LogLevelEnvVar = "DASHBOARD_LOG_LEVEL" -) - -var ( - Logger zerolog.Logger -) - -func init() { - lvlStr := os.Getenv(LogLevelEnvVar) - if lvlStr == "" { - lvlStr = "info" - } - lvl, err := zerolog.ParseLevel(lvlStr) - if err != nil { - panic(err) - } - Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}).Level(lvl) -} diff --git a/observability-lib/cmd/notification-templates.yaml b/observability-lib/cmd/notification-templates.yaml deleted file mode 100644 index f28351cb1..000000000 --- 
a/observability-lib/cmd/notification-templates.yaml +++ /dev/null @@ -1,56 +0,0 @@ -slack: |- - {{ define "slack.chainlink.text" }} - {{- $root := . -}} - {{ range .Alerts }} - {{ template "slack.print_alert" . }} - {{ end }} - {{ end }} - - {{ define "slack.print_alert" }} - *Summary:* ```{{ .Annotations.summary }}``` - {{ if gt (len .Annotations.description) 0 }}*Description:* ```{{ .Annotations.description }}```{{ end }} - *Labels:* ```{{- range .Labels.SortedPairs }} - {{- if and (ne .Name "alertname") (ne .Name "grafana_folder") (ne .Name "severity") }} - • {{ .Name }}: {{ .Value }} - {{- end }} - {{- end }}``` - {{- if gt (len .GeneratorURL ) 0 }} - <{{ .GeneratorURL }}|:grafana: Grafana Alert URL> - {{- end }} - {{- if gt (len .DashboardURL ) 0 }} - <{{ .DashboardURL }}|:desktop_computer: Dashboard URL> - {{- end }} - {{- if gt (len .PanelURL ) 0 }} - <{{ .PanelURL }}|:bar_chart: Panel URL> - {{- end }} - {{- if gt (len .SilenceURL ) 0 }} - <{{ .SilenceURL }}|:no_bell: Silence alert> - {{- end }} - {{- if gt (len .Annotations.runbook_url ) 0 }} - <{{ .Annotations.runbook_url }}|:spiral_note_pad: Runbook> - {{- end }} - {{ end }} - - {{ define "slack.chainlink.color" }} - {{- if ne .Status "firing" -}} - good - {{- else if eq .CommonLabels.severity "critical" -}} - danger - {{- else if eq .CommonLabels.severity "warning" -}} - warning - {{- end -}} - {{ end }} - - {{ define "alert_severity_prefix_emoji" -}} - {{- if ne .Status "firing" -}} - :white_check_mark: - {{- else if eq .CommonLabels.severity "critical" -}} - :red_circle: - {{- else if eq .CommonLabels.severity "warning" -}} - :warning: - {{- end -}} - {{- end -}} - - {{ define "slack.chainlink.title" }} - {{ template "alert_severity_prefix_emoji" . 
}} [{{- if gt (len .Alerts.Resolved) 0}}{{ .Status | toUpper }}{{- else }}{{ .CommonLabels.severity | toUpper }}{{- end }}:{{ .Alerts | len }}] {{ .CommonLabels.alertname }} - {{ end }} \ No newline at end of file diff --git a/observability-lib/cmd/root.go b/observability-lib/cmd/root.go new file mode 100644 index 000000000..df7e375f2 --- /dev/null +++ b/observability-lib/cmd/root.go @@ -0,0 +1,23 @@ +package cmd + +import ( + "log" + + "github.com/smartcontractkit/chainlink-common/observability-lib/cmd/api" + "github.com/spf13/cobra" +) + +var rootCmd = &cobra.Command{ + Use: "observability-lib [command]", + Short: "observability-lib CLI to perform actions on observability resources", +} + +func init() { + rootCmd.AddCommand(api.Cmd) +} + +func Execute() { + if err := rootCmd.Execute(); err != nil { + log.Fatalln(err) + } +} diff --git a/observability-lib/dashboards/atlas-don/component.go b/observability-lib/dashboards/atlas-don/component.go deleted file mode 100644 index 5cb9f4e4f..000000000 --- a/observability-lib/dashboards/atlas-don/component.go +++ /dev/null @@ -1,787 +0,0 @@ -package atlasdon - -import ( - "fmt" - - "github.com/grafana/grafana-foundation-sdk/go/cog" - "github.com/grafana/grafana-foundation-sdk/go/common" - "github.com/grafana/grafana-foundation-sdk/go/dashboard" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -func NewDashboard(props *Props) (*grafana.Dashboard, error) { - if props.Name == "" { - return nil, fmt.Errorf("Name is required") - } - - if props.OCRVersion == "" { - return nil, fmt.Errorf("OCRVersion is required") - } - - if props.MetricsDataSource == nil { - return nil, fmt.Errorf("MetricsDataSource is required") - } else { - if props.MetricsDataSource.Name == "" { - return nil, fmt.Errorf("MetricsDataSource.Name is required") - } - if props.MetricsDataSource.UID == "" { - return nil, fmt.Errorf("MetricsDataSource.UID is required") - } - } - - props.platformOpts = 
platformPanelOpts(props.OCRVersion) - - builder := grafana.NewBuilder(&grafana.BuilderOptions{ - Name: props.Name, - Tags: []string{"DON", props.OCRVersion}, - Refresh: "30s", - TimeFrom: "now-30m", - TimeTo: "now", - }) - - builder.AddVars(vars(props)...) - - builder.AddRow("Summary") - builder.AddPanel(summary(props)...) - - builder.AddRow("OCR Contract Oracle") - builder.AddPanel(ocrContractConfigOracle(props)...) - - builder.AddRow("DON Nodes") - builder.AddPanel(ocrContractConfigNodes(props)...) - - builder.AddRow("Price Reporting") - builder.AddPanel(priceReporting(props)...) - - builder.AddRow("Round / Epoch Progression") - builder.AddPanel(roundEpochProgression(props)...) - - builder.AddRow("OCR Contract Config Delta") - builder.AddPanel(ocrContractConfigDelta(props)...) - - return builder.Build() -} - -func vars(p *Props) []cog.Builder[dashboard.VariableModel] { - var variables []cog.Builder[dashboard.VariableModel] - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Job", - Name: "job", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{namespace` + p.platformOpts.LabelFilters["namespace"] + `}, job)`, - })) - - variableFeedID := "feed_id" - if p.OCRVersion == "ocr3" { - variableFeedID = "feed_id_name" - } - - variableQueryContract := grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Contract", - Name: "contract", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(` + p.OCRVersion + `_contract_config_f{job="$job"}, contract)`, - }) - - variableQueryFeedID := grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Feed ID", - Name: variableFeedID, - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(` + p.OCRVersion + `_contract_config_f{job="$job", contract="$contract"}, ` + variableFeedID + `)`, - Multi: 
true, - }) - - variables = append(variables, variableQueryContract) - - switch p.OCRVersion { - case "ocr2": - variables = append(variables, variableQueryFeedID) - case "ocr3": - variables = append(variables, variableQueryFeedID) - } - - return variables -} - -func summary(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Telemetry Down", - Description: "Which jobs are not receiving any telemetry?", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `bool:` + p.OCRVersion + `_telemetry_down{` + p.platformOpts.LabelQuery + `} == 1`, - Legend: "{{job}} | {{report_type}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "green"}, - {Value: grafana.Pointer[float64](0.99), Color: "red"}, - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Oracle Down", - Description: "Which NOPs are not providing any telemetry?", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `bool:` + p.OCRVersion + `_oracle_telemetry_down_except_telemetry_down{job=~"${job}", oracle!="csa_unknown"} == 1`, - Legend: "{{oracle}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "green"}, - {Value: grafana.Pointer[float64](0.99), Color: "red"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - 
"renamePattern": "", - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Feeds reporting failure", - Description: "Which feeds are failing to report?", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `bool:` + p.OCRVersion + `_feed_reporting_failure_except_feed_telemetry_down{job=~"${job}", oracle!="csa_unknown"} == 1`, - Legend: "{{feed_id_name}} on {{job}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "green"}, - {Value: grafana.Pointer[float64](0.99), Color: "red"}, - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Feed telemetry Down", - Description: "Which feeds are not receiving any telemetry?", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `bool:` + p.OCRVersion + `_feed_telemetry_down_except_telemetry_down{job=~"${job}"} == 1`, - Legend: "{{feed_id_name}} on {{job}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "green"}, - {Value: grafana.Pointer[float64](0.99), Color: "red"}, - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Oracles no observations", - 
Description: "Which NOPs are not providing observations?", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `bool:` + p.OCRVersion + `_oracle_blind_except_telemetry_down{job=~"${job}"} == 1`, - Legend: "{{oracle}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "green"}, - {Value: grafana.Pointer[float64](0.99), Color: "red"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Oracles not contributing observations to feeds", - Description: "Which oracles are failing to make observations on feeds they should be participating in?", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `bool:` + p.OCRVersion + `_oracle_feed_no_observations_except_oracle_blind_except_feed_reporting_failure_except_feed_telemetry_down{job=~"${job}"} == 1`, - Legend: "{{oracle}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "green"}, - {Value: grafana.Pointer[float64](0.99), Color: "red"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - return panels -} - -func ocrContractConfigOracle(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = 
append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "OCR Contract Oracle Active", - Description: "set to one as long as an oracle is on a feed", - Span: 24, - Height: 8, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(` + p.OCRVersion + `_contract_oracle_active{` + p.platformOpts.LabelQuery + `}) by (contract, oracle)`, - Legend: "{{oracle}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - return panels -} - -func ocrContractConfigNodes(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - var variableFeedID string - switch p.OCRVersion { - case "ocr": - variableFeedID = "contract" - case "ocr2": - variableFeedID = "feed_id" - case "ocr3": - variableFeedID = "feed_id_name" - } - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Number of NOPs", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_n{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + variableFeedID + `}}`, - }, - { - Expr: `` + p.OCRVersion + `_contract_config_r_max{` + p.platformOpts.LabelQuery + `}`, - Legend: `Max nodes`, - }, - { - Expr: `avg(2 * ` + p.OCRVersion + `_contract_config_f{` + p.platformOpts.LabelQuery + `} + 1)`, - Legend: `Min nodes`, - }, - }, - Min: grafana.Pointer[float64](0), - }, - })) - - 
return panels -} - -func priceReporting(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - telemetryP2PReceivedTotal := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "P2P messages received", - Description: "From an individual node's perspective, how many messages are they receiving from other nodes? Uses ocr_telemetry_p2p_received_total", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum by (sender, receiver) (increase(` + p.OCRVersion + `_telemetry_p2p_received_total{job=~"${job}"}[5m]))`, - Legend: `{{sender}} > {{receiver}}`, - }, - }, - }, - }) - - telemetryP2PReceivedTotalRate := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "P2P messages received Rate", - Description: "From an individual node's perspective, how many messages are they receiving from other nodes? 
Uses ocr_telemetry_p2p_received_total", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum by (sender, receiver) (rate(` + p.OCRVersion + `_telemetry_p2p_received_total{job=~"${job}"}[5m]))`, - Legend: `{{sender}} > {{receiver}}`, - }, - }, - }, - }) - - telemetryObservationAsk := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Ask observation in MessageObserve sent", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_observation_ask{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - }) - - telemetryObservation := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Price observation in MessageObserve sent", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_observation{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - }) - - telemetryObservationBid := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Bid observation in MessageObserve sent", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_observation_bid{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - }) - - telemetryMessageProposeObservationAsk := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Ask MessagePropose observations", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_message_propose_observation_ask{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - 
}, - }, - }) - - telemetryMessageProposeObservation := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Price MessagePropose observations", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_message_propose_observation{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - }) - - telemetryMessageProposeObservationBid := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Bid MessagePropose observations", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_message_propose_observation_bid{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - }) - - telemetryMessageProposeObservationTotal := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Total number of observations included in MessagePropose", - Description: "How often is a node's observation included in the report?", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_message_propose_observation_total{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - }) - - telemetryMessageObserveTotal := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Total MessageObserve sent", - Description: "From an individual node's perspective, how often are they sending an observation?", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `rate(` + p.OCRVersion + `_telemetry_message_observe_total{` + p.platformOpts.LabelQuery + `}[5m])`, - Legend: `{{oracle}}`, - }, - }, - }, - 
}) - - switch p.OCRVersion { - case "ocr": - panels = append(panels, telemetryP2PReceivedTotal) - panels = append(panels, telemetryP2PReceivedTotalRate) - panels = append(panels, telemetryObservation) - panels = append(panels, telemetryMessageObserveTotal) - case "ocr2": - panels = append(panels, telemetryP2PReceivedTotal) - panels = append(panels, telemetryP2PReceivedTotalRate) - panels = append(panels, telemetryObservation) - panels = append(panels, telemetryMessageObserveTotal) - case "ocr3": - panels = append(panels, telemetryP2PReceivedTotal) - panels = append(panels, telemetryP2PReceivedTotalRate) - panels = append(panels, telemetryObservationAsk) - panels = append(panels, telemetryObservation) - panels = append(panels, telemetryObservationBid) - panels = append(panels, telemetryMessageProposeObservationAsk) - panels = append(panels, telemetryMessageProposeObservation) - panels = append(panels, telemetryMessageProposeObservationBid) - panels = append(panels, telemetryMessageProposeObservationTotal) - panels = append(panels, telemetryMessageObserveTotal) - } - - return panels -} - -func roundEpochProgression(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - var variableFeedID string - switch p.OCRVersion { - case "ocr": - variableFeedID = "contract" - case "ocr2": - variableFeedID = "feed_id" - case "ocr3": - variableFeedID = "feed_id_name" - } - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Agreed Epoch Progression", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "short", - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_feed_agreed_epoch{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + variableFeedID + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, 
- Title: "Round Epoch Progression", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "short", - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_telemetry_epoch_round{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{oracle}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Rounds Started", - Description: `Tracks individual nodes firing "new round" message via telemetry (not part of P2P messages)`, - Span: 12, - Height: 6, - Decimals: 1, - Unit: "short", - Query: []grafana.Query{ - { - Expr: `rate(` + p.OCRVersion + `_telemetry_round_started_total{` + p.platformOpts.LabelQuery + `}[1m])`, - Legend: `{{oracle}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Telemetry Ingested", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "short", - Query: []grafana.Query{ - { - Expr: `rate(` + p.OCRVersion + `_telemetry_ingested_total{` + p.platformOpts.LabelQuery + `}[1m])`, - Legend: `{{oracle}}`, - }, - }, - }, - })) - - return panels -} - -func ocrContractConfigDelta(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Relative Deviation Threshold", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_alpha{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - 
Title: "Max Contract Value Age Seconds", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_delta_c_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Observation Grace Period Seconds", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_delta_grace_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Bad Epoch Timeout Seconds", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_delta_progress_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Resend Interval Seconds", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_delta_resend_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: 
&grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Round Interval Seconds", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_delta_round_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Transmission Stage Timeout Second", - Span: 8, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `` + p.OCRVersion + `_contract_config_delta_stage_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{contract}}", - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - return panels -} diff --git a/observability-lib/dashboards/atlas-don/component_test.go b/observability-lib/dashboards/atlas-don/component_test.go deleted file mode 100644 index 919c01bc6..000000000 --- a/observability-lib/dashboards/atlas-don/component_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package atlasdon_test - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - - atlasdon "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/atlas-don" -) - -func TestNewDashboard(t *testing.T) { - t.Run("NewDashboard creates a dashboard", func(t *testing.T) { - testDashboard, err := atlasdon.NewDashboard(&atlasdon.Props{ - Name: "DON OCR Dashboard", - MetricsDataSource: grafana.NewDataSource("Prometheus", "1"), - OCRVersion: "ocr2", - }) - if err != nil { - t.Errorf("Error creating dashboard: %v", err) - } - require.IsType(t, grafana.Dashboard{}, *testDashboard) - require.Equal(t, "DON OCR Dashboard", *testDashboard.Dashboard.Title) - json, 
errJSON := testDashboard.GenerateJSON() - if errJSON != nil { - t.Errorf("Error generating JSON: %v", errJSON) - } - - jsonCompared, errCompared := os.ReadFile("test-output.json") - if errCompared != nil { - t.Errorf("Error reading file: %v", errCompared) - } - - require.ElementsMatch(t, jsonCompared, json) - }) -} diff --git a/observability-lib/dashboards/atlas-don/platform.go b/observability-lib/dashboards/atlas-don/platform.go deleted file mode 100644 index 8ac1bdbd2..000000000 --- a/observability-lib/dashboards/atlas-don/platform.go +++ /dev/null @@ -1,56 +0,0 @@ -package atlasdon - -import ( - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -type platformOpts struct { - LabelFilters map[string]string - LabelFilter string - LegendString string - LabelQuery string -} - -type Props struct { - Name string // Name is the name of the dashboard - MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics - OCRVersion string // OCRVersion is the version of the OCR (ocr, ocr2, ocr3) - platformOpts platformOpts -} - -// PlatformPanelOpts generate different queries depending on params -func platformPanelOpts(ocrVersion string) platformOpts { - po := platformOpts{ - LabelFilters: map[string]string{ - "contract": `=~"${contract}"`, - }, - } - - variableFeedID := "feed_id" - if ocrVersion == "ocr3" { - variableFeedID = "feed_id_name" - } - - switch ocrVersion { - case "ocr2": - po.LabelFilters[variableFeedID] = `=~"${` + variableFeedID + `}"` - case "ocr3": - po.LabelFilters[variableFeedID] = `=~"${` + variableFeedID + `}"` - } - namespace := "otpe" - if ocrVersion == "ocr2" { - namespace = "otpe2" - } else if ocrVersion == "ocr3" { - namespace = "otpe3" - } - - po.LabelFilters["namespace"] = `="` + namespace + `"` - po.LabelFilters["job"] = `=~"${job}"` - po.LabelFilter = "job" - po.LegendString = "job" - - for key, value := range po.LabelFilters { - po.LabelQuery += key + value + ", " - } - return po -} 
diff --git a/observability-lib/dashboards/atlas-don/test-output.json b/observability-lib/dashboards/atlas-don/test-output.json deleted file mode 100644 index 3f28f5859..000000000 --- a/observability-lib/dashboards/atlas-don/test-output.json +++ /dev/null @@ -1,1484 +0,0 @@ -{ - "Dashboard": { - "title": "DON OCR Dashboard", - "tags": [ - "DON", - "ocr2" - ], - "timezone": "browser", - "graphTooltip": 0, - "time": { - "from": "now-30m", - "to": "now" - }, - "fiscalYearStartMonth": 0, - "refresh": "30s", - "schemaVersion": 0, - "panels": [ - { - "type": "row", - "collapsed": false, - "title": "Summary", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 0, - "targets": [ - { - "expr": "bool:ocr2_telemetry_down{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", } == 1", - "format": "", - "legendFormat": "{{job}} | {{report_type}}", - "refId": "" - } - ], - "title": "Telemetry Down", - "description": "Which jobs are not receiving any telemetry?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 0, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "green" - }, - { - "value": 0.99, - "color": "red" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 1, - "targets": [ - { - "expr": "bool:ocr2_oracle_telemetry_down_except_telemetry_down{job=~\"${job}\", oracle!=\"csa_unknown\"} == 1", - "format": "", - 
"legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Oracle Down", - "description": "Which NOPs are not providing any telemetry?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 8, - "y": 1 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "green" - }, - { - "value": 0.99, - "color": "red" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 2, - "targets": [ - { - "expr": "bool:ocr2_feed_reporting_failure_except_feed_telemetry_down{job=~\"${job}\", oracle!=\"csa_unknown\"} == 1", - "format": "", - "legendFormat": "{{feed_id_name}} on {{job}}", - "refId": "" - } - ], - "title": "Feeds reporting failure", - "description": "Which feeds are failing to report?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 16, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - 
"color": "green" - }, - { - "value": 0.99, - "color": "red" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 3, - "targets": [ - { - "expr": "bool:ocr2_feed_telemetry_down_except_telemetry_down{job=~\"${job}\"} == 1", - "format": "", - "legendFormat": "{{feed_id_name}} on {{job}}", - "refId": "" - } - ], - "title": "Feed telemetry Down", - "description": "Which feeds are not receiving any telemetry?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 0, - "y": 5 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "green" - }, - { - "value": 0.99, - "color": "red" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 4, - "targets": [ - { - "expr": "bool:ocr2_oracle_blind_except_telemetry_down{job=~\"${job}\"} == 1", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Oracles no observations", - "description": "Which NOPs are not providing observations?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 8, - "y": 5 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 
- }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "green" - }, - { - "value": 0.99, - "color": "red" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 5, - "targets": [ - { - "expr": "bool:ocr2_oracle_feed_no_observations_except_oracle_blind_except_feed_reporting_failure_except_feed_telemetry_down{job=~\"${job}\"} == 1", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Oracles not contributing observations to feeds", - "description": "Which oracles are failing to make observations on feeds they should be participating in?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 16, - "y": 5 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "green" - }, - { - "value": 0.99, - "color": "red" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "OCR Contract Oracle", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 9 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 6, - "targets": [ - { - "expr": 
"sum(ocr2_contract_oracle_active{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }) by (contract, oracle)", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "OCR Contract Oracle Active", - "description": "set to one as long as an oracle is on a feed", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 8, - "w": 24, - "x": 0, - "y": 10 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "DON Nodes", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 18 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 7, - "targets": [ - { - "expr": "ocr2_contract_config_n{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{feed_id}}", - "refId": "" - }, - { - "expr": "ocr2_contract_config_r_max{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "Max nodes", - "refId": "" - }, - { - "expr": "avg(2 * ocr2_contract_config_f{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", } + 1)", - "format": "", - 
"legendFormat": "Min nodes", - "refId": "" - } - ], - "title": "Number of NOPs", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 19 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "min": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Price Reporting", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 25 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 8, - "targets": [ - { - "expr": "sum by (sender, receiver) (increase(ocr2_telemetry_p2p_received_total{job=~\"${job}\"}[5m]))", - "format": "", - "legendFormat": "{{sender}} \u003e {{receiver}}", - "refId": "" - } - ], - "title": "P2P messages received", - "description": "From an individual node's perspective, how many messages are they receiving from other nodes? 
Uses ocr_telemetry_p2p_received_total", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 26 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 9, - "targets": [ - { - "expr": "sum by (sender, receiver) (rate(ocr2_telemetry_p2p_received_total{job=~\"${job}\"}[5m]))", - "format": "", - "legendFormat": "{{sender}} \u003e {{receiver}}", - "refId": "" - } - ], - "title": "P2P messages received Rate", - "description": "From an individual node's perspective, how many messages are they receiving from other nodes? Uses ocr_telemetry_p2p_received_total", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 32 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 10, - "targets": [ - { - "expr": "ocr2_telemetry_observation{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Price observation in MessageObserve sent", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 38 - }, - "options": { - 
"legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 11, - "targets": [ - { - "expr": "rate(ocr2_telemetry_message_observe_total{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }[5m])", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Total MessageObserve sent", - "description": "From an individual node's perspective, how often are they sending an observation?", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 44 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Round / Epoch Progression", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 50 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 12, - "targets": [ - { - "expr": "ocr2_telemetry_feed_agreed_epoch{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{feed_id}}", - "refId": "" - } - ], - "title": "Agreed Epoch Progression", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 51 - }, - "options": { - "legend": { - "displayMode": "list", - 
"placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "short", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 13, - "targets": [ - { - "expr": "ocr2_telemetry_epoch_round{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Round Epoch Progression", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 51 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "short", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 14, - "targets": [ - { - "expr": "rate(ocr2_telemetry_round_started_total{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }[1m])", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Rounds Started", - "description": "Tracks individual nodes firing \"new round\" message via telemetry (not part of P2P messages)", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 57 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "short", - "decimals": 1, - "noValue": "No data", - "custom": 
{ - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 15, - "targets": [ - { - "expr": "rate(ocr2_telemetry_ingested_total{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }[1m])", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Telemetry Ingested", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 57 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "short", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "OCR Contract Config Delta", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 63 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 16, - "targets": [ - { - "expr": "ocr2_contract_config_alpha{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Relative Deviation Threshold", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 0, - "y": 64 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - 
"overrides": null - } - }, - { - "type": "stat", - "id": 17, - "targets": [ - { - "expr": "ocr2_contract_config_delta_c_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Max Contract Value Age Seconds", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 8, - "y": 64 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 18, - "targets": [ - { - "expr": "ocr2_contract_config_delta_grace_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Observation Grace Period Seconds", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 16, - "y": 64 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 19, - "targets": [ - { - "expr": "ocr2_contract_config_delta_progress_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", 
namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Bad Epoch Timeout Seconds", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 0, - "y": 68 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 20, - "targets": [ - { - "expr": "ocr2_contract_config_delta_resend_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Resend Interval Seconds", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 8, - "y": 68 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 21, - "targets": [ - { - "expr": "ocr2_contract_config_delta_round_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Round Interval Seconds", - "description": "", - "transparent": false, - 
"datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 16, - "y": 68 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 22, - "targets": [ - { - "expr": "ocr2_contract_config_delta_stage_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Transmission Stage Timeout Second", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 0, - "y": 72 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - } - ], - "templating": { - "list": [ - { - "type": "query", - "name": "job", - "label": "Job", - "query": "label_values(up{namespace=\"otpe2\"}, job)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "contract", - "label": "Contract", - "query": "label_values(ocr2_contract_config_f{job=\"$job\"}, contract)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": 
[ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "feed_id", - "label": "Feed ID", - "query": "label_values(ocr2_contract_config_f{job=\"$job\", contract=\"$contract\"}, feed_id)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": true, - "sort": 1 - } - ] - }, - "annotations": {} - }, - "Alerts": null, - "ContactPoints": null, - "NotificationPolicies": null -} \ No newline at end of file diff --git a/observability-lib/dashboards/capabilities/component.go b/observability-lib/dashboards/capabilities/component.go deleted file mode 100644 index 27c035c97..000000000 --- a/observability-lib/dashboards/capabilities/component.go +++ /dev/null @@ -1,219 +0,0 @@ -package capabilities - -import ( - "fmt" - - "github.com/grafana/grafana-foundation-sdk/go/cog" - "github.com/grafana/grafana-foundation-sdk/go/dashboard" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -type Props struct { - Name string // Name is the name of the dashboard - MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics -} - -// NewDashboard creates a Capabilities dashboard -func NewDashboard(props *Props) (*grafana.Dashboard, error) { - if props.Name == "" { - return nil, fmt.Errorf("Name is required") - } - - if props.MetricsDataSource == nil { - return nil, fmt.Errorf("MetricsDataSource is required") - } else { - if props.MetricsDataSource.Name == "" { - return nil, fmt.Errorf("MetricsDataSource.Name is required") - } - } - - builder := grafana.NewBuilder(&grafana.BuilderOptions{ - Name: props.Name, - Tags: []string{"Capabilities"}, - Refresh: "30s", - TimeFrom: "now-7d", - TimeTo: "now", - }) - - builder.AddVars(vars(props)...) - - builder.AddRow("Common indicators for capabilities") - builder.AddPanel(capabilitiesCommon(props)...) 
- - return builder.Build() -} - -func vars(p *Props) []cog.Builder[dashboard.VariableModel] { - var variables []cog.Builder[dashboard.VariableModel] - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Environment", - Name: "env", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up, env)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Cluster", - Name: "cluster", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env"}, cluster)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Namespace", - Name: "namespace", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster"}, namespace)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Job", - Name: "job", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace"}, job)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Pod", - Name: "pod", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", job="$job"}, pod)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Capability", - Name: "capability", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", job="$job"}, 
pod)`, - Multi: false, - })) - - return variables -} - -func capabilitiesCommon(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Execution Time", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `capability_execution_time_ms`, - Legend: "{{capability}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Runs Count", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `capability_runs_count`, - Legend: "{{capability}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Runs Fault Count", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `capability_runs_fault_count`, - Legend: "{{capability}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Runs Invalid Count", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `capability_runs_invalid_count`, - Legend: "{{capability}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Runs Unauthorized Count", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `capability_runs_unauthorized_count`, - Legend: "{{capability}}", - }, - }, - }, - })) - - panels = 
append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Runs No Resource Count", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `capability_runs_no_resource_count`, - Legend: "{{capability}}", - }, - }, - }, - })) - - return panels -} diff --git a/observability-lib/dashboards/capabilities/component_test.go b/observability-lib/dashboards/capabilities/component_test.go deleted file mode 100644 index 48cd0003d..000000000 --- a/observability-lib/dashboards/capabilities/component_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package capabilities_test - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - - "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/capabilities" -) - -func TestNewDashboard(t *testing.T) { - t.Run("NewDashboard creates a dashboard", func(t *testing.T) { - testDashboard, err := capabilities.NewDashboard(&capabilities.Props{ - Name: "Capabilities Dashboard", - MetricsDataSource: grafana.NewDataSource("Prometheus", ""), - }) - if err != nil { - t.Errorf("Error creating dashboard: %v", err) - } - require.IsType(t, grafana.Dashboard{}, *testDashboard) - require.Equal(t, "Capabilities Dashboard", *testDashboard.Dashboard.Title) - json, errJSON := testDashboard.GenerateJSON() - if errJSON != nil { - t.Errorf("Error generating JSON: %v", errJSON) - } - - jsonCompared, errCompared := os.ReadFile("test-output.json") - if errCompared != nil { - t.Errorf("Error reading file: %v", errCompared) - } - - require.ElementsMatch(t, jsonCompared, json) - }) -} diff --git a/observability-lib/dashboards/capabilities/test-output.json b/observability-lib/dashboards/capabilities/test-output.json deleted file mode 100644 index 192b6fe67..000000000 --- 
a/observability-lib/dashboards/capabilities/test-output.json +++ /dev/null @@ -1,460 +0,0 @@ -{ - "Dashboard": { - "title": "Capabilities Dashboard", - "tags": [ - "Capabilities" - ], - "timezone": "browser", - "graphTooltip": 0, - "time": { - "from": "now-7d", - "to": "now" - }, - "fiscalYearStartMonth": 0, - "refresh": "30s", - "schemaVersion": 0, - "panels": [ - { - "type": "row", - "collapsed": false, - "title": "Common indicators for capabilities", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 0, - "targets": [ - { - "expr": "capability_execution_time_ms", - "format": "", - "legendFormat": "{{capability}}", - "refId": "" - } - ], - "title": "Execution Time", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 0, - "y": 1 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 1, - "targets": [ - { - "expr": "capability_runs_count", - "format": "", - "legendFormat": "{{capability}}", - "refId": "" - } - ], - "title": "Runs Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 6, - "y": 1 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": 
null - } - }, - { - "type": "timeseries", - "id": 2, - "targets": [ - { - "expr": "capability_runs_fault_count", - "format": "", - "legendFormat": "{{capability}}", - "refId": "" - } - ], - "title": "Runs Fault Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 12, - "y": 1 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 3, - "targets": [ - { - "expr": "capability_runs_invalid_count", - "format": "", - "legendFormat": "{{capability}}", - "refId": "" - } - ], - "title": "Runs Invalid Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 18, - "y": 1 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 4, - "targets": [ - { - "expr": "capability_runs_unauthorized_count", - "format": "", - "legendFormat": "{{capability}}", - "refId": "" - } - ], - "title": "Runs Unauthorized Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 0, - "y": 5 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - 
"mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 5, - "targets": [ - { - "expr": "capability_runs_no_resource_count", - "format": "", - "legendFormat": "{{capability}}", - "refId": "" - } - ], - "title": "Runs No Resource Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 6, - "y": 5 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - } - ], - "templating": { - "list": [ - { - "type": "query", - "name": "env", - "label": "Environment", - "query": "label_values(up, env)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "cluster", - "label": "Cluster", - "query": "label_values(up{env=\"$env\"}, cluster)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "namespace", - "label": "Namespace", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\"}, namespace)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "job", - "label": "Job", - "query": 
"label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\"}, job)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "pod", - "label": "Pod", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\", job=\"$job\"}, pod)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "capability", - "label": "Capability", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\", job=\"$job\"}, pod)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - } - ] - }, - "annotations": {} - }, - "Alerts": null, - "ContactPoints": null, - "NotificationPolicies": null -} \ No newline at end of file diff --git a/observability-lib/dashboards/core-node-components/component.go b/observability-lib/dashboards/core-node-components/component.go deleted file mode 100644 index 5e2d824b7..000000000 --- a/observability-lib/dashboards/core-node-components/component.go +++ /dev/null @@ -1,207 +0,0 @@ -package corenodecomponents - -import ( - "fmt" - - "github.com/grafana/grafana-foundation-sdk/go/cog" - "github.com/grafana/grafana-foundation-sdk/go/common" - "github.com/grafana/grafana-foundation-sdk/go/dashboard" - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -func NewDashboard(props *Props) (*grafana.Dashboard, error) { - if props.Name == "" { - return nil, fmt.Errorf("Name is required") - } - - props.platformOpts = platformPanelOpts() - - builder := grafana.NewBuilder(&grafana.BuilderOptions{ - Name: props.Name, - Tags: []string{"Core", "Node", "Components"}, - 
Refresh: "30s", - TimeFrom: "now-30m", - TimeTo: "now", - }) - - builder.AddVars(vars(props)...) - builder.AddPanel(panelsGeneralInfo(props)...) - - return builder.Build() -} - -func vars(p *Props) []cog.Builder[dashboard.VariableModel] { - var variables []cog.Builder[dashboard.VariableModel] - - variables = append(variables, grafana.NewIntervalVariable(&grafana.IntervalVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Interval", - Name: "interval", - }, - Interval: "30s,1m,5m,15m,30m,1h,6h,12h", - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Environment", - Name: "env", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up, env)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Cluster", - Name: "cluster", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env"}, cluster)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Blockchain", - Name: "blockchain", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster"}, blockchain)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Product", - Name: "product", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", blockchain="$blockchain"}, product)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Network Type", - Name: "network_type", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", blockchain="$blockchain", 
product="$product"}, network_type)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Component", - Name: "component", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", blockchain="$blockchain", network_type="$network_type"}, component)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Service", - Name: "service", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", blockchain="$blockchain", network_type="$network_type", component="$component"}, service)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Service ID", - Name: "service_id", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(health{cluster="$cluster", blockchain="$blockchain", network_type="$network_type", component="$component", service="$service"}, service_id)`, - Multi: true, - IncludeAll: true, - })) - - return variables -} - -func panelsGeneralInfo(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Components Health Avg by Service", - Span: 24, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * avg(avg_over_time(health{` + p.platformOpts.LabelQuery + `service_id=~"${service_id}"}[$interval])) by (service_id, version, service, cluster, env)`, - Legend: "{{service_id}}", - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: 
"red"}, - {Value: grafana.Pointer[float64](80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - }, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationVertical, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Components Health by Service", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * (health{` + p.platformOpts.LabelQuery + `service_id=~"${service_id}"})`, - Legend: "{{service_id}}", - }, - }, - Min: grafana.Pointer[float64](0), - Max: grafana.Pointer[float64](100), - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Components Health Avg by Service", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * (avg(avg_over_time(health{` + p.platformOpts.LabelQuery + `service_id=~"${service_id}"}[$interval])) by (service_id, version, service, cluster, env))`, - Legend: "{{service_id}}", - }, - }, - Min: grafana.Pointer[float64](0), - Max: grafana.Pointer[float64](100), - }, - })) - - panels = append(panels, grafana.NewLogPanel(&grafana.LogPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.LogsDataSource.Name, - Title: "Logs with severity >= error", - Span: 24, - Height: 6, - Query: []grafana.Query{ - { - Expr: `{env="${env}", cluster="${cluster}", product="${product}", network_type="${network_type}", instance=~"${service}"} | json | level=~"(error|panic|fatal|crit)"`, - Legend: "", - }, - }, - }, - })) - - return panels -} diff --git a/observability-lib/dashboards/core-node-components/component_test.go 
b/observability-lib/dashboards/core-node-components/component_test.go deleted file mode 100644 index 8d581da90..000000000 --- a/observability-lib/dashboards/core-node-components/component_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package corenodecomponents_test - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - - corenodecomponents "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/core-node-components" -) - -func TestNewDashboard(t *testing.T) { - t.Run("NewDashboard creates a dashboard", func(t *testing.T) { - testDashboard, err := corenodecomponents.NewDashboard(&corenodecomponents.Props{ - Name: "Core Node Components Dashboard", - MetricsDataSource: grafana.NewDataSource("Prometheus", ""), - LogsDataSource: grafana.NewDataSource("Loki", ""), - }) - if err != nil { - t.Errorf("Error creating dashboard: %v", err) - } - require.IsType(t, grafana.Dashboard{}, *testDashboard) - require.Equal(t, "Core Node Components Dashboard", *testDashboard.Dashboard.Title) - json, errJSON := testDashboard.GenerateJSON() - if errJSON != nil { - t.Errorf("Error generating JSON: %v", errJSON) - } - - jsonCompared, errCompared := os.ReadFile("test-output.json") - if errCompared != nil { - t.Errorf("Error reading file: %v", errCompared) - } - - require.ElementsMatch(t, jsonCompared, json) - }) -} diff --git a/observability-lib/dashboards/core-node-components/platform.go b/observability-lib/dashboards/core-node-components/platform.go deleted file mode 100644 index 6605e40a2..000000000 --- a/observability-lib/dashboards/core-node-components/platform.go +++ /dev/null @@ -1,39 +0,0 @@ -package corenodecomponents - -import "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - -type platformOpts struct { - // Platform is infrastructure deployment platform: docker or k8s - Platform string - LabelFilters map[string]string - LabelFilter string - 
LegendString string - LabelQuery string -} - -type Props struct { - Name string // Name is the name of the dashboard - Platform grafana.TypePlatform // Platform is infrastructure deployment platform: docker or k8s - MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics - LogsDataSource *grafana.DataSource // LogsDataSource is the datasource for querying logs - platformOpts platformOpts -} - -// PlatformPanelOpts generate different queries for "docker" and "k8s" deployment platforms -func platformPanelOpts() platformOpts { - po := platformOpts{ - LabelFilters: map[string]string{ - "env": `=~"${env}"`, - "cluster": `=~"${cluster}"`, - "blockchain": `=~"${blockchain}"`, - "product": `=~"${product}"`, - "network_type": `=~"${network_type}"`, - "component": `=~"${component}"`, - "service": `=~"${service}"`, - }, - } - for key, value := range po.LabelFilters { - po.LabelQuery += key + value + ", " - } - return po -} diff --git a/observability-lib/dashboards/core-node-components/test-output.json b/observability-lib/dashboards/core-node-components/test-output.json deleted file mode 100644 index f722b9335..000000000 --- a/observability-lib/dashboards/core-node-components/test-output.json +++ /dev/null @@ -1,410 +0,0 @@ -{ - "Dashboard": { - "title": "Core Node Components Dashboard", - "tags": [ - "Core", - "Node", - "Components" - ], - "timezone": "browser", - "graphTooltip": 0, - "time": { - "from": "now-30m", - "to": "now" - }, - "fiscalYearStartMonth": 0, - "refresh": "30s", - "schemaVersion": 0, - "panels": [ - { - "type": "stat", - "id": 0, - "targets": [ - { - "expr": "100 * avg(avg_over_time(health{blockchain=~\"${blockchain}\", product=~\"${product}\", network_type=~\"${network_type}\", component=~\"${component}\", service=~\"${service}\", env=~\"${env}\", cluster=~\"${cluster}\", service_id=~\"${service_id}\"}[$interval])) by (service_id, version, service, cluster, env)", - "format": "", - "legendFormat": 
"{{service_id}}", - "refId": "" - } - ], - "title": "Components Health Avg by Service", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 24, - "x": 0, - "y": 0 - }, - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "vertical" - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 80, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 1, - "targets": [ - { - "expr": "100 * (health{blockchain=~\"${blockchain}\", product=~\"${product}\", network_type=~\"${network_type}\", component=~\"${component}\", service=~\"${service}\", env=~\"${env}\", cluster=~\"${cluster}\", service_id=~\"${service_id}\"})", - "format": "", - "legendFormat": "{{service_id}}", - "refId": "" - } - ], - "title": "Components Health by Service", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 4 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "min": 0, - "max": 100, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 2, - "targets": [ - { - "expr": "100 * 
(avg(avg_over_time(health{blockchain=~\"${blockchain}\", product=~\"${product}\", network_type=~\"${network_type}\", component=~\"${component}\", service=~\"${service}\", env=~\"${env}\", cluster=~\"${cluster}\", service_id=~\"${service_id}\"}[$interval])) by (service_id, version, service, cluster, env))", - "format": "", - "legendFormat": "{{service_id}}", - "refId": "" - } - ], - "title": "Components Health Avg by Service", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 10 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "min": 0, - "max": 100, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "logs", - "id": 3, - "targets": [ - { - "expr": "{env=\"${env}\", cluster=\"${cluster}\", product=\"${product}\", network_type=\"${network_type}\", instance=~\"${service}\"} | json | level=~\"(error|panic|fatal|crit)\"", - "format": "", - "legendFormat": "", - "refId": "" - } - ], - "title": "Logs with severity \u003e= error", - "description": "", - "transparent": false, - "datasource": { - "uid": "Loki" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 16 - }, - "fieldConfig": { - "defaults": { - "noValue": "No data" - }, - "overrides": null - } - } - ], - "templating": { - "list": [ - { - "type": "interval", - "name": "interval", - "label": "Interval", - "query": "30s,1m,5m,15m,30m,1h,6h,12h", - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - } - }, - { - "type": "query", - "name": "env", - "label": "Environment", - "query": "label_values(up, env)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": 
true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "cluster", - "label": "Cluster", - "query": "label_values(up{env=\"$env\"}, cluster)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "blockchain", - "label": "Blockchain", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\"}, blockchain)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "product", - "label": "Product", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\"}, product)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "network_type", - "label": "Network Type", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\", product=\"$product\"}, network_type)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "component", - "label": "Component", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\", network_type=\"$network_type\"}, component)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "service", - "label": "Service", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\", 
network_type=\"$network_type\", component=\"$component\"}, service)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "service_id", - "label": "Service ID", - "query": "label_values(health{cluster=\"$cluster\", blockchain=\"$blockchain\", network_type=\"$network_type\", component=\"$component\", service=\"$service\"}, service_id)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": true, - "sort": 1, - "includeAll": true - } - ] - }, - "annotations": {} - }, - "Alerts": null, - "ContactPoints": null, - "NotificationPolicies": null -} \ No newline at end of file diff --git a/observability-lib/dashboards/core-node/component.go b/observability-lib/dashboards/core-node/component.go deleted file mode 100644 index ec3392201..000000000 --- a/observability-lib/dashboards/core-node/component.go +++ /dev/null @@ -1,2160 +0,0 @@ -package corenode - -import ( - "fmt" - - "github.com/grafana/grafana-foundation-sdk/go/alerting" - "github.com/grafana/grafana-foundation-sdk/go/cog" - "github.com/grafana/grafana-foundation-sdk/go/common" - "github.com/grafana/grafana-foundation-sdk/go/dashboard" - "github.com/grafana/grafana-foundation-sdk/go/expr" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -// NewDashboard creates a DON dashboard for the given OCR version -func NewDashboard(props *Props) (*grafana.Dashboard, error) { - if props.Name == "" { - return nil, fmt.Errorf("Name is required") - } - - if props.Platform == "" { - return nil, fmt.Errorf("Platform is required") - } - - if props.MetricsDataSource == nil { - return nil, fmt.Errorf("MetricsDataSource is required") - } else { - if props.MetricsDataSource.Name == "" { - return nil, fmt.Errorf("MetricsDataSource.Name is required") - } - if 
props.MetricsDataSource.UID == "" { - return nil, fmt.Errorf("MetricsDataSource.UID is required") - } - } - - props.platformOpts = platformPanelOpts(props.Platform) - - builder := grafana.NewBuilder(&grafana.BuilderOptions{ - Name: props.Name, - Tags: []string{"Core", "Node"}, - Refresh: "30s", - TimeFrom: "now-30m", - TimeTo: "now", - AlertsTags: props.AlertsTags, - }) - - if props.SlackChannel != "" && props.SlackWebhookURL != "" { - builder.AddContactPoint(grafana.NewContactPoint(&grafana.ContactPointOptions{ - Name: "chainlink-slack", - Type: "slack", - Settings: map[string]interface{}{ - "url": props.SlackWebhookURL, - "recipient": props.SlackChannel, - "username": "Chainlink Alerts", - "title": `{{ template "slack.chainlink.title" . }}`, - "text": `{{ template "slack.chainlink.text" . }}`, - "color": `{{ template "slack.chainlink.color" . }}`, - }, - })) - } - - notificationPolicyOptions := &grafana.NotificationPolicyOptions{ - Receiver: "chainlink-slack", - GroupBy: []string{"grafana_folder", "alertname"}, - } - for name, value := range props.AlertsTags { - notificationPolicyOptions.ObjectMatchers = append(notificationPolicyOptions.ObjectMatchers, alerting.ObjectMatcher{name, "=", value}) - } - - builder.AddNotificationPolicy(grafana.NewNotificationPolicy(notificationPolicyOptions)) - - builder.AddVars(vars(props)...) - - builder.AddRow("Headlines") - builder.AddPanel(headlines(props)...) - - builder.AddRow("AppDBConnections") - builder.AddPanel(appDBConnections(props)...) - - builder.AddRow("SQLQueries") - builder.AddPanel(sqlQueries(props)...) - - builder.AddRow("HeadTracker") - builder.AddPanel(headTracker(props)...) - - builder.AddRow("HeadReporter") - builder.AddPanel(headReporter(props)...) - - builder.AddRow("TxManager") - builder.AddPanel(txManager(props)...) - - builder.AddRow("LogPoller") - builder.AddPanel(logPoller(props)...) - - builder.AddRow("Feeds Jobs") - builder.AddPanel(feedsJobs(props)...) 
- - builder.AddRow("Mailbox") - builder.AddPanel(mailbox(props)...) - - builder.AddRow("Logs Counters") - builder.AddPanel(logsCounters(props)...) - - builder.AddRow("Logs Rate") - builder.AddPanel(logsRate(props)...) - - builder.AddRow("EvmPoolLifecycle") - builder.AddPanel(evmPoolLifecycle(props)...) - - builder.AddRow("Node RPC State") - builder.AddPanel(nodesRPC(props)...) - - builder.AddRow("EVM Pool RPC Node Metrics (App)") - builder.AddPanel(evmNodeRPC(props)...) - - builder.AddRow("EVM Pool RPC Node Latencies (App)") - builder.AddPanel(evmPoolRPCNodeLatencies(props)...) - - builder.AddRow("Block History Estimator") - builder.AddPanel(evmBlockHistoryEstimator(props)...) - - builder.AddRow("Pipeline Metrics (Runner)") - builder.AddPanel(pipelines(props)...) - - builder.AddRow("HTTP API") - builder.AddPanel(httpAPI(props)...) - - builder.AddRow("PromHTTP") - builder.AddPanel(promHTTP(props)...) - - builder.AddRow("Go Metrics") - builder.AddPanel(goMetrics(props)...) - - return builder.Build() -} - -func vars(p *Props) []cog.Builder[dashboard.VariableModel] { - var variables []cog.Builder[dashboard.VariableModel] - - if p.platformOpts.Platform == "kubernetes" { - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Environment", - Name: "env", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up, env)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Cluster", - Name: "cluster", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env"}, cluster)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Namespace", - Name: "namespace", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster"}, 
namespace)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Blockchain", - Name: "blockchain", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace"}, blockchain)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Product", - Name: "product", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain"}, product)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Network Type", - Name: "network_type", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain", product="$product"}, network_type)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Job", - Name: "job", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain", product="$product", network_type="$network_type"}, job)`, - Multi: true, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Pod", - Name: "pod", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", job="$job"}, pod)`, - Multi: true, - IncludeAll: true, - })) - } else { - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Instance", - Name: "instance", - 
}, - Datasource: p.MetricsDataSource.Name, - Query: fmt.Sprintf("label_values(%s)", p.platformOpts.LabelFilter), - Multi: true, - IncludeAll: true, - })) - } - - return variables -} - -func headlines(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "App Version", - Description: "app version with commit and branch links", - Span: 12, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `version{` + p.platformOpts.LabelQuery + `}`, - Legend: "Version: {{version}} https://github.com/smartcontractkit/chainlink/commit/{{commit}} https://github.com/smartcontractkit/chainlink/tree/release/{{version}}", - Instant: true, - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - TextMode: common.BigValueTextModeName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Uptime", - Description: "instance uptime", - Span: 12, - Height: 4, - Decimals: 2, - Unit: "s", - Query: []grafana.Query{ - { - Expr: `uptime_seconds{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "ETH Balance Summary", - Span: 12, - Height: 4, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `sum(eth_balance{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `, account)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{account}}`, - Instant: true, - }, - }, - 
Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - }, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Solana Balance Summary", - Span: 12, - Height: 4, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `sum(solana_balance{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `, account)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{account}}`, - Instant: true, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - }, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Health Avg by Service over 15m", - Span: 16, - Height: 6, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * (avg(avg_over_time(health{` + p.platformOpts.LabelQuery + `}[15m])) by (` + p.platformOpts.LabelFilter + `, service_id, version, service, cluster, env))`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{service_id}}`, - }, - }, - Min: grafana.Pointer[float64](0), - Max: grafana.Pointer[float64](100), - AlertOptions: &grafana.AlertOptions{ - Summary: `Uptime less than 
90% over last 15 minutes on one component in a Node`, - Description: `Component {{ index $labels "service_id" }} uptime in the last 15m is {{ index $values "A" }}%`, - RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - For: "15m", - Tags: map[string]string{ - "severity": "warning", - }, - Query: []grafana.RuleQuery{ - { - Expr: `health{` + p.AlertsFilters + `}`, - RefID: "A", - Datasource: p.MetricsDataSource.UID, - }, - }, - QueryRefCondition: "D", - Condition: []grafana.ConditionQuery{ - { - RefID: "B", - ReduceExpression: &grafana.ReduceExpression{ - Expression: "A", - Reducer: expr.TypeReduceReducerMean, - }, - }, - { - RefID: "C", - MathExpression: &grafana.MathExpression{ - Expression: "$B * 100", - }, - }, - { - RefID: "D", - ThresholdExpression: &grafana.ThresholdExpression{ - Expression: "C", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{90, 0}, - Type: expr.TypeThresholdTypeLt, - }, - }, - }, - }, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Health Avg by Service over 15m with health < 90%", - Description: "Only displays services with health average < 90%", - Span: 8, - Height: 6, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * avg(avg_over_time(health{` + p.platformOpts.LabelQuery + `}[15m])) by (` + p.platformOpts.LabelFilter + `, service_id, version, service, cluster, env) < 90`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{service_id}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "green"}, - {Value: grafana.Pointer[float64](1), Color: "red"}, - {Value: 
grafana.Pointer[float64](80), Color: "orange"}, - {Value: grafana.Pointer[float64](99), Color: "green"}, - }, - }, - NoValue: "All services healthy", - }, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "ETH Balance", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `sum(eth_balance{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `, account)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{account}}`, - }, - }, - AlertOptions: &grafana.AlertOptions{ - Summary: `ETH Balance is lower than threshold`, - Description: `ETH Balance critically low at {{ index $values "A" }} on {{ index $labels "` + p.platformOpts.LabelFilter + `" }}`, - RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - For: "15m", - NoDataState: alerting.RuleNoDataStateOK, - Tags: map[string]string{ - "severity": "critical", - }, - Query: []grafana.RuleQuery{ - { - Expr: `eth_balance{` + p.AlertsFilters + `}`, - Instant: true, - RefID: "A", - Datasource: p.MetricsDataSource.UID, - }, - }, - QueryRefCondition: "B", - Condition: []grafana.ConditionQuery{ - { - RefID: "B", - ThresholdExpression: &grafana.ThresholdExpression{ - Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{1, 0}, - Type: expr.TypeThresholdTypeLt, - }, - }, - }, - }, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "SOL Balance", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `sum(solana_balance{` + p.platformOpts.LabelQuery + `}) 
by (` + p.platformOpts.LabelFilter + `, account)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{account}}`, - }, - }, - AlertOptions: &grafana.AlertOptions{ - Summary: `Solana Balance is lower than threshold`, - Description: `Solana Balance critically low at {{ index $values "A" }} on {{ index $labels "` + p.platformOpts.LabelFilter + `" }}`, - RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - For: "15m", - NoDataState: alerting.RuleNoDataStateOK, - Tags: map[string]string{ - "severity": "critical", - }, - Query: []grafana.RuleQuery{ - { - Expr: `solana_balance{` + p.AlertsFilters + `}`, - Instant: true, - RefID: "A", - Datasource: p.MetricsDataSource.UID, - }, - }, - QueryRefCondition: "B", - Condition: []grafana.ConditionQuery{ - { - RefID: "B", - ThresholdExpression: &grafana.ThresholdExpression{ - Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{1, 0}, - Type: expr.TypeThresholdTypeLt, - }, - }, - }, - }, - }, - }, - }, - })) - - if p.platformOpts.Platform == "kubernetes" { - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "CPU Utilisation (from requests)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_requests{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: `{{pod}}`, - Instant: true, - }, - }, - }, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "CPU Utilisation (from limits)", - Span: 6, - Height: 4, - Decimals: 
1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_limits{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: `{{pod}}`, - Instant: true, - }, - }, - }, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory Utilisation (from requests)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(container_memory_working_set_bytes{job="kubelet", metrics_path="/metrics/cadvisor", cluster="$cluster", namespace="$namespace", pod="$pod", image!=""}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_requests{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: `{{pod}}`, - Instant: true, - }, - }, - }, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory Utilisation (from limits)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(container_memory_working_set_bytes{job="kubelet", metrics_path="/metrics/cadvisor", cluster="$cluster", namespace="$namespace", pod="$pod", container!="", image!=""}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_limits{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: `{{pod}}`, - Instant: true, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "CPU Usage", 
- Span: 12, - Height: 8, - Decimals: 3, - Query: []grafana.Query{ - { - Expr: `sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{pod=~"$pod", namespace=~"${namespace}"}) by (pod)`, - Legend: "{{pod}}", - }, - { - Expr: `sum(kube_pod_container_resource_requests{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="cpu"})`, - Legend: "Requests", - }, - { - Expr: `sum(kube_pod_container_resource_limits{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="cpu"})`, - Legend: "Limits", - }, - }, - }, - ScaleDistribution: common.ScaleDistributionLog, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory Usage", - Span: 12, - Height: 8, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `sum(container_memory_rss{pod=~"$pod", namespace=~"${namespace}", container!=""}) by (pod)`, - Legend: "{{pod}}", - }, - { - Expr: `sum(kube_pod_container_resource_requests{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="memory"})`, - Legend: "Requests", - }, - { - Expr: `sum(kube_pod_container_resource_limits{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="memory"})`, - Legend: "Limits", - }, - }, - }, - ScaleDistribution: common.ScaleDistributionLog, - })) - } - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Open File Descriptors", - Span: 6, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `process_open_fds{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - GraphMode: common.BigValueGraphModeArea, - })) - - panels = append(panels, 
grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Go Version", - Span: 4, - Height: 4, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `go_info{` + p.platformOpts.LabelQuery + `}`, - Legend: "{{exported_version}}", - Instant: true, - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - TextMode: common.BigValueTextModeName, - })) - - return panels -} - -func appDBConnections(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "DB Connections", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "Conn", - Query: []grafana.Query{ - { - Expr: `sum(db_conns_max{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - Max`, - }, - { - Expr: `sum(db_conns_open{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - Open`, - }, - { - Expr: `sum(db_conns_used{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - Used`, - }, - { - Expr: `sum(db_conns_wait{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - Wait`, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "DB Wait Count", - Span: 12, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(db_wait_count{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + 
`)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "DB Wait Time", - Span: 12, - Height: 6, - Unit: "Sec", - Query: []grafana.Query{ - { - Expr: `sum(db_wait_time_seconds{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - }, - })) - - return panels -} - -func sqlQueries(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "SQL Query Timeout Percent", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `histogram_quantile(0.9, sum(rate(sql_query_timeout_percent_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le))`, - Legend: "p90", - }, - { - Expr: `histogram_quantile(0.95, sum(rate(sql_query_timeout_percent_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le))`, - Legend: "p95", - }, - { - Expr: `histogram_quantile(0.99, sum(rate(sql_query_timeout_percent_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le))`, - Legend: "p99", - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - return panels -} - -func headTracker(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Head Tracker Current Head", - Span: 18, - Height: 6, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `sum(head_tracker_current_head{` + 
p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Head Tracker Current Head Summary", - Span: 6, - Height: 6, - Query: []grafana.Query{ - { - Expr: `head_tracker_current_head{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - Instant: true, - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Head Tracker Heads Received Rate", - Span: 24, - Height: 6, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `rate(head_tracker_heads_received{` + p.platformOpts.LabelQuery + `}[1m])`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - AlertOptions: &grafana.AlertOptions{ - Summary: `No Headers Received`, - Description: `{{ index $labels "` + p.platformOpts.LabelFilter + `" }} on ChainID {{ index $labels "ChainID" }} has received {{ index $values "A" }} heads over 10 minutes.`, - RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - For: "10m", - NoDataState: alerting.RuleNoDataStateOK, - Tags: map[string]string{ - "severity": "critical", - }, - Query: []grafana.RuleQuery{ - { - Expr: `increase(head_tracker_heads_received{` + p.AlertsFilters + `}[10m])`, - Instant: true, - RefID: "A", - Datasource: p.MetricsDataSource.UID, - }, - }, - QueryRefCondition: "B", - Condition: []grafana.ConditionQuery{ - { - RefID: "B", - ThresholdExpression: &grafana.ThresholdExpression{ - Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{1, 0}, - Type: expr.TypeThresholdTypeLt, - }, - }, - }, - 
}, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Head Tracker Very Old Head", - Span: 12, - Height: 6, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `head_tracker_very_old_head{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Head Tracker Connection Errors Rate", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `rate(head_tracker_connection_errors{` + p.platformOpts.LabelQuery + `}[1m])`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - return panels -} - -func headReporter(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Unconfirmed Transactions", - Span: 8, - Height: 6, - Unit: "Tx", - Query: []grafana.Query{ - { - Expr: `sum(unconfirmed_transactions{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Unconfirmed TX Age", - Span: 8, - Height: 6, - Unit: "s", - Query: []grafana.Query{ - { - Expr: `sum(max_unconfirmed_tx_age{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ 
- PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Unconfirmed TX Blocks", - Span: 8, - Height: 6, - Unit: "Blocks", - Query: []grafana.Query{ - { - Expr: `sum(max_unconfirmed_blocks{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - return panels -} - -func txManager(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - txStatus := map[string]string{ - "num_confirmed_transactions": "Confirmed", - "num_successful_transactions": "Successful", - "num_tx_reverted": "Reverted", - "num_gas_bumps": "Gas Bumps", - "fwd_tx_count": "Forwarded", - "tx_attempt_count": "Attempts", - "gas_bump_exceeds_limit": "Gas Bump Exceeds Limit", - } - - for status, title := range txStatus { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "TX Manager " + title, - Span: 6, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(tx_manager_` + status + `{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, - }, - }, - }, - })) - } - - txUntilStatus := map[string]string{ - "broadcast": "The amount of time elapsed from when a transaction is enqueued to until it is broadcast", - "confirmed": "The amount of time elapsed from a transaction being broadcast to being included in a block", - } - - for status, description := range txUntilStatus { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "TX Manager Time Until " + status, - Description: description, - Span: 6, - Height: 6, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `histogram_quantile(0.9, 
sum(rate(tx_manager_time_until_tx_` + status + `_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le, ` + p.platformOpts.LabelFilter + `, blockchain, chainID)) / 1e6`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, - }, - }, - }, - })) - } - - return panels -} - -func logPoller(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Goroutines per ChainId", - Description: "goroutines per chainId", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `count(log_poller_query_duration_sum{` + p.platformOpts.LabelQuery + `}) by (evmChainID)`, - Legend: "chainId: {{evmChainID}}", - }, - }, - }, - ColorMode: common.BigValueColorModeValue, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "RPS", - Description: "requests per second", - Span: 12, - Height: 6, - Decimals: 2, - Unit: "reqps", - Query: []grafana.Query{ - { - Expr: `avg by (query, ` + p.platformOpts.LabelFilter + `) (sum by (query, job) (rate(log_poller_query_duration_count{` + p.platformOpts.LabelQuery + `}[$__rate_interval])))`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{query}}`, - }, - { - Expr: `avg (sum by(` + p.platformOpts.LabelFilter + `) (rate(log_poller_query_duration_count{` + p.platformOpts.LabelQuery + `}[$__rate_interval])))`, - Legend: "Total", - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, 
grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "RPS by Type", - Span: 12, - Height: 6, - Decimals: 2, - Unit: "reqps", - Query: []grafana.Query{ - { - Expr: `avg by (` + p.platformOpts.LabelFilter + `, type) (sum by (type, ` + p.platformOpts.LabelFilter + `) (rate(log_poller_query_duration_count{` + p.platformOpts.LabelQuery + `}[$__rate_interval])))`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{type}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Avg number of logs returned", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `avg by (` + p.platformOpts.LabelFilter + `, query) (log_poller_query_dataset_size{` + p.platformOpts.LabelQuery + `})`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{query}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Max number of logs returned", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `max by (` + p.platformOpts.LabelFilter + `, query) (log_poller_query_dataset_size{` + p.platformOpts.LabelQuery + `})`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{query}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Logs returned by chain", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `max by (evmChainID) (log_poller_query_dataset_size{` + p.platformOpts.LabelQuery + `})`, - Legend: "{{evmChainID}}", - }, - }, - }, - })) - - quantiles := []string{"0.5", "0.9", "0.99"} - for _, 
quantile := range quantiles { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: `Queries duration by query ` + quantile + ` quantile`, - Span: 24, - Height: 6, - Decimals: 2, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `histogram_quantile(` + quantile + `, sum(rate(log_poller_query_duration_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le, ` + p.platformOpts.LabelFilter + `, query)) / 1e6`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{query}}`, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - } - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Number of logs inserted", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `avg by (evmChainID) (log_poller_logs_inserted{` + p.platformOpts.LabelQuery + `})`, - Legend: "{{evmChainID}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Logs insertion rate", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `avg by (evmChainID) (rate(log_poller_logs_inserted{` + p.platformOpts.LabelQuery + `}[$__rate_interval]))`, - Legend: "{{evmChainID}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Number of blocks inserted", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `avg by (evmChainID) (log_poller_blocks_inserted{` + p.platformOpts.LabelQuery + `})`, - Legend: 
"{{evmChainID}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Blocks insertion rate", - Span: 12, - Height: 6, - Decimals: 2, - Query: []grafana.Query{ - { - Expr: `avg by (evmChainID) (rate(log_poller_blocks_inserted{` + p.platformOpts.LabelQuery + `}[$__rate_interval]))`, - Legend: "{{evmChainID}}", - }, - }, - }, - })) - - return panels -} - -func feedsJobs(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Feeds Job Proposal Requests", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(feeds_job_proposal_requests{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Feeds Job Proposal Count", - Description: "", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(feeds_job_proposal_count{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - return panels -} - -func mailbox(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Mailbox Load Percent", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `sum(mailbox_load_percent{` + p.platformOpts.LabelQuery + `}) by (capacity, 
name, ` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - Capacity: {{capacity}} - {{name}}`, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - return panels -} - -func logsCounters(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - logStatuses := []string{"panic", "fatal", "critical", "warn", "error"} - for _, status := range logStatuses { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Logs Counter - " + status, - Span: 8, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(log_` + status + `_count{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - ` + status, - }, - }, - }, - })) - } - - return panels -} - -func logsRate(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - logStatuses := []string{"panic", "fatal", "critical", "warn", "error"} - for _, status := range logStatuses { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Logs Rate - " + status, - Span: 8, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(rate(log_` + status + `_count{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - error`, - }, - }, - }, - })) - } - - return panels -} - -func evmPoolLifecycle(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool Highest Seen Block", - Span: 12, - Height: 6, - Decimals: 
1, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_highest_seen_block{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool Num Seen Blocks", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_num_seen_blocks{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool Node Polls Total", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_polls_total{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool Node Polls Failed", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_polls_failed{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool Node Polls Success", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "Block", - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_polls_success{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - return 
panels -} - -func nodesRPC(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - nodeRPCStates := []string{"Alive", "Closed", "Dialed", "InvalidChainID", "OutOfSync", "Undialed", "Unreachable", "Unusable"} - for _, state := range nodeRPCStates { - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Node RPC " + state, - Span: 6, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(multi_node_states{` + p.platformOpts.LabelQuery + `state="` + state + `"}) by (` + p.platformOpts.LabelFilter + `, chainId)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{chainId}}`, - }, - }, - }, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - } - - return panels -} - -func evmNodeRPC(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool RPC Node Calls Success Rate", - Span: 24, - Height: 6, - Unit: "percentunit", - Max: grafana.Pointer[float64](1), - Query: []grafana.Query{ - { - Expr: `sum(increase(evm_pool_rpc_node_calls_success{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LabelFilter + `, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LabelFilter + `, evmChainID, nodeName)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{nodeName}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.8), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, 
- }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool RPC Node Dials Failure Rate", - Span: 24, - Height: 6, - Unit: "percentunit", - Max: grafana.Pointer[float64](1), - Query: []grafana.Query{ - { - Expr: `sum(increase(evm_pool_rpc_node_dials_failed{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LabelFilter + `, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LabelFilter + `, evmChainID, nodeName)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{evmChainID}} - {{nodeName}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.8), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool RPC Node Transitions", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_num_transitions_to_alive{` + p.platformOpts.LabelQuery + `}`, - Legend: "Alive", - }, - { - Expr: `evm_pool_rpc_node_num_transitions_to_in_sync{` + p.platformOpts.LabelQuery + `}`, - Legend: "InSync", - }, - { - Expr: `evm_pool_rpc_node_num_transitions_to_out_of_sync{` + p.platformOpts.LabelQuery + `}`, - 
Legend: "OutOfSync", - }, - { - Expr: `evm_pool_rpc_node_num_transitions_to_unreachable{` + p.platformOpts.LabelQuery + `}`, - Legend: "UnReachable", - }, - { - Expr: `evm_pool_rpc_node_num_transitions_to_invalid_chain_id{` + p.platformOpts.LabelQuery + `}`, - Legend: "InvalidChainID", - }, - { - Expr: `evm_pool_rpc_node_num_transitions_to_unusable{` + p.platformOpts.LabelQuery + `}`, - Legend: "TransitionToUnusable", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool RPC Node States", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `evm_pool_rpc_node_states{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - {{evmChainID}} - {{state}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool RPC Node Verifies Success Rate", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `sum(increase(evm_pool_rpc_node_verifies_success{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, evmChainID, nodeName) * 100`, - Legend: `{{` + p.platformOpts.LegendString + `}} - {{evmChainID}} - {{nodeName}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "EVM Pool RPC Node Verifies Failure Rate", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: 
`sum(increase(evm_pool_rpc_node_verifies_failed{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, evmChainID, nodeName) * 100`, - Legend: `{{` + p.platformOpts.LegendString + `}} - {{evmChainID}} - {{nodeName}}`, - }, - }, - }, - })) - - return panels -} - -func evmPoolRPCNodeLatencies(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - quantiles := []string{"0.90", "0.95", "0.99"} - for _, quantile := range quantiles { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: `EVM Pool RPC Node Calls Latency ` + quantile + ` quantile`, - Span: 24, - Height: 6, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `histogram_quantile(` + quantile + `, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LabelFilter + `, le, rpcCallName)) / 1e6`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{rpcCallName}}`, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - } - - return panels -} - -func evmBlockHistoryEstimator(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Gas Updater All Gas Price Percentiles", - Description: "Gas price at given percentile", - Span: 24, - Height: 6, - Unit: "gwei", - Query: []grafana.Query{ - { - Expr: `sum(gas_updater_all_gas_price_percentiles{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `, evmChainID, 
percentile)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{evmChainID}} - {{percentile}}`, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Gas Updater All Tip Cap Percentiles", - Description: "Tip cap at given percentile", - Span: 24, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(gas_updater_all_tip_cap_percentiles{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `, evmChainID, percentile)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{evmChainID}} - {{percentile}}`, - }, - }, - }, - LegendOptions: &grafana.LegendOptions{ - DisplayMode: common.LegendDisplayModeList, - Placement: common.LegendPlacementRight, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Gas Updater Set Gas Price", - Span: 12, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(gas_updater_set_gas_price{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Gas Updater Set Tip Cap", - Span: 12, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(gas_updater_set_tip_cap{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: 
p.MetricsDataSource.Name, - Title: "Gas Updater Current Base Fee", - Span: 12, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(gas_updater_current_base_fee{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Block History Estimator Connectivity Failure Count", - Span: 12, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(block_history_estimator_connectivity_failure_count{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}}`, - }, - }, - }, - })) - - return panels -} - -func pipelines(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Pipeline Task Execution Time", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "s", - Query: []grafana.Query{ - { - Expr: `pipeline_task_execution_time{` + p.platformOpts.LabelQuery + `} / 1e6`, - Legend: `{{` + p.platformOpts.LegendString + `}} JobID: {{job_id}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Pipeline Run Errors", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `pipeline_run_errors{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} JobID: {{job_id}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Pipeline Run Total 
Time to Completion", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "s", - Query: []grafana.Query{ - { - Expr: `pipeline_run_total_time_to_completion{` + p.platformOpts.LabelQuery + `} / 1e6`, - Legend: `{{` + p.platformOpts.LegendString + `}} JobID: {{job_id}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Pipeline Tasks Total Finished", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `pipeline_tasks_total_finished{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} JobID: {{job_id}}`, - }, - }, - }, - })) - - return panels -} - -func httpAPI(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Request Duration p95", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "s", - Query: []grafana.Query{ - { - Expr: `histogram_quantile(0.95, sum(rate(service_gonic_request_duration_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, le, path, method))`, - Legend: `{{` + p.platformOpts.LegendString + `}} - {{method}} - {{path}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Request Total Rate over interval", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(rate(service_gonic_requests_total{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, path, method, code)`, - Legend: `{{` + p.platformOpts.LegendString + `}} - {{method}} - {{path}} - {{code}}`, - }, - }, - }, - })) - - panels = append(panels, 
grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Average Request Size", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `avg(rate(service_gonic_request_size_bytes_sum{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `)/avg(rate(service_gonic_request_size_bytes_count{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Response Size", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `avg(rate(service_gonic_response_size_bytes_sum{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `)/avg(rate(service_gonic_response_size_bytes_count{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - return panels -} - -func promHTTP(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "HTTP rate by return code", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(rate(promhttp_metric_handler_requests_total{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (` + p.platformOpts.LegendString + `, code)`, - Legend: `{{` + p.platformOpts.LegendString + `}} - {{code}}`, - }, - }, - }, - })) - - return panels -} - -func goMetrics(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - 
- panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Threads", - Span: 24, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `sum(go_threads{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LegendString + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Heap Allocations Stats", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `sum(go_memstats_heap_alloc_bytes{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LegendString + `)`, - Legend: "", - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Heap allocations Graph", - Span: 24, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `sum(go_memstats_heap_alloc_bytes{` + p.platformOpts.LabelQuery + `}) by (` + p.platformOpts.LegendString + `)`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Heap Usage", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `go_memstats_heap_alloc_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Alloc`, - }, - { - Expr: `go_memstats_heap_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Sys`, - }, 
- { - Expr: `go_memstats_heap_idle_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Idle`, - }, - { - Expr: `go_memstats_heap_inuse_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - InUse`, - }, - { - Expr: `go_memstats_heap_released_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Released`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory in Off-Heap", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `go_memstats_mspan_inuse_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Total InUse`, - }, - { - Expr: `go_memstats_mspan_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Total Sys`, - }, - { - Expr: `go_memstats_mcache_inuse_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Cache InUse`, - }, - { - Expr: `go_memstats_mcache_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Cache Sys`, - }, - { - Expr: `go_memstats_buck_hash_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Hash Sys`, - }, - { - Expr: `go_memstats_gc_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - GC Sys`, - }, - { - Expr: `go_memstats_other_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - bytes of memory are used for other runtime allocations`, - }, - { - Expr: `go_memstats_next_gc_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Next GC`, - }, - }, - }, - })) - - panels = 
append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory in Stack", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `go_memstats_stack_inuse_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - InUse`, - }, - { - Expr: `go_memstats_stack_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}} - Sys`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Total Used Memory", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `go_memstats_sys_bytes{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Number of Live Objects", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `go_memstats_mallocs_total{` + p.platformOpts.LabelQuery + `} - go_memstats_frees_total{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Rate of Objects Allocated", - Span: 12, - Height: 6, - Decimals: 1, - Query: []grafana.Query{ - { - Expr: `rate(go_memstats_mallocs_total{` + p.platformOpts.LabelQuery + `}[1m])`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - 
PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Rate of a Pointer Dereferences", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "ops", - Query: []grafana.Query{ - { - Expr: `rate(go_memstats_lookups_total{` + p.platformOpts.LabelQuery + `}[1m])`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Goroutines", - Span: 12, - Height: 6, - Decimals: 1, - Unit: "ops", - Query: []grafana.Query{ - { - Expr: `go_goroutines{` + p.platformOpts.LabelQuery + `}`, - Legend: `{{` + p.platformOpts.LegendString + `}}`, - }, - }, - }, - })) - - return panels -} diff --git a/observability-lib/dashboards/core-node/component_test.go b/observability-lib/dashboards/core-node/component_test.go deleted file mode 100644 index 33a408f48..000000000 --- a/observability-lib/dashboards/core-node/component_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package corenode_test - -import ( - "os" - "testing" - - corenode "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/core-node" - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - - "github.com/stretchr/testify/require" -) - -func TestNewDashboard(t *testing.T) { - t.Run("NewDashboard creates a dashboard", func(t *testing.T) { - testDashboard, err := corenode.NewDashboard(&corenode.Props{ - Name: "Core Node Dashboard", - Platform: grafana.TypePlatformDocker, - MetricsDataSource: grafana.NewDataSource("Prometheus", "1"), - }) - if err != nil { - t.Errorf("Error creating dashboard: %v", err) - } - require.IsType(t, grafana.Dashboard{}, *testDashboard) - require.Equal(t, "Core Node Dashboard", *testDashboard.Dashboard.Title) - json, errJSON := testDashboard.GenerateJSON() - if errJSON != nil { - t.Errorf("Error generating JSON: %v", errJSON) - } - - jsonCompared, 
errCompared := os.ReadFile("test-output.json") - if errCompared != nil { - t.Errorf("Error reading file: %v", errCompared) - } - - require.ElementsMatch(t, jsonCompared, json) - }) -} diff --git a/observability-lib/dashboards/core-node/platform.go b/observability-lib/dashboards/core-node/platform.go deleted file mode 100644 index 7a8ae8e26..000000000 --- a/observability-lib/dashboards/core-node/platform.go +++ /dev/null @@ -1,53 +0,0 @@ -package corenode - -import ( - "fmt" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -type platformOpts struct { - // Platform is infrastructure deployment platform: docker or k8s - Platform grafana.TypePlatform - LabelFilters map[string]string - LabelFilter string - LegendString string - LabelQuery string -} - -type Props struct { - Name string // Name is the name of the dashboard - Platform grafana.TypePlatform // Platform is infrastructure deployment platform: docker or k8s - MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics - SlackChannel string // SlackChannel is the channel to send alerts to - SlackWebhookURL string // SlackWebhookURL is the URL to send alerts to - AlertsTags map[string]string // AlertsTags is the tags to map with notification policy - AlertsFilters string // AlertsFilters is the filters to apply to alerts - platformOpts platformOpts -} - -// PlatformPanelOpts generate different queries for "docker" and "k8s" deployment platforms -func platformPanelOpts(platform grafana.TypePlatform) platformOpts { - po := platformOpts{ - LabelFilters: map[string]string{}, - Platform: platform, - } - switch platform { - case grafana.TypePlatformKubernetes: - po.LabelFilters["namespace"] = `=~"${namespace}"` - po.LabelFilters["job"] = `=~"${job}"` - po.LabelFilters["pod"] = `=~"${pod}"` - po.LabelFilter = "job" - po.LegendString = "pod" - case grafana.TypePlatformDocker: - po.LabelFilters["instance"] = `=~"${instance}"` - po.LabelFilter = 
"instance" - po.LegendString = "instance" - default: - panic(fmt.Sprintf("failed to generate Platform dependent queries, unknown platform: %s", platform)) - } - for key, value := range po.LabelFilters { - po.LabelQuery += key + value + ", " - } - return po -} diff --git a/observability-lib/dashboards/core-node/test-output.json b/observability-lib/dashboards/core-node/test-output.json deleted file mode 100644 index 1ce52864b..000000000 --- a/observability-lib/dashboards/core-node/test-output.json +++ /dev/null @@ -1,6090 +0,0 @@ -{ - "Dashboard": { - "title": "Core Node Dashboard", - "tags": [ - "Core", - "Node" - ], - "timezone": "browser", - "graphTooltip": 0, - "time": { - "from": "now-30m", - "to": "now" - }, - "fiscalYearStartMonth": 0, - "refresh": "30s", - "schemaVersion": 0, - "panels": [ - { - "type": "row", - "collapsed": false, - "title": "Headlines", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 0, - "targets": [ - { - "expr": "version{instance=~\"${instance}\", }", - "instant": true, - "range": false, - "format": "", - "legendFormat": "Version: {{version}} https://github.com/smartcontractkit/chainlink/commit/{{commit}} https://github.com/smartcontractkit/chainlink/tree/release/{{version}}", - "refId": "" - } - ], - "title": "App Version", - "description": "app version with commit and branch links", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 1, - 
"targets": [ - { - "expr": "uptime_seconds{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Uptime", - "description": "instance uptime", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "s", - "decimals": 2, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 2, - "targets": [ - { - "expr": "sum(eth_balance{instance=~\"${instance}\", }) by (instance, account)", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{instance}} - {{account}}", - "refId": "" - } - ], - "title": "ETH Balance Summary", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 5 - }, - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 3, - "targets": [ - { - "expr": "sum(solana_balance{instance=~\"${instance}\", }) by (instance, account)", - "instant": true, - 
"range": false, - "format": "", - "legendFormat": "{{instance}} - {{account}}", - "refId": "" - } - ], - "title": "Solana Balance Summary", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 5 - }, - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 4, - "targets": [ - { - "expr": "100 * (avg(avg_over_time(health{instance=~\"${instance}\", }[15m])) by (instance, service_id, version, service, cluster, env))", - "format": "", - "legendFormat": "{{instance}} - {{service_id}}", - "refId": "" - } - ], - "title": "Health Avg by Service over 15m", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 16, - "x": 0, - "y": 9 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "min": 0, - "max": 100, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 5, - "targets": [ - { - "expr": "100 * avg(avg_over_time(health{instance=~\"${instance}\", }[15m])) by (instance, service_id, version, service, cluster, 
env) \u003c 90", - "format": "", - "legendFormat": "{{instance}} - {{service_id}}", - "refId": "" - } - ], - "title": "Health Avg by Service over 15m with health \u003c 90%", - "description": "Only displays services with health average \u003c 90%", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 16, - "y": 9 - }, - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "green" - }, - { - "value": 1, - "color": "red" - }, - { - "value": 80, - "color": "orange" - }, - { - "value": 99, - "color": "green" - } - ] - }, - "noValue": "All services healthy" - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 6, - "targets": [ - { - "expr": "sum(eth_balance{instance=~\"${instance}\", }) by (instance, account)", - "format": "", - "legendFormat": "{{instance}} - {{account}}", - "refId": "" - } - ], - "title": "ETH Balance", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 15 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 7, - "targets": [ - { - "expr": "sum(solana_balance{instance=~\"${instance}\", }) by (instance, account)", - 
"format": "", - "legendFormat": "{{instance}} - {{account}}", - "refId": "" - } - ], - "title": "SOL Balance", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 15 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 8, - "targets": [ - { - "expr": "process_open_fds{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Open File Descriptors", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 0, - "y": 21 - }, - "options": { - "graphMode": "area", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 9, - "targets": [ - { - "expr": "go_info{instance=~\"${instance}\", }", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{exported_version}}", - "refId": "" - } - ], - "title": "Go Version", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 4, - "x": 6, - "y": 21 - }, - "options": { - "graphMode": "none", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - 
"last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "AppDBConnections", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 25 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 10, - "targets": [ - { - "expr": "sum(db_conns_max{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - Max", - "refId": "" - }, - { - "expr": "sum(db_conns_open{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - Open", - "refId": "" - }, - { - "expr": "sum(db_conns_used{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - Used", - "refId": "" - }, - { - "expr": "sum(db_conns_wait{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - Wait", - "refId": "" - } - ], - "title": "DB Connections", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 26 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Conn", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 11, - "targets": [ - { - "expr": "sum(db_wait_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "DB Wait Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": 
{ - "h": 6, - "w": 12, - "x": 0, - "y": 32 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 12, - "targets": [ - { - "expr": "sum(db_wait_time_seconds{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "DB Wait Time", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 32 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Sec", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "SQLQueries", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 38 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 13, - "targets": [ - { - "expr": "histogram_quantile(0.9, sum(rate(sql_query_timeout_percent_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le))", - "format": "", - "legendFormat": "p90", - "refId": "" - }, - { - "expr": "histogram_quantile(0.95, sum(rate(sql_query_timeout_percent_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le))", - "format": "", - "legendFormat": "p95", - "refId": "" - }, - { - "expr": "histogram_quantile(0.99, sum(rate(sql_query_timeout_percent_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le))", - "format": "", - 
"legendFormat": "p99", - "refId": "" - } - ], - "title": "SQL Query Timeout Percent", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 39 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "HeadTracker", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 45 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 14, - "targets": [ - { - "expr": "sum(head_tracker_current_head{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Head Tracker Current Head", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 18, - "x": 0, - "y": 46 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 15, - "targets": [ - { - "expr": "head_tracker_current_head{instance=~\"${instance}\", }", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Head Tracker Current Head Summary", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 18, - 
"y": 46 - }, - "options": { - "graphMode": "none", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 16, - "targets": [ - { - "expr": "rate(head_tracker_heads_received{instance=~\"${instance}\", }[1m])", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Head Tracker Heads Received Rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 52 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 17, - "targets": [ - { - "expr": "head_tracker_very_old_head{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Head Tracker Very Old Head", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 58 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - 
} - }, - { - "type": "timeseries", - "id": 18, - "targets": [ - { - "expr": "rate(head_tracker_connection_errors{instance=~\"${instance}\", }[1m])", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Head Tracker Connection Errors Rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 58 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "HeadReporter", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 64 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 19, - "targets": [ - { - "expr": "sum(unconfirmed_transactions{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Unconfirmed Transactions", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 0, - "y": 65 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Tx", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 20, - "targets": [ - { - "expr": "sum(max_unconfirmed_tx_age{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Unconfirmed 
TX Age", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 8, - "y": 65 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "s", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 21, - "targets": [ - { - "expr": "sum(max_unconfirmed_blocks{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Unconfirmed TX Blocks", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 16, - "y": 65 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Blocks", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "TxManager", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 71 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 22, - "targets": [ - { - "expr": "sum(tx_manager_num_confirmed_transactions{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Confirmed", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 0, - "y": 72 - }, - "options": { - "legend": { - 
"displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 23, - "targets": [ - { - "expr": "sum(tx_manager_num_successful_transactions{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Successful", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 6, - "y": 72 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 24, - "targets": [ - { - "expr": "sum(tx_manager_num_tx_reverted{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Reverted", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 12, - "y": 72 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - 
} - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 25, - "targets": [ - { - "expr": "sum(tx_manager_num_gas_bumps{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Gas Bumps", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 18, - "y": 72 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 26, - "targets": [ - { - "expr": "sum(tx_manager_fwd_tx_count{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Forwarded", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 0, - "y": 78 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 27, - "targets": [ - { - "expr": "sum(tx_manager_tx_attempt_count{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX 
Manager Attempts", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 6, - "y": 78 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 28, - "targets": [ - { - "expr": "sum(tx_manager_gas_bump_exceeds_limit{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Gas Bump Exceeds Limit", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 12, - "y": 78 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 29, - "targets": [ - { - "expr": "histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_broadcast_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, blockchain, chainID)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Time Until broadcast", - "description": "The amount of time elapsed from when a transaction is enqueued to until it is broadcast", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 
6, - "w": 6, - "x": 18, - "y": 78 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 30, - "targets": [ - { - "expr": "histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_confirmed_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, blockchain, chainID)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", - "refId": "" - } - ], - "title": "TX Manager Time Until confirmed", - "description": "The amount of time elapsed from a transaction being broadcast to being included in a block", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 0, - "y": 84 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "LogPoller", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 90 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 31, - "targets": [ - { - "expr": "count(log_poller_query_duration_sum{instance=~\"${instance}\", }) by (evmChainID)", - "format": "", - "legendFormat": "chainId: {{evmChainID}}", - "refId": "" - } - ], - "title": "Goroutines per ChainId", - "description": "goroutines per chainId", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - 
"h": 6, - "w": 12, - "x": 0, - "y": 91 - }, - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 32, - "targets": [ - { - "expr": "avg by (query, instance) (sum by (query, job) (rate(log_poller_query_duration_count{instance=~\"${instance}\", }[$__rate_interval])))", - "format": "", - "legendFormat": "{{instance}} - {{query}}", - "refId": "" - }, - { - "expr": "avg (sum by(instance) (rate(log_poller_query_duration_count{instance=~\"${instance}\", }[$__rate_interval])))", - "format": "", - "legendFormat": "Total", - "refId": "" - } - ], - "title": "RPS", - "description": "requests per second", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 91 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "reqps", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 33, - "targets": [ - { - "expr": "avg by (instance, type) (sum by (type, instance) (rate(log_poller_query_duration_count{instance=~\"${instance}\", }[$__rate_interval])))", - "format": "", - "legendFormat": "{{instance}} - {{type}}", - "refId": "" - } - ], - "title": "RPS by Type", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 97 - 
}, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "reqps", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 34, - "targets": [ - { - "expr": "avg by (instance, query) (log_poller_query_dataset_size{instance=~\"${instance}\", })", - "format": "", - "legendFormat": "{{instance}} - {{query}}", - "refId": "" - } - ], - "title": "Avg number of logs returned", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 97 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 35, - "targets": [ - { - "expr": "max by (instance, query) (log_poller_query_dataset_size{instance=~\"${instance}\", })", - "format": "", - "legendFormat": "{{instance}} - {{query}}", - "refId": "" - } - ], - "title": "Max number of logs returned", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 103 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - 
"overrides": null - } - }, - { - "type": "timeseries", - "id": 36, - "targets": [ - { - "expr": "max by (evmChainID) (log_poller_query_dataset_size{instance=~\"${instance}\", })", - "format": "", - "legendFormat": "{{evmChainID}}", - "refId": "" - } - ], - "title": "Logs returned by chain", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 103 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 37, - "targets": [ - { - "expr": "histogram_quantile(0.5, sum(rate(log_poller_query_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, query)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{query}}", - "refId": "" - } - ], - "title": "Queries duration by query 0.5 quantile", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 109 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 38, - "targets": [ - { - "expr": "histogram_quantile(0.9, sum(rate(log_poller_query_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, query)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{query}}", - 
"refId": "" - } - ], - "title": "Queries duration by query 0.9 quantile", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 115 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 39, - "targets": [ - { - "expr": "histogram_quantile(0.99, sum(rate(log_poller_query_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, query)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{query}}", - "refId": "" - } - ], - "title": "Queries duration by query 0.99 quantile", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 121 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 40, - "targets": [ - { - "expr": "avg by (evmChainID) (log_poller_logs_inserted{instance=~\"${instance}\", })", - "format": "", - "legendFormat": "{{evmChainID}}", - "refId": "" - } - ], - "title": "Number of logs inserted", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 127 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - 
"showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 41, - "targets": [ - { - "expr": "avg by (evmChainID) (rate(log_poller_logs_inserted{instance=~\"${instance}\", }[$__rate_interval]))", - "format": "", - "legendFormat": "{{evmChainID}}", - "refId": "" - } - ], - "title": "Logs insertion rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 127 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 42, - "targets": [ - { - "expr": "avg by (evmChainID) (log_poller_blocks_inserted{instance=~\"${instance}\", })", - "format": "", - "legendFormat": "{{evmChainID}}", - "refId": "" - } - ], - "title": "Number of blocks inserted", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 133 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 43, - "targets": [ - { - "expr": "avg by 
(evmChainID) (rate(log_poller_blocks_inserted{instance=~\"${instance}\", }[$__rate_interval]))", - "format": "", - "legendFormat": "{{evmChainID}}", - "refId": "" - } - ], - "title": "Blocks insertion rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 133 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Feeds Jobs", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 139 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 44, - "targets": [ - { - "expr": "sum(feeds_job_proposal_requests{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Feeds Job Proposal Requests", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 140 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 45, - "targets": [ - { - "expr": "sum(feeds_job_proposal_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Feeds Job Proposal Count", - "description": "", - "transparent": 
false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 140 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Mailbox", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 146 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 46, - "targets": [ - { - "expr": "sum(mailbox_load_percent{instance=~\"${instance}\", }) by (capacity, name, instance)", - "format": "", - "legendFormat": "{{instance}} - Capacity: {{capacity}} - {{name}}", - "refId": "" - } - ], - "title": "Mailbox Load Percent", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 147 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Logs Counters", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 153 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 47, - "targets": [ - { - "expr": "sum(log_panic_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - panic", - "refId": "" - } - ], - "title": "Logs Counter - panic", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" 
- }, - "gridPos": { - "h": 6, - "w": 8, - "x": 0, - "y": 154 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 48, - "targets": [ - { - "expr": "sum(log_fatal_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - fatal", - "refId": "" - } - ], - "title": "Logs Counter - fatal", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 8, - "y": 154 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 49, - "targets": [ - { - "expr": "sum(log_critical_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - critical", - "refId": "" - } - ], - "title": "Logs Counter - critical", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 16, - "y": 154 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - 
"overrides": null - } - }, - { - "type": "timeseries", - "id": 50, - "targets": [ - { - "expr": "sum(log_warn_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - warn", - "refId": "" - } - ], - "title": "Logs Counter - warn", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 0, - "y": 160 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 51, - "targets": [ - { - "expr": "sum(log_error_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}} - error", - "refId": "" - } - ], - "title": "Logs Counter - error", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 8, - "y": 160 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Logs Rate", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 166 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 52, - "targets": [ - { - "expr": "sum(rate(log_panic_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}} - error", - "refId": "" - } - ], - 
"title": "Logs Rate - panic", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 0, - "y": 167 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 53, - "targets": [ - { - "expr": "sum(rate(log_fatal_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}} - error", - "refId": "" - } - ], - "title": "Logs Rate - fatal", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 8, - "y": 167 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 54, - "targets": [ - { - "expr": "sum(rate(log_critical_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}} - error", - "refId": "" - } - ], - "title": "Logs Rate - critical", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 16, - "y": 167 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - 
"defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 55, - "targets": [ - { - "expr": "sum(rate(log_warn_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}} - error", - "refId": "" - } - ], - "title": "Logs Rate - warn", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 0, - "y": 173 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 56, - "targets": [ - { - "expr": "sum(rate(log_error_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}} - error", - "refId": "" - } - ], - "title": "Logs Rate - error", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 8, - "x": 8, - "y": 173 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "EvmPoolLifecycle", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 179 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - 
"id": 57, - "targets": [ - { - "expr": "evm_pool_rpc_node_highest_seen_block{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "EVM Pool Highest Seen Block", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 180 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 58, - "targets": [ - { - "expr": "evm_pool_rpc_node_num_seen_blocks{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "EVM Pool Num Seen Blocks", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 180 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 59, - "targets": [ - { - "expr": "evm_pool_rpc_node_polls_total{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "EVM Pool Node Polls Total", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 186 - }, - "options": { - "legend": { - "displayMode": "list", - 
"placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 60, - "targets": [ - { - "expr": "evm_pool_rpc_node_polls_failed{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "EVM Pool Node Polls Failed", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 186 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 61, - "targets": [ - { - "expr": "evm_pool_rpc_node_polls_success{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "EVM Pool Node Polls Success", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 192 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "Block", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Node RPC State", - "gridPos": { - "h": 1, - 
"w": 24, - "x": 0, - "y": 198 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 62, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Alive\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC Alive", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 0, - "y": 199 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 63, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Closed\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC Closed", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 6, - "y": 199 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 64, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Dialed\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - 
{{chainId}}", - "refId": "" - } - ], - "title": "Node RPC Dialed", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 12, - "y": 199 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 65, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"InvalidChainID\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC InvalidChainID", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 18, - "y": 199 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 66, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"OutOfSync\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC OutOfSync", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 0, - "y": 205 - }, - "options": { - "graphMode": "none", 
- "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 67, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Undialed\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC Undialed", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 6, - "y": 205 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 68, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Unreachable\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC Unreachable", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 12, - "y": 205 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": 
"horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 69, - "targets": [ - { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Unusable\"}) by (instance, chainId)", - "format": "", - "legendFormat": "{{instance}} - {{chainId}}", - "refId": "" - } - ], - "title": "Node RPC Unusable", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 6, - "x": 18, - "y": 205 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "EVM Pool RPC Node Metrics (App)", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 211 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 70, - "targets": [ - { - "expr": "sum(increase(evm_pool_rpc_node_calls_success{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName)", - "format": "", - "legendFormat": "{{instance}} - {{nodeName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Calls Success Rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 212 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": 
{ - "defaults": { - "unit": "percentunit", - "decimals": 0, - "max": 1, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 71, - "targets": [ - { - "expr": "sum(increase(evm_pool_rpc_node_dials_failed{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName)", - "format": "", - "legendFormat": "{{instance}} - {{evmChainID}} - {{nodeName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Dials Failure Rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 218 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 0, - "max": 1, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 72, - "targets": [ - { - "expr": "evm_pool_rpc_node_num_transitions_to_alive{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "Alive", - "refId": "" - }, - { - "expr": 
"evm_pool_rpc_node_num_transitions_to_in_sync{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "InSync", - "refId": "" - }, - { - "expr": "evm_pool_rpc_node_num_transitions_to_out_of_sync{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "OutOfSync", - "refId": "" - }, - { - "expr": "evm_pool_rpc_node_num_transitions_to_unreachable{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "UnReachable", - "refId": "" - }, - { - "expr": "evm_pool_rpc_node_num_transitions_to_invalid_chain_id{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "InvalidChainID", - "refId": "" - }, - { - "expr": "evm_pool_rpc_node_num_transitions_to_unusable{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "TransitionToUnusable", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Transitions", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 224 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 73, - "targets": [ - { - "expr": "evm_pool_rpc_node_states{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - {{evmChainID}} - {{state}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node States", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 224 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - 
"defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 74, - "targets": [ - { - "expr": "sum(increase(evm_pool_rpc_node_verifies_success{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) * 100", - "format": "", - "legendFormat": "{{instance}} - {{evmChainID}} - {{nodeName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Verifies Success Rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 230 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 75, - "targets": [ - { - "expr": "sum(increase(evm_pool_rpc_node_verifies_failed{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) * 100", - "format": "", - "legendFormat": "{{instance}} - {{evmChainID}} - {{nodeName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Verifies Failure Rate", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 230 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, 
- "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "EVM Pool RPC Node Latencies (App)", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 236 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 76, - "targets": [ - { - "expr": "histogram_quantile(0.90, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{rpcCallName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Calls Latency 0.90 quantile", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 237 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 77, - "targets": [ - { - "expr": "histogram_quantile(0.95, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{rpcCallName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Calls Latency 0.95 quantile", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 243 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - 
"showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 78, - "targets": [ - { - "expr": "histogram_quantile(0.99, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", - "format": "", - "legendFormat": "{{instance}} - {{rpcCallName}}", - "refId": "" - } - ], - "title": "EVM Pool RPC Node Calls Latency 0.99 quantile", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 249 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Block History Estimator", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 255 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 79, - "targets": [ - { - "expr": "sum(gas_updater_all_gas_price_percentiles{instance=~\"${instance}\", }) by (instance, evmChainID, percentile)", - "format": "", - "legendFormat": "{{instance}} - {{evmChainID}} - {{percentile}}", - "refId": "" - } - ], - "title": "Gas Updater All Gas Price Percentiles", - "description": "Gas price at given percentile", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 256 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - 
"showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "gwei", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 80, - "targets": [ - { - "expr": "sum(gas_updater_all_tip_cap_percentiles{instance=~\"${instance}\", }) by (instance, evmChainID, percentile)", - "format": "", - "legendFormat": "{{instance}} - {{evmChainID}} - {{percentile}}", - "refId": "" - } - ], - "title": "Gas Updater All Tip Cap Percentiles", - "description": "Tip cap at given percentile", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 262 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "right", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 81, - "targets": [ - { - "expr": "sum(gas_updater_set_gas_price{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Gas Updater Set Gas Price", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 268 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - 
"type": "timeseries", - "id": 82, - "targets": [ - { - "expr": "sum(gas_updater_set_tip_cap{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Gas Updater Set Tip Cap", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 268 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 83, - "targets": [ - { - "expr": "sum(gas_updater_current_base_fee{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Gas Updater Current Base Fee", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 274 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 84, - "targets": [ - { - "expr": "sum(block_history_estimator_connectivity_failure_count{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Block History Estimator Connectivity Failure Count", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - 
"h": 6, - "w": 12, - "x": 12, - "y": 274 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Pipeline Metrics (Runner)", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 280 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 85, - "targets": [ - { - "expr": "pipeline_task_execution_time{instance=~\"${instance}\", } / 1e6", - "format": "", - "legendFormat": "{{instance}} JobID: {{job_id}}", - "refId": "" - } - ], - "title": "Pipeline Task Execution Time", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 281 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "s", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 86, - "targets": [ - { - "expr": "pipeline_run_errors{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} JobID: {{job_id}}", - "refId": "" - } - ], - "title": "Pipeline Run Errors", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 287 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - 
"defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 87, - "targets": [ - { - "expr": "pipeline_run_total_time_to_completion{instance=~\"${instance}\", } / 1e6", - "format": "", - "legendFormat": "{{instance}} JobID: {{job_id}}", - "refId": "" - } - ], - "title": "Pipeline Run Total Time to Completion", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 293 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "s", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 88, - "targets": [ - { - "expr": "pipeline_tasks_total_finished{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} JobID: {{job_id}}", - "refId": "" - } - ], - "title": "Pipeline Tasks Total Finished", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 299 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "HTTP API", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 305 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", 
- "id": 89, - "targets": [ - { - "expr": "histogram_quantile(0.95, sum(rate(service_gonic_request_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, path, method))", - "format": "", - "legendFormat": "{{instance}} - {{method}} - {{path}}", - "refId": "" - } - ], - "title": "Request Duration p95", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 306 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "s", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 90, - "targets": [ - { - "expr": "sum(rate(service_gonic_requests_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, path, method, code)", - "format": "", - "legendFormat": "{{instance}} - {{method}} - {{path}} - {{code}}", - "refId": "" - } - ], - "title": "Request Total Rate over interval", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 312 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 91, - "targets": [ - { - "expr": "avg(rate(service_gonic_request_size_bytes_sum{instance=~\"${instance}\", }[$__rate_interval])) by (instance)/avg(rate(service_gonic_request_size_bytes_count{instance=~\"${instance}\", 
}[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Average Request Size", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 318 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 92, - "targets": [ - { - "expr": "avg(rate(service_gonic_response_size_bytes_sum{instance=~\"${instance}\", }[$__rate_interval])) by (instance)/avg(rate(service_gonic_response_size_bytes_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Response Size", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 318 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "PromHTTP", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 324 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 93, - "targets": [ - { - "expr": "sum(rate(promhttp_metric_handler_requests_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, code)", - "format": "", - 
"legendFormat": "{{instance}} - {{code}}", - "refId": "" - } - ], - "title": "HTTP rate by return code", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 325 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Go Metrics", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 331 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 94, - "targets": [ - { - "expr": "sum(go_threads{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Threads", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 332 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 95, - "targets": [ - { - "expr": "sum(go_memstats_heap_alloc_bytes{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "", - "refId": "" - } - ], - "title": "Heap Allocations Stats", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 338 - }, - "options": { - "graphMode": "none", - 
"colorMode": "none", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 96, - "targets": [ - { - "expr": "sum(go_memstats_heap_alloc_bytes{instance=~\"${instance}\", }) by (instance)", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Heap allocations Graph", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 24, - "x": 0, - "y": 344 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 97, - "targets": [ - { - "expr": "go_memstats_heap_alloc_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Alloc", - "refId": "" - }, - { - "expr": "go_memstats_heap_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Sys", - "refId": "" - }, - { - "expr": "go_memstats_heap_idle_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Idle", - "refId": "" - }, - { - "expr": "go_memstats_heap_inuse_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - InUse", - "refId": "" - }, - { - "expr": "go_memstats_heap_released_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Released", - "refId": "" - 
} - ], - "title": "Heap Usage", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 350 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 98, - "targets": [ - { - "expr": "go_memstats_mspan_inuse_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Total InUse", - "refId": "" - }, - { - "expr": "go_memstats_mspan_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Total Sys", - "refId": "" - }, - { - "expr": "go_memstats_mcache_inuse_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Cache InUse", - "refId": "" - }, - { - "expr": "go_memstats_mcache_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Cache Sys", - "refId": "" - }, - { - "expr": "go_memstats_buck_hash_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Hash Sys", - "refId": "" - }, - { - "expr": "go_memstats_gc_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - GC Sys", - "refId": "" - }, - { - "expr": "go_memstats_other_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - bytes of memory are used for other runtime allocations", - "refId": "" - }, - { - "expr": "go_memstats_next_gc_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Next GC", - "refId": "" - } - ], - "title": "Memory in Off-Heap", - "description": "", - "transparent": false, - 
"datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 350 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 99, - "targets": [ - { - "expr": "go_memstats_stack_inuse_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - InUse", - "refId": "" - }, - { - "expr": "go_memstats_stack_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}} - Sys", - "refId": "" - } - ], - "title": "Memory in Stack", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 356 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 100, - "targets": [ - { - "expr": "go_memstats_sys_bytes{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Total Used Memory", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 356 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - 
"defaults": { - "unit": "bytes", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 101, - "targets": [ - { - "expr": "go_memstats_mallocs_total{instance=~\"${instance}\", } - go_memstats_frees_total{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Number of Live Objects", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 362 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 102, - "targets": [ - { - "expr": "rate(go_memstats_mallocs_total{instance=~\"${instance}\", }[1m])", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Rate of Objects Allocated", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 362 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 103, - "targets": [ - { - "expr": "rate(go_memstats_lookups_total{instance=~\"${instance}\", }[1m])", - "format": "", - "legendFormat": "{{instance}}", - 
"refId": "" - } - ], - "title": "Rate of a Pointer Dereferences", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 368 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ops", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 104, - "targets": [ - { - "expr": "go_goroutines{instance=~\"${instance}\", }", - "format": "", - "legendFormat": "{{instance}}", - "refId": "" - } - ], - "title": "Goroutines", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 368 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "ops", - "decimals": 1, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - } - ], - "templating": { - "list": [ - { - "type": "query", - "name": "instance", - "label": "Instance", - "query": "label_values(instance)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": true, - "sort": 1, - "includeAll": true - } - ] - }, - "annotations": {} - }, - "Alerts": [ - { - "annotations": { - "description": "Component {{ index $labels \"service_id\" }} uptime in the last 15m is {{ index $values \"A\" }}%", - "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - "summary": "Uptime 
less than 90% over last 15 minutes on one component in a Node" - }, - "condition": "D", - "data": [ - { - "datasourceUid": "1", - "model": { - "expr": "health{}", - "legendFormat": "__auto", - "refId": "A" - }, - "refId": "A", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - }, - { - "datasourceUid": "__expr__", - "model": { - "expression": "A", - "intervalMs": 1000, - "maxDataPoints": 43200, - "reducer": "mean", - "refId": "B", - "type": "reduce" - }, - "refId": "B", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - }, - { - "datasourceUid": "__expr__", - "model": { - "expression": "$B * 100", - "intervalMs": 1000, - "maxDataPoints": 43200, - "refId": "C", - "type": "math" - }, - "refId": "C", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - }, - { - "datasourceUid": "__expr__", - "model": { - "conditions": [ - { - "evaluator": { - "params": [ - 90, - 0 - ], - "type": "lt" - } - } - ], - "expression": "C", - "intervalMs": 1000, - "maxDataPoints": 43200, - "refId": "D", - "type": "threshold" - }, - "refId": "D", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - } - ], - "execErrState": "Alerting", - "folderUID": "", - "for": "15m", - "labels": { - "severity": "warning" - }, - "noDataState": "NoData", - "orgID": 0, - "ruleGroup": "", - "title": "Health Avg by Service over 15m" - }, - { - "annotations": { - "description": "ETH Balance critically low at {{ index $values \"A\" }} on {{ index $labels \"instance\" }}", - "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - "summary": "ETH Balance is lower than threshold" - }, - "condition": "B", - "data": [ - { - "datasourceUid": "1", - "model": { - "expr": "eth_balance{}", - "instant": true, - "range": false, - "legendFormat": "__auto", - "refId": "A" - }, - "refId": "A", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - }, - { - "datasourceUid": "__expr__", - "model": { - "conditions": [ - { - "evaluator": { - "params": [ - 1, - 0 - ], - 
"type": "lt" - } - } - ], - "expression": "A", - "intervalMs": 1000, - "maxDataPoints": 43200, - "refId": "B", - "type": "threshold" - }, - "refId": "B", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - } - ], - "execErrState": "Alerting", - "folderUID": "", - "for": "15m", - "labels": { - "severity": "critical" - }, - "noDataState": "OK", - "orgID": 0, - "ruleGroup": "", - "title": "ETH Balance" - }, - { - "annotations": { - "description": "Solana Balance critically low at {{ index $values \"A\" }} on {{ index $labels \"instance\" }}", - "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - "summary": "Solana Balance is lower than threshold" - }, - "condition": "B", - "data": [ - { - "datasourceUid": "1", - "model": { - "expr": "solana_balance{}", - "instant": true, - "range": false, - "legendFormat": "__auto", - "refId": "A" - }, - "refId": "A", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - }, - { - "datasourceUid": "__expr__", - "model": { - "conditions": [ - { - "evaluator": { - "params": [ - 1, - 0 - ], - "type": "lt" - } - } - ], - "expression": "A", - "intervalMs": 1000, - "maxDataPoints": 43200, - "refId": "B", - "type": "threshold" - }, - "refId": "B", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - } - ], - "execErrState": "Alerting", - "folderUID": "", - "for": "15m", - "labels": { - "severity": "critical" - }, - "noDataState": "OK", - "orgID": 0, - "ruleGroup": "", - "title": "SOL Balance" - }, - { - "annotations": { - "description": "{{ index $labels \"instance\" }} on ChainID {{ index $labels \"ChainID\" }} has received {{ index $values \"A\" }} heads over 10 minutes.", - "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - "summary": "No Headers Received" - }, - "condition": "B", - "data": [ - { - "datasourceUid": "1", - "model": { - "expr": "increase(head_tracker_heads_received{}[10m])", - "instant": true, - "range": false, - 
"legendFormat": "__auto", - "refId": "A" - }, - "refId": "A", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - }, - { - "datasourceUid": "__expr__", - "model": { - "conditions": [ - { - "evaluator": { - "params": [ - 1, - 0 - ], - "type": "lt" - } - } - ], - "expression": "A", - "intervalMs": 1000, - "maxDataPoints": 43200, - "refId": "B", - "type": "threshold" - }, - "refId": "B", - "relativeTimeRange": { - "from": 600, - "to": 0 - } - } - ], - "execErrState": "Alerting", - "folderUID": "", - "for": "10m", - "labels": { - "severity": "critical" - }, - "noDataState": "OK", - "orgID": 0, - "ruleGroup": "", - "title": "Head Tracker Heads Received Rate" - } - ], - "ContactPoints": null, - "NotificationPolicies": [ - { - "group_by": [ - "grafana_folder", - "alertname" - ], - "receiver": "chainlink-slack" - } - ] -} \ No newline at end of file diff --git a/observability-lib/dashboards/k8s-resources/component.go b/observability-lib/dashboards/k8s-resources/component.go deleted file mode 100644 index 1268113a7..000000000 --- a/observability-lib/dashboards/k8s-resources/component.go +++ /dev/null @@ -1,416 +0,0 @@ -package k8sresources - -import ( - "fmt" - - "github.com/grafana/grafana-foundation-sdk/go/cog" - "github.com/grafana/grafana-foundation-sdk/go/common" - "github.com/grafana/grafana-foundation-sdk/go/dashboard" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -type Props struct { - Name string // Name is the name of the dashboard - MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics -} - -func NewDashboard(props *Props) (*grafana.Dashboard, error) { - if props.Name == "" { - return nil, fmt.Errorf("Name is required") - } - - builder := grafana.NewBuilder(&grafana.BuilderOptions{ - Name: props.Name, - Tags: []string{"Core", "Node", "Kubernetes", "Resources"}, - Refresh: "30s", - TimeFrom: "now-30m", - TimeTo: "now", - }) - - builder.AddVars(vars(props)...) 
- - builder.AddRow("Headlines") - builder.AddPanel(headlines(props)...) - - builder.AddRow("Pod Status") - builder.AddPanel(podStatus(props)...) - - builder.AddRow("Resources Usage") - builder.AddPanel(resourcesUsage(props)...) - - builder.AddRow("Network Usage") - builder.AddPanel(networkUsage(props)...) - - builder.AddRow("Disk Usage") - builder.AddPanel(diskUsage(props)...) - - return builder.Build() -} - -func vars(p *Props) []cog.Builder[dashboard.VariableModel] { - var variables []cog.Builder[dashboard.VariableModel] - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Environment", - Name: "env", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up, env)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Cluster", - Name: "cluster", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env"}, cluster)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Namespace", - Name: "namespace", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster"}, namespace)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Job", - Name: "job", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace"}, job)`, - Multi: false, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Pod", - Name: "pod", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", 
namespace="$namespace", job="$job"}, pod)`, - Multi: false, - })) - - return variables -} - -func headlines(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "CPU Utilisation (from requests)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_requests{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: "{{pod}}", - Instant: true, - }, - }, - }, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "CPU Utilisation (from limits)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_limits{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: "{{pod}}", - Instant: true, - }, - }, - }, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory Utilisation (from requests)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(container_memory_working_set_bytes{job="kubelet", metrics_path="/metrics/cadvisor", cluster="$cluster", 
namespace="$namespace", pod="$pod", image!=""}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_requests{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: "{{pod}}", - Instant: true, - }, - }, - }, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory Utilisation (from limits)", - Span: 6, - Height: 4, - Decimals: 1, - Unit: "percent", - Query: []grafana.Query{ - { - Expr: `100 * sum(container_memory_working_set_bytes{job="kubelet", metrics_path="/metrics/cadvisor", cluster="$cluster", namespace="$namespace", pod="$pod", container!="", image!=""}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_limits{cluster="$cluster", namespace="$namespace", pod="$pod"}) by (container)`, - Legend: "{{pod}}", - Instant: true, - }, - }, - }, - Orientation: common.VizOrientationHorizontal, - })) - - return panels -} - -func podStatus(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Pod Restarts", - Description: "Number of pod restarts", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `sum(increase(kube_pod_container_status_restarts_total{pod=~"$pod", namespace=~"${namespace}"}[$__rate_interval])) by (pod)`, - Legend: "{{pod}}", - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "OOM Events", - Description: 
"Out-of-memory number of events", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `sum(container_oom_events_total{pod=~"$pod", namespace=~"${namespace}"}) by (pod)`, - Legend: "{{pod}}", - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "OOM Killed", - Span: 8, - Height: 4, - Query: []grafana.Query{ - { - Expr: `kube_pod_container_status_last_terminated_reason{reason="OOMKilled", pod=~"$pod", namespace=~"${namespace}"}`, - Legend: "{{pod}}", - }, - }, - }, - ColorMode: common.BigValueColorModeNone, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - Orientation: common.VizOrientationHorizontal, - })) - - return panels -} - -func resourcesUsage(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "CPU Usage", - Span: 12, - Height: 6, - Decimals: 3, - Query: []grafana.Query{ - { - Expr: `sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{pod=~"$pod", namespace=~"${namespace}"}) by (pod)`, - Legend: "{{pod}}", - }, - { - Expr: `sum(kube_pod_container_resource_requests{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="cpu"})`, - Legend: "Requests", - }, - { - Expr: `sum(kube_pod_container_resource_limits{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="cpu"})`, - Legend: "Limits", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: 
&grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Memory Usage", - Span: 12, - Height: 6, - Unit: "bytes", - Query: []grafana.Query{ - { - Expr: `sum(container_memory_rss{pod=~"$pod", namespace=~"${namespace}", container!=""}) by (pod)`, - Legend: "{{pod}}", - }, - { - Expr: `sum(kube_pod_container_resource_requests{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="memory"})`, - Legend: "Requests", - }, - { - Expr: `sum(kube_pod_container_resource_limits{job="kube-state-metrics", cluster="$cluster", namespace="$namespace", pod="$pod", resource="memory"})`, - Legend: "Limits", - }, - }, - }, - })) - - return panels -} - -func networkUsage(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Receive Bandwidth", - Span: 12, - Height: 6, - Unit: "bps", - Query: []grafana.Query{ - { - Expr: `sum(irate(container_network_receive_bytes_total{pod=~"$pod", namespace=~"${namespace}"}[$__rate_interval])) by (pod)`, - Legend: "{{pod}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Transmit Bandwidth", - Span: 12, - Height: 6, - Unit: "bps", - Query: []grafana.Query{ - { - Expr: `sum(irate(container_network_transmit_bytes_total{pod=~"$pod", namespace=~"${namespace}"}[$__rate_interval])) by (pod)`, - Legend: "{{pod}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Average Container Bandwidth: Received", - Span: 12, - Height: 6, - Unit: "bps", - Query: []grafana.Query{ - { - Expr: 
`avg(irate(container_network_receive_bytes_total{pod=~"$pod", namespace=~"${namespace}"}[$__rate_interval])) by (pod)`, - Legend: "{{pod}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Average Container Bandwidth: Transmitted", - Span: 12, - Height: 6, - Unit: "bps", - Query: []grafana.Query{ - { - Expr: `avg(irate(container_network_transmit_bytes_total{pod=~"$pod", namespace=~"${namespace}"}[$__rate_interval])) by (pod)`, - Legend: "{{pod}}", - }, - }, - }, - })) - - return panels -} - -func diskUsage(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "IOPS(Read+Write)", - Span: 12, - Height: 6, - Decimals: 2, - Unit: "short", - Query: []grafana.Query{ - { - Expr: `ceil(sum by(container, pod) (rate(container_fs_reads_total{job="kubelet", metrics_path="/metrics/cadvisor", container!="", cluster="$cluster", namespace="$namespace", pod="$pod"}[$__rate_interval]) + rate(container_fs_writes_total{job="kubelet", metrics_path="/metrics/cadvisor", container!="", cluster="$cluster", namespace="$namespace", pod="$pod"}[$__rate_interval])))`, - Legend: "{{pod}}", - }, - }, - }, - })) - - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "ThroughPut(Read+Write)", - Span: 12, - Height: 6, - Decimals: 2, - Unit: "short", - Query: []grafana.Query{ - { - Expr: `sum by(container, pod) (rate(container_fs_reads_bytes_total{job="kubelet", metrics_path="/metrics/cadvisor", container!="", cluster="$cluster", namespace="$namespace", pod="$pod"}[$__rate_interval]) + rate(container_fs_writes_bytes_total{job="kubelet", 
metrics_path="/metrics/cadvisor", container!="", cluster="$cluster", namespace="$namespace", pod="$pod"}[$__rate_interval]))`, - Legend: "{{pod}}", - }, - }, - }, - })) - - return panels -} diff --git a/observability-lib/dashboards/k8s-resources/component_test.go b/observability-lib/dashboards/k8s-resources/component_test.go deleted file mode 100644 index 32ebbde54..000000000 --- a/observability-lib/dashboards/k8s-resources/component_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package k8sresources_test - -import ( - "os" - "testing" - - k8sresources "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/k8s-resources" - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - - "github.com/stretchr/testify/require" -) - -func TestNewDashboard(t *testing.T) { - t.Run("NewDashboard creates a dashboard", func(t *testing.T) { - testDashboard, err := k8sresources.NewDashboard(&k8sresources.Props{ - Name: "K8s resources", - MetricsDataSource: grafana.NewDataSource("Prometheus", ""), - }) - if err != nil { - t.Errorf("Error creating dashboard: %v", err) - } - require.IsType(t, grafana.Dashboard{}, *testDashboard) - require.Equal(t, "K8s resources", *testDashboard.Dashboard.Title) - json, errJSON := testDashboard.GenerateJSON() - if errJSON != nil { - t.Errorf("Error generating JSON: %v", errJSON) - } - - jsonCompared, errCompared := os.ReadFile("test-output.json") - if errCompared != nil { - t.Errorf("Error reading file: %v", errCompared) - } - - require.ElementsMatch(t, jsonCompared, json) - }) -} diff --git a/observability-lib/dashboards/k8s-resources/test-output.json b/observability-lib/dashboards/k8s-resources/test-output.json deleted file mode 100644 index 4667b4d38..000000000 --- a/observability-lib/dashboards/k8s-resources/test-output.json +++ /dev/null @@ -1,977 +0,0 @@ -{ - "Dashboard": { - "title": "K8s resources", - "tags": [ - "Core", - "Node", - "Kubernetes", - "Resources" - ], - "timezone": "browser", - "graphTooltip": 0, - 
"time": { - "from": "now-30m", - "to": "now" - }, - "fiscalYearStartMonth": 0, - "refresh": "30s", - "schemaVersion": 0, - "panels": [ - { - "type": "row", - "collapsed": false, - "title": "Headlines", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 0, - "targets": [ - { - "expr": "100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_requests{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "CPU Utilisation (from requests)", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 0, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 1, - "targets": [ - { - "expr": "100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_limits{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "CPU Utilisation (from limits)", - "description": "", - "transparent": false, - "datasource": { - 
"uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 6, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 2, - "targets": [ - { - "expr": "100 * sum(container_memory_working_set_bytes{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", image!=\"\"}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_requests{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Memory Utilisation (from requests)", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 12, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 3, - "targets": [ - { - "expr": "100 * sum(container_memory_working_set_bytes{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", container!=\"\", image!=\"\"}) by (container) / 
sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_limits{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", - "instant": true, - "range": false, - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Memory Utilisation (from limits)", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 6, - "x": 18, - "y": 1 - }, - "options": { - "graphMode": "none", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "percent", - "decimals": 1, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Pod Status", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 5 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 4, - "targets": [ - { - "expr": "sum(increase(kube_pod_container_status_restarts_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Pod Restarts", - "description": "Number of pod restarts", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 0, - "y": 6 - }, - "options": { - "graphMode": "line", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 5, - "targets": [ - { - 
"expr": "sum(container_oom_events_total{pod=~\"$pod\", namespace=~\"${namespace}\"}) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "OOM Events", - "description": "Out-of-memory number of events", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 8, - "y": 6 - }, - "options": { - "graphMode": "line", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 6, - "targets": [ - { - "expr": "kube_pod_container_status_last_terminated_reason{reason=\"OOMKilled\", pod=~\"$pod\", namespace=~\"${namespace}\"}", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "OOM Killed", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 4, - "w": 8, - "x": 16, - "y": 6 - }, - "options": { - "graphMode": "line", - "colorMode": "none", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "horizontal" - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 0, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Resources Usage", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 10 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 7, - "targets": [ - { - "expr": "sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{pod=~\"$pod\", 
namespace=~\"${namespace}\"}) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - }, - { - "expr": "sum(kube_pod_container_resource_requests{job=\"kube-state-metrics\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", resource=\"cpu\"})", - "format": "", - "legendFormat": "Requests", - "refId": "" - }, - { - "expr": "sum(kube_pod_container_resource_limits{job=\"kube-state-metrics\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", resource=\"cpu\"})", - "format": "", - "legendFormat": "Limits", - "refId": "" - } - ], - "title": "CPU Usage", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 11 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "", - "decimals": 3, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 8, - "targets": [ - { - "expr": "sum(container_memory_rss{pod=~\"$pod\", namespace=~\"${namespace}\", container!=\"\"}) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - }, - { - "expr": "sum(kube_pod_container_resource_requests{job=\"kube-state-metrics\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", resource=\"memory\"})", - "format": "", - "legendFormat": "Requests", - "refId": "" - }, - { - "expr": "sum(kube_pod_container_resource_limits{job=\"kube-state-metrics\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", resource=\"memory\"})", - "format": "", - "legendFormat": "Limits", - "refId": "" - } - ], - "title": "Memory Usage", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 11 
- }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bytes", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Network Usage", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 17 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 9, - "targets": [ - { - "expr": "sum(irate(container_network_receive_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Receive Bandwidth", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 18 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bps", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 10, - "targets": [ - { - "expr": "sum(irate(container_network_transmit_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Transmit Bandwidth", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 18 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - 
}, - "fieldConfig": { - "defaults": { - "unit": "bps", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 11, - "targets": [ - { - "expr": "avg(irate(container_network_receive_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Average Container Bandwidth: Received", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 24 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bps", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 12, - "targets": [ - { - "expr": "avg(irate(container_network_transmit_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "Average Container Bandwidth: Transmitted", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 24 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "bps", - "decimals": 0, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Disk Usage", - "gridPos": { 
- "h": 1, - "w": 24, - "x": 0, - "y": 30 - }, - "id": 0, - "panels": null - }, - { - "type": "timeseries", - "id": 13, - "targets": [ - { - "expr": "ceil(sum by(container, pod) (rate(container_fs_reads_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval]) + rate(container_fs_writes_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval])))", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "IOPS(Read+Write)", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 0, - "y": 31 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - "showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "short", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - }, - { - "type": "timeseries", - "id": 14, - "targets": [ - { - "expr": "sum by(container, pod) (rate(container_fs_reads_bytes_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval]) + rate(container_fs_writes_bytes_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval]))", - "format": "", - "legendFormat": "{{pod}}", - "refId": "" - } - ], - "title": "ThroughPut(Read+Write)", - "description": "", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 6, - "w": 12, - "x": 12, - "y": 31 - }, - "options": { - "legend": { - "displayMode": "list", - "placement": "bottom", - 
"showLegend": true, - "calcs": [] - }, - "tooltip": { - "mode": "", - "sort": "" - } - }, - "fieldConfig": { - "defaults": { - "unit": "short", - "decimals": 2, - "noValue": "No data", - "custom": { - "fillOpacity": 2, - "scaleDistribution": { - "type": "linear" - } - } - }, - "overrides": null - } - } - ], - "templating": { - "list": [ - { - "type": "query", - "name": "env", - "label": "Environment", - "query": "label_values(up, env)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "cluster", - "label": "Cluster", - "query": "label_values(up{env=\"$env\"}, cluster)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "namespace", - "label": "Namespace", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\"}, namespace)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "job", - "label": "Job", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\"}, job)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "pod", - "label": "Pod", - "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\", job=\"$job\"}, pod)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - } - ] - }, - "annotations": {} - }, - "Alerts": null, - "ContactPoints": null, - 
"NotificationPolicies": null -} \ No newline at end of file diff --git a/observability-lib/dashboards/nop-ocr/component.go b/observability-lib/dashboards/nop-ocr/component.go deleted file mode 100644 index 2c58a40ce..000000000 --- a/observability-lib/dashboards/nop-ocr/component.go +++ /dev/null @@ -1,352 +0,0 @@ -package nopocr - -import ( - "fmt" - - "github.com/grafana/grafana-foundation-sdk/go/cog" - "github.com/grafana/grafana-foundation-sdk/go/common" - "github.com/grafana/grafana-foundation-sdk/go/dashboard" - - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" -) - -type Props struct { - Name string // Name is the name of the dashboard - MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics - OCRVersion string // OCRVersion is the version of the OCR (ocr, ocr2, ocr3) -} - -func NewDashboard(props *Props) (*grafana.Dashboard, error) { - if props.Name == "" { - return nil, fmt.Errorf("Name is required") - } - - builder := grafana.NewBuilder(&grafana.BuilderOptions{ - Name: props.Name, - Tags: []string{"NOP", "Health", props.OCRVersion}, - Refresh: "30s", - TimeFrom: "now-1d", - TimeTo: "now", - }) - - builder.AddVars(vars(props)...) - - builder.AddRow("Per Contract") - builder.AddPanel(perContract(props)...) - - builder.AddRow("Per NOP") - builder.AddPanel(perNOP(props)...) 
- - return builder.Build() -} - -func vars(p *Props) []cog.Builder[dashboard.VariableModel] { - var variables []cog.Builder[dashboard.VariableModel] - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Environment", - Name: "env", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(` + p.OCRVersion + `_contract_config_f{}, env)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "Contract", - Name: "contract", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(` + p.OCRVersion + `_contract_oracle_active{env="$env"}, contract)`, - })) - - variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ - VariableOption: &grafana.VariableOption{ - Label: "NOP", - Name: "oracle", - }, - Datasource: p.MetricsDataSource.Name, - Query: `label_values(` + p.OCRVersion + `_contract_oracle_active{env="$env"}, oracle)`, - })) - - return variables -} - -func perContract(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Rounds Epoch Progression", - Description: "Rounds have stopped progressing for 90 seconds means NOP is unhealthy", - Span: 24, - Height: 10, - Decimals: 2, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_epoch_round{env=~"${env}", contract=~"${contract}"}[90s])) by (env, contract, feed_id, network_name, oracle) >bool 0)[$__range:])`, - Legend: `{{oracle}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: 
grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Message Observe", - Description: "NOP have stopped sending messages for 3mins means NOP is unhealthy", - Span: 24, - Height: 10, - Decimals: 2, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_message_observe_total{env=~"${env}", contract=~"${contract}"}[3m])) by (env, contract, feed_id, network_name, oracle) >bool 0)[$__range:])`, - Legend: `{{oracle}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Observations included in report", - Description: "NOP observations were not including in report for 3mins means NOP is unhealthy", - Span: 24, - Height: 10, - Decimals: 2, - Unit: "percentunit", 
- Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_message_report_req_observation_total{env=~"${env}", contract=~"${contract}"}[3m])) by (env, contract, feed_id, network_name, oracle) >bool 0)[$__range:])`, - Legend: `{{oracle}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - return panels -} - -func perNOP(p *Props) []*grafana.Panel { - var panels []*grafana.Panel - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Rounds Epoch Progression", - Description: "Rounds have stopped progressing for 5mins means NOP is unhealthy", - Span: 24, - Height: 32, - Decimals: 2, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_epoch_round{env=~"${env}", oracle=~"${oracle}"}[90s])) by (env, contract, feed_id, network_name, oracle) >bool 0)[$__range:])`, - Legend: `{{contract}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", 
- Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Message Observe", - Description: "NOP have stopped sending messages for 3mins means NOP is unhealthy", - Span: 24, - Height: 32, - Decimals: 2, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_message_observe_total{env=~"${env}", oracle=~"${oracle}"}[3m])) by (env, contract, feed_id, network_name, oracle) >bool 0)[$__range:])`, - Legend: `{{contract}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "Observations included in report", - Description: "NOP observations were not including in report for 3mins means NOP is unhealthy", - Span: 24, - Height: 32, - Decimals: 2, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_message_report_req_observation_total{env=~"${env}", oracle=~"${oracle}"}[3m])) by 
(env, contract, feed_id, network_name, oracle) >bool 0)[$__range:])`, - Legend: `{{contract}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "P2P Connectivity", - Description: "Connectivity got interrupted for 60 seconds received from other nodes", - Span: 24, - Height: 32, - Decimals: 2, - Unit: "percentunit", - Query: []grafana.Query{ - { - Expr: `avg_over_time((sum(changes(` + p.OCRVersion + `_telemetry_p2p_received_total{env=~"${env}", receiver=~"${oracle}"}[3m])) by (sender, receiver) >bool 0)[$__range:])`, - Legend: `{{receiver}} < {{sender}}`, - }, - }, - Threshold: &grafana.ThresholdOptions{ - Mode: dashboard.ThresholdsModeAbsolute, - Steps: []dashboard.Threshold{ - {Value: nil, Color: "default"}, - {Value: grafana.Pointer[float64](0), Color: "red"}, - {Value: grafana.Pointer[float64](0.80), Color: "orange"}, - {Value: grafana.Pointer[float64](0.99), Color: "green"}, - }, - }, - Transform: &grafana.TransformOptions{ - ID: "renameByRegex", - Options: map[string]string{ - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "", - }, - }, - }, - TextSize: 10, - ValueSize: 18, - GraphMode: common.BigValueGraphModeLine, - TextMode: common.BigValueTextModeValueAndName, - })) - - return panels -} diff --git 
a/observability-lib/dashboards/nop-ocr/component_test.go b/observability-lib/dashboards/nop-ocr/component_test.go deleted file mode 100644 index 3f4c36692..000000000 --- a/observability-lib/dashboards/nop-ocr/component_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package nopocr_test - -import ( - "os" - "testing" - - nopocr "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/nop-ocr" - "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" - - "github.com/stretchr/testify/require" -) - -func TestNewDashboard(t *testing.T) { - t.Run("NewDashboard creates a dashboard", func(t *testing.T) { - testDashboard, err := nopocr.NewDashboard(&nopocr.Props{ - Name: "NOP OCR Dashboard", - MetricsDataSource: grafana.NewDataSource("Prometheus", ""), - }) - if err != nil { - t.Errorf("Error creating dashboard: %v", err) - } - require.IsType(t, grafana.Dashboard{}, *testDashboard) - require.Equal(t, "NOP OCR Dashboard", *testDashboard.Dashboard.Title) - json, errJSON := testDashboard.GenerateJSON() - if errJSON != nil { - t.Errorf("Error generating JSON: %v", errJSON) - } - - jsonCompared, errCompared := os.ReadFile("test-output.json") - if errCompared != nil { - t.Errorf("Error reading file: %v", errCompared) - } - - require.ElementsMatch(t, jsonCompared, json) - }) -} diff --git a/observability-lib/dashboards/nop-ocr/test-output.json b/observability-lib/dashboards/nop-ocr/test-output.json deleted file mode 100644 index 5cbabe132..000000000 --- a/observability-lib/dashboards/nop-ocr/test-output.json +++ /dev/null @@ -1,675 +0,0 @@ -{ - "Dashboard": { - "title": "NOP OCR Dashboard", - "tags": [ - "NOP", - "Health", - "" - ], - "timezone": "browser", - "graphTooltip": 0, - "time": { - "from": "now-1d", - "to": "now" - }, - "fiscalYearStartMonth": 0, - "refresh": "30s", - "schemaVersion": 0, - "panels": [ - { - "type": "row", - "collapsed": false, - "title": "Per Contract", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 0, - 
"panels": null - }, - { - "type": "stat", - "id": 0, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_epoch_round{env=~\"${env}\", contract=~\"${contract}\"}[90s])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Rounds Epoch Progression", - "description": "Rounds have stopped progressing for 90 seconds means NOP is unhealthy", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 10, - "w": 24, - "x": 0, - "y": 1 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 1, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_message_observe_total{env=~\"${env}\", contract=~\"${contract}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Message Observe", - "description": "NOP have stopped sending messages for 3mins means NOP is unhealthy", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 10, - "w": 24, - "x": 0, - "y": 11 - }, - "transformations": [ - { 
- "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 2, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_message_report_req_observation_total{env=~\"${env}\", contract=~\"${contract}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", - "format": "", - "legendFormat": "{{oracle}}", - "refId": "" - } - ], - "title": "Observations included in report", - "description": "NOP observations were not including in report for 3mins means NOP is unhealthy", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 10, - "w": 24, - "x": 0, - "y": 21 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": 
"default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "row", - "collapsed": false, - "title": "Per NOP", - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 31 - }, - "id": 0, - "panels": null - }, - { - "type": "stat", - "id": 3, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_epoch_round{env=~\"${env}\", oracle=~\"${oracle}\"}[90s])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Rounds Epoch Progression", - "description": "Rounds have stopped progressing for 5mins means NOP is unhealthy", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 32, - "w": 24, - "x": 0, - "y": 32 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 4, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_message_observe_total{env=~\"${env}\", oracle=~\"${oracle}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", - 
"format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Message Observe", - "description": "NOP have stopped sending messages for 3mins means NOP is unhealthy", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 32, - "w": 24, - "x": 0, - "y": 64 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 5, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_message_report_req_observation_total{env=~\"${env}\", oracle=~\"${oracle}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", - "format": "", - "legendFormat": "{{contract}}", - "refId": "" - } - ], - "title": "Observations included in report", - "description": "NOP observations were not including in report for 3mins means NOP is unhealthy", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 32, - "w": 24, - "x": 0, - "y": 96 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - 
"wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - }, - { - "type": "stat", - "id": 6, - "targets": [ - { - "expr": "avg_over_time((sum(changes(_telemetry_p2p_received_total{env=~\"${env}\", receiver=~\"${oracle}\"}[3m])) by (sender, receiver) \u003ebool 0)[$__range:])", - "format": "", - "legendFormat": "{{receiver}} \u003c {{sender}}", - "refId": "" - } - ], - "title": "P2P Connectivity", - "description": "Connectivity got interrupted for 60 seconds received from other nodes", - "transparent": false, - "datasource": { - "uid": "Prometheus" - }, - "gridPos": { - "h": 32, - "w": 24, - "x": 0, - "y": 128 - }, - "transformations": [ - { - "id": "renameByRegex", - "options": { - "regex": "/^(.*[\\\\\\/])/", - "renamePattern": "" - } - } - ], - "options": { - "graphMode": "line", - "colorMode": "value", - "justifyMode": "auto", - "textMode": "value_and_name", - "wideLayout": true, - "reduceOptions": { - "calcs": [ - "last" - ] - }, - "text": { - "titleSize": 10, - "valueSize": 18 - }, - "showPercentChange": false, - "orientation": "auto" - }, - "fieldConfig": { - "defaults": { - "unit": "percentunit", - "decimals": 2, - "thresholds": { - "mode": "absolute", - "steps": [ - { - "value": null, - "color": "default" - }, - { - "value": 0, - "color": "red" - }, - { - "value": 0.8, - "color": "orange" - }, - { - "value": 0.99, - "color": "green" - } - ] - }, - "noValue": "No data" - }, - "overrides": null - } - } - ], - "templating": { - "list": [ - { - "type": "query", - "name": 
"env", - "label": "Environment", - "query": "label_values(_contract_config_f{}, env)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "contract", - "label": "Contract", - "query": "label_values(_contract_oracle_active{env=\"$env\"}, contract)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - }, - { - "type": "query", - "name": "oracle", - "label": "NOP", - "query": "label_values(_contract_oracle_active{env=\"$env\"}, oracle)", - "datasource": { - "uid": "Prometheus" - }, - "current": { - "selected": true, - "text": [ - "All" - ], - "value": [ - "$__all" - ] - }, - "multi": false, - "sort": 1 - } - ] - }, - "annotations": {} - }, - "Alerts": null, - "ContactPoints": null, - "NotificationPolicies": null -} \ No newline at end of file diff --git a/observability-lib/go.mod b/observability-lib/go.mod index e74087574..544957751 100644 --- a/observability-lib/go.mod +++ b/observability-lib/go.mod @@ -3,9 +3,8 @@ module github.com/smartcontractkit/chainlink-common/observability-lib go 1.21.4 require ( - github.com/go-resty/resty/v2 v2.14.0 - github.com/grafana/grafana-foundation-sdk/go v0.0.0-20240717180137-18b7def9b008 - github.com/rs/zerolog v1.33.0 + github.com/go-resty/resty/v2 v2.15.3 + github.com/grafana/grafana-foundation-sdk/go v0.0.0-20241009194022-923b32e3e69b github.com/spf13/cobra v1.8.1 github.com/stretchr/testify v1.9.0 gopkg.in/yaml.v3 v3.0.1 @@ -15,12 +14,9 @@ require ( github.com/davecgh/go-spew v1.1.1 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kr/pretty v0.3.1 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect 
github.com/rogpeppe/go-internal v1.10.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - golang.org/x/net v0.27.0 // indirect - golang.org/x/sys v0.22.0 // indirect + golang.org/x/net v0.30.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect ) diff --git a/observability-lib/go.sum b/observability-lib/go.sum index 040c1778f..02dadb6ce 100644 --- a/observability-lib/go.sum +++ b/observability-lib/go.sum @@ -1,14 +1,11 @@ -github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-resty/resty/v2 v2.14.0 h1:/rhkzsAqGQkozwfKS5aFAbb6TyKd3zyFRWcdRXLPCAU= -github.com/go-resty/resty/v2 v2.14.0/go.mod h1:IW6mekUOsElt9C7oWr0XRt9BNSD6D5rr9mhk6NjmNHg= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/grafana/grafana-foundation-sdk/go v0.0.0-20240717180137-18b7def9b008 h1:QEqDMW+20VJTkqU892tb9FbvCtI1uxxGvyXwulRhpAU= -github.com/grafana/grafana-foundation-sdk/go v0.0.0-20240717180137-18b7def9b008/go.mod h1:WtWosval1KCZP9BGa42b8aVoJmVXSg0EvQXi9LDSVZQ= +github.com/go-resty/resty/v2 v2.15.3 h1:bqff+hcqAflpiF591hhJzNdkRsFhlB96CYfBwSFvql8= +github.com/go-resty/resty/v2 v2.15.3/go.mod h1:0fHAoK7JoBy/Ch36N8VFeMsK7xQOHhvWaC3iOktwmIU= +github.com/grafana/grafana-foundation-sdk/go v0.0.0-20241009194022-923b32e3e69b h1:YxlugK0wL5hh86wT0hZSGw9cPTvacOUmHxjP15fsIlE= +github.com/grafana/grafana-foundation-sdk/go v0.0.0-20241009194022-923b32e3e69b/go.mod h1:WtWosval1KCZP9BGa42b8aVoJmVXSg0EvQXi9LDSVZQ= 
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= @@ -18,21 +15,12 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= -github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= -github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday/v2 
v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= @@ -40,75 +28,10 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= -golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= -golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= 
-golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= -golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= -golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= -golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
-golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= -golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= -golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= -golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/net v0.30.0 
h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U= golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= diff --git a/observability-lib/grafana/alerts-group.go b/observability-lib/grafana/alerts-group.go new file mode 100644 index 000000000..e7f845a2f --- /dev/null +++ b/observability-lib/grafana/alerts-group.go @@ -0,0 +1,15 @@ +package grafana + +import ( + "github.com/grafana/grafana-foundation-sdk/go/alerting" +) + +type AlertGroupOptions struct { + Title string + Interval alerting.Duration // duration in seconds +} + +func NewAlertGroup(options *AlertGroupOptions) *alerting.RuleGroupBuilder { + return alerting.NewRuleGroupBuilder(options.Title). 
+ Interval(options.Interval) +} diff --git a/observability-lib/grafana/alerts.go b/observability-lib/grafana/alerts.go index 2c426376f..e812f857f 100644 --- a/observability-lib/grafana/alerts.go +++ b/observability-lib/grafana/alerts.go @@ -2,6 +2,7 @@ package grafana import ( "github.com/grafana/grafana-foundation-sdk/go/alerting" + "github.com/grafana/grafana-foundation-sdk/go/cog" "github.com/grafana/grafana-foundation-sdk/go/expr" "github.com/grafana/grafana-foundation-sdk/go/prometheus" ) @@ -62,57 +63,41 @@ type ResampleExpression struct { type ThresholdExpression struct { Expression string - ThresholdConditionsOptions []ThresholdConditionsOption + ThresholdConditionsOptions ThresholdConditionsOption } +type TypeThresholdType string + +const ( + TypeThresholdTypeGt TypeThresholdType = "gt" + TypeThresholdTypeLt TypeThresholdType = "lt" + TypeThresholdTypeWithinRange TypeThresholdType = "within_range" + TypeThresholdTypeOutsideRange TypeThresholdType = "outside_range" +) + type ThresholdConditionsOption struct { Params []float64 - Type expr.TypeThresholdType + Type TypeThresholdType } -func newThresholdConditionsOptions(options []ThresholdConditionsOption) []struct { - Evaluator struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"evaluator"` - LoadedDimensions any `json:"loadedDimensions,omitempty"` - UnloadEvaluator *struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"unloadEvaluator,omitempty"` -} { - var conditions []struct { - Evaluator struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"evaluator"` - LoadedDimensions any `json:"loadedDimensions,omitempty"` - UnloadEvaluator *struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"unloadEvaluator,omitempty"` - } - for _, option := range options { - conditions = append(conditions, struct { - Evaluator struct { - Params 
[]float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"evaluator"` - LoadedDimensions any `json:"loadedDimensions,omitempty"` - UnloadEvaluator *struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"unloadEvaluator,omitempty"` - }{ - Evaluator: struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - }{ - Params: option.Params, - Type: option.Type, - }, - }) +func newThresholdConditionsOptions(options ThresholdConditionsOption) []cog.Builder[expr.ExprTypeThresholdConditions] { + var conditions []cog.Builder[expr.ExprTypeThresholdConditions] + + var params []float64 + params = append(params, options.Params...) + + if len(options.Params) == 1 { + params = append(params, 0) } + + conditions = append(conditions, expr.NewExprTypeThresholdConditionsBuilder(). + Evaluator( + expr.NewExprTypeThresholdConditionsEvaluatorBuilder(). + Params(params). + Type(expr.TypeThresholdType(options.Type)), + ), + ) + return conditions } @@ -173,8 +158,7 @@ func newConditionQuery(options ConditionQuery) *alerting.QueryBuilder { } type AlertOptions struct { - Name string - Datasource string + Title string Summary string Description string RunbookURL string @@ -185,6 +169,8 @@ type AlertOptions struct { Query []RuleQuery QueryRefCondition string Condition []ConditionQuery + PanelTitle string + RuleGroupTitle string } func NewAlertRule(options *AlertOptions) *alerting.RuleBuilder { @@ -204,18 +190,28 @@ func NewAlertRule(options *AlertOptions) *alerting.RuleBuilder { options.QueryRefCondition = "A" } - rule := alerting.NewRuleBuilder(options.Name). + annotations := map[string]string{ + "summary": options.Summary, + "description": options.Description, + "runbook_url": options.RunbookURL, + } + + if options.PanelTitle != "" { + annotations["panel_title"] = options.PanelTitle + } + + rule := alerting.NewRuleBuilder(options.Title). For(options.For). NoDataState(options.NoDataState). 
ExecErrState(options.RuleExecErrState). Condition(options.QueryRefCondition). - Annotations(map[string]string{ - "summary": options.Summary, - "description": options.Description, - "runbook_url": options.RunbookURL, - }). + Annotations(annotations). Labels(options.Tags) + if options.RuleGroupTitle != "" { + rule.RuleGroup(options.RuleGroupTitle) + } + for _, query := range options.Query { rule.WithQuery(newRuleQuery(query)) } diff --git a/observability-lib/grafana/builder.go b/observability-lib/grafana/builder.go index 11fa1305e..426d5cd5c 100644 --- a/observability-lib/grafana/builder.go +++ b/observability-lib/grafana/builder.go @@ -12,6 +12,7 @@ import ( type Builder struct { dashboardBuilder *dashboard.DashboardBuilder alertsBuilder []*alerting.RuleBuilder + alertGroupsBuilder []*alerting.RuleGroupBuilder contactPointsBuilder []*alerting.ContactPointBuilder notificationPoliciesBuilder []*alerting.NotificationPolicyBuilder panelCounter uint32 @@ -29,16 +30,23 @@ type BuilderOptions struct { } func NewBuilder(options *BuilderOptions) *Builder { - if options.TimeZone == "" { - options.TimeZone = common.TimeZoneBrowser - } + builder := &Builder{} - builder := &Builder{ - dashboardBuilder: dashboard.NewDashboardBuilder(options.Name). - Tags(options.Tags). - Refresh(options.Refresh). - Time(options.TimeFrom, options.TimeTo). 
- Timezone(options.TimeZone), + if options.Name != "" { + builder.dashboardBuilder = dashboard.NewDashboardBuilder(options.Name) + if options.Tags != nil { + builder.dashboardBuilder.Tags(options.Tags) + } + if options.Refresh != "" { + builder.dashboardBuilder.Refresh(options.Refresh) + } + if options.TimeFrom != "" && options.TimeTo != "" { + builder.dashboardBuilder.Time(options.TimeFrom, options.TimeTo) + } + if options.TimeZone == "" { + options.TimeZone = common.TimeZoneBrowser + } + builder.dashboardBuilder.Timezone(options.TimeZone) } if options.AlertsTags != nil { @@ -59,8 +67,8 @@ func (b *Builder) AddRow(title string) { } func (b *Builder) getPanelCounter() uint32 { - res := b.panelCounter b.panelCounter = inc(&b.panelCounter) + res := b.panelCounter return res } @@ -82,13 +90,24 @@ func (b *Builder) AddPanel(panel ...*Panel) { } else if item.logPanelBuilder != nil { item.logPanelBuilder.Id(panelID) b.dashboardBuilder.WithPanel(item.logPanelBuilder) + } else if item.heatmapBuilder != nil { + item.heatmapBuilder.Id(panelID) + b.dashboardBuilder.WithPanel(item.heatmapBuilder) } - if item.alertBuilder != nil { - b.alertsBuilder = append(b.alertsBuilder, item.alertBuilder) + if item.alertBuilders != nil && len(item.alertBuilders) > 0 { + b.AddAlert(item.alertBuilders...) } } } +func (b *Builder) AddAlert(alerts ...*alerting.RuleBuilder) { + b.alertsBuilder = append(b.alertsBuilder, alerts...) +} + +func (b *Builder) AddAlertGroup(alertGroups ...*alerting.RuleGroupBuilder) { + b.alertGroupsBuilder = append(b.alertGroupsBuilder, alertGroups...) +} + func (b *Builder) AddContactPoint(contactPoints ...*alerting.ContactPointBuilder) { b.contactPointsBuilder = append(b.contactPointsBuilder, contactPoints...) } @@ -97,10 +116,15 @@ func (b *Builder) AddNotificationPolicy(notificationPolicies ...*alerting.Notifi b.notificationPoliciesBuilder = append(b.notificationPoliciesBuilder, notificationPolicies...) 
} -func (b *Builder) Build() (*Dashboard, error) { - db, errBuildDashboard := b.dashboardBuilder.Build() - if errBuildDashboard != nil { - return nil, errBuildDashboard +func (b *Builder) Build() (*Observability, error) { + observability := Observability{} + + if b.dashboardBuilder != nil { + db, errBuildDashboard := b.dashboardBuilder.Build() + if errBuildDashboard != nil { + return nil, errBuildDashboard + } + observability.Dashboard = &db } var alerts []alerting.Rule @@ -125,6 +149,17 @@ func (b *Builder) Build() (*Dashboard, error) { alerts = append(alerts, alert) } } + observability.Alerts = alerts + + var alertGroups []alerting.RuleGroup + for _, alertGroupBuilder := range b.alertGroupsBuilder { + alertGroup, errBuildAlertGroup := alertGroupBuilder.Build() + if errBuildAlertGroup != nil { + return nil, errBuildAlertGroup + } + alertGroups = append(alertGroups, alertGroup) + } + observability.AlertGroups = alertGroups var contactPoints []alerting.ContactPoint for _, contactPointBuilder := range b.contactPointsBuilder { @@ -134,6 +169,7 @@ func (b *Builder) Build() (*Dashboard, error) { } contactPoints = append(contactPoints, contactPoint) } + observability.ContactPoints = contactPoints var notificationPolicies []alerting.NotificationPolicy for _, notificationPolicyBuilder := range b.notificationPoliciesBuilder { @@ -143,11 +179,7 @@ func (b *Builder) Build() (*Dashboard, error) { } notificationPolicies = append(notificationPolicies, notificationPolicy) } + observability.NotificationPolicies = notificationPolicies - return &Dashboard{ - Dashboard: &db, - Alerts: alerts, - ContactPoints: contactPoints, - NotificationPolicies: notificationPolicies, - }, nil + return &observability, nil } diff --git a/observability-lib/grafana/builder_test.go b/observability-lib/grafana/builder_test.go index 003072176..2c4f56253 100644 --- a/observability-lib/grafana/builder_test.go +++ b/observability-lib/grafana/builder_test.go @@ -3,6 +3,7 @@ package grafana_test import ( 
"testing" + "github.com/grafana/grafana-foundation-sdk/go/alerting" "github.com/stretchr/testify/require" "github.com/grafana/grafana-foundation-sdk/go/dashboard" @@ -21,18 +22,121 @@ func TestNewBuilder(t *testing.T) { TimeZone: "UTC", }) - db, err := builder.Build() + o, err := builder.Build() if err != nil { - t.Errorf("Error building dashboard: %v", err) + t.Errorf("Error during build: %v", err) + } + + require.NotEmpty(t, o.Dashboard) + require.Empty(t, o.Alerts) + require.Empty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds a dashboard with alerts", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + Tags: []string{"foo", "bar"}, + Refresh: "1m", + TimeFrom: "now-1h", + TimeTo: "now", + TimeZone: "UTC", + }) + builder.AddAlert(grafana.NewAlertRule(&grafana.AlertOptions{ + Title: "Alert Title", + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) + } + require.NotEmpty(t, o.Dashboard) + require.NotEmpty(t, o.Alerts) + require.Len(t, o.Alerts, 1) + require.Empty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds only alerts", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder.AddAlert(grafana.NewAlertRule(&grafana.AlertOptions{ + Title: "Alert Title", + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) + } + require.Empty(t, o.Dashboard) + require.NotEmpty(t, o.Alerts) + require.Len(t, o.Alerts, 1) + require.Empty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds an alert group", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder.AddAlertGroup(grafana.NewAlertGroup(&grafana.AlertGroupOptions{ + Title: "Group Title", + Interval: 30, // duration in seconds + })) + + o, err := builder.Build() + if err != nil 
{ + t.Errorf("Error during build: %v", err) } + require.Empty(t, o.Dashboard) + require.NotEmpty(t, o.AlertGroups) + require.Len(t, o.AlertGroups, 1) + require.Empty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + t.Run("NewBuilder builds a contact point", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder.AddContactPoint(grafana.NewContactPoint(&grafana.ContactPointOptions{ + Name: "slack", + Type: "slack", + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) + } + + require.Empty(t, o.Dashboard) + require.Empty(t, o.Alerts) + require.NotEmpty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds a notification policy", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder.AddNotificationPolicy(grafana.NewNotificationPolicy(&grafana.NotificationPolicyOptions{ + Receiver: "slack", + GroupBy: []string{"grafana_folder", "alertname"}, + ObjectMatchers: []alerting.ObjectMatcher{ + {"team", "=", "chainlink"}, + }, + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) + } + + require.Empty(t, o.Dashboard) + require.Empty(t, o.Alerts) + require.Empty(t, o.ContactPoints) + require.NotEmpty(t, o.NotificationPolicies) }) } func TestBuilder_AddVars(t *testing.T) { t.Run("AddVars adds variables to the dashboard", func(t *testing.T) { - builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + }) variable := grafana.NewQueryVariable(&grafana.QueryVariableOptions{ VariableOption: &grafana.VariableOption{ @@ -44,30 +148,34 @@ func TestBuilder_AddVars(t *testing.T) { }) builder.AddVars(variable) - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - 
require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.Len(t, o.Dashboard.Templating.List, 1) }) } func TestBuilder_AddRow(t *testing.T) { t.Run("AddRow adds a row to the dashboard", func(t *testing.T) { - builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + }) builder.AddRow("Row Title") - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.IsType(t, dashboard.RowPanel{}, *o.Dashboard.Panels[0].RowPanel) }) } func TestBuilder_AddPanel(t *testing.T) { t.Run("AddPanel adds a panel to the dashboard", func(t *testing.T) { - builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + }) panel := grafana.NewStatPanel(&grafana.StatPanelOptions{ PanelOptions: &grafana.PanelOptions{ @@ -76,10 +184,10 @@ func TestBuilder_AddPanel(t *testing.T) { }) builder.AddPanel(panel) - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.IsType(t, dashboard.Panel{}, *o.Dashboard.Panels[0].Panel) }) } diff --git a/observability-lib/grafana/dashboard.go b/observability-lib/grafana/dashboard.go index 425660969..303193705 100644 --- a/observability-lib/grafana/dashboard.go +++ b/observability-lib/grafana/dashboard.go @@ -3,6 +3,7 @@ package grafana import ( "encoding/json" "fmt" + "reflect" "github.com/grafana/grafana-foundation-sdk/go/alerting" "github.com/grafana/grafana-foundation-sdk/go/dashboard" @@ -16,20 +17,21 @@ const ( TypePlatformDocker TypePlatform = "docker" ) -type Dashboard struct { +type Observability struct { Dashboard *dashboard.Dashboard Alerts []alerting.Rule + AlertGroups []alerting.RuleGroup ContactPoints 
[]alerting.ContactPoint NotificationPolicies []alerting.NotificationPolicy } -func (db *Dashboard) GenerateJSON() ([]byte, error) { - dashboardJSON, err := json.MarshalIndent(db, "", " ") +func (o *Observability) GenerateJSON() ([]byte, error) { + output, err := json.MarshalIndent(o, "", " ") if err != nil { return nil, err } - return dashboardJSON, nil + return output, nil } type DeployOptions struct { @@ -40,52 +42,160 @@ type DeployOptions struct { NotificationTemplates string } -func (db *Dashboard) DeployToGrafana(options *DeployOptions) error { +func alertRuleExist(alerts []alerting.Rule, alert alerting.Rule) bool { + for _, a := range alerts { + if reflect.DeepEqual(a.Title, alert.Title) { + return true + } + } + return false +} + +func getAlertRuleByTitle(alerts []alerting.Rule, title string) *alerting.Rule { + for _, a := range alerts { + if a.Title == title { + return &a + } + } + return nil +} + +func getAlertRules(grafanaClient *api.Client, dashboardUID *string, folderUID string, alertGroups []alerting.RuleGroup) ([]alerting.Rule, error) { + var alertsRule []alerting.Rule + var errGetAlertRules error + + if dashboardUID != nil { + alertsRule, errGetAlertRules = grafanaClient.GetAlertRulesByDashboardUID(*dashboardUID) + if errGetAlertRules != nil { + return nil, errGetAlertRules + } + } else { + if alertGroups != nil && len(alertGroups) > 0 { + for _, alertGroup := range alertGroups { + alertsRulePerGroup, errGetAlertRulesPerGroup := grafanaClient.GetAlertRulesByFolderUIDAndGroupName(folderUID, *alertGroup.Title) + if errGetAlertRulesPerGroup != nil { + return nil, errGetAlertRulesPerGroup + } + alertsRule = append(alertsRule, alertsRulePerGroup...) 
+ } + } + } + + return alertsRule, nil +} + +func (o *Observability) DeployToGrafana(options *DeployOptions) error { grafanaClient := api.NewClient( options.GrafanaURL, options.GrafanaToken, ) - folder, errFolder := grafanaClient.FindOrCreateFolder(options.FolderName) - if errFolder != nil { - return errFolder + // Create or update folder + var folder *api.Folder + var errFolder error + if options.FolderName != "" { + folder, errFolder = grafanaClient.FindOrCreateFolder(options.FolderName) + if errFolder != nil { + return errFolder + } } - newDashboard, _, errPostDashboard := grafanaClient.PostDashboard(api.PostDashboardRequest{ - Dashboard: db.Dashboard, - Overwrite: true, - FolderID: int(folder.ID), - }) - if errPostDashboard != nil { - return errPostDashboard + // Create or update dashboard + var newDashboard api.PostDashboardResponse + var errPostDashboard error + if folder != nil && o.Dashboard != nil { + newDashboard, _, errPostDashboard = grafanaClient.PostDashboard(api.PostDashboardRequest{ + Dashboard: o.Dashboard, + Overwrite: true, + FolderID: int(folder.ID), + }) + if errPostDashboard != nil { + return errPostDashboard + } } - // Create alerts for the dashboard - if options.EnableAlerts && db.Alerts != nil && len(db.Alerts) > 0 { - // Get alert rules for the dashboard - alertsRule, errGetAlertRules := grafanaClient.GetAlertRulesByDashboardUID(*newDashboard.UID) + // If disabling alerts delete alerts for the folder and alert groups scope + if folder != nil && !options.EnableAlerts && o.Alerts != nil && len(o.Alerts) > 0 { + alertsRule, errGetAlertRules := getAlertRules(grafanaClient, newDashboard.UID, folder.UID, o.AlertGroups) if errGetAlertRules != nil { return errGetAlertRules } - // delete alert rules for the dashboard for _, rule := range alertsRule { _, _, errDeleteAlertRule := grafanaClient.DeleteAlertRule(*rule.Uid) if errDeleteAlertRule != nil { return errDeleteAlertRule } } + } + + // Create or update alerts + if folder != nil && 
options.EnableAlerts && o.Alerts != nil && len(o.Alerts) > 0 { + alertsRule, errGetAlertRules := getAlertRules(grafanaClient, newDashboard.UID, folder.UID, o.AlertGroups) + if errGetAlertRules != nil { + return errGetAlertRules + } + + // delete alert rules that are not defined anymore in the code + for _, rule := range alertsRule { + if !alertRuleExist(o.Alerts, rule) { + _, _, errDeleteAlertRule := grafanaClient.DeleteAlertRule(*rule.Uid) + if errDeleteAlertRule != nil { + return errDeleteAlertRule + } + } + } + + // Create alert rules + for _, alert := range o.Alerts { + if folder.UID != "" { + alert.FolderUID = folder.UID + } + if o.Dashboard != nil { + if alert.RuleGroup == "" { + alert.RuleGroup = *o.Dashboard.Title + } + if alert.Annotations["panel_title"] != "" { + panelId := panelIDByTitle(o.Dashboard, alert.Annotations["panel_title"]) + // we can clean it up as it was only used to get the panelId + delete(alert.Annotations, "panel_title") + if panelId != "" { + // Both or none should be set + alert.Annotations["__panelId__"] = panelId + alert.Annotations["__dashboardUid__"] = *newDashboard.UID + } + } + } else { + if alert.RuleGroup == "" { + return fmt.Errorf("you must create at one rule group and set it to your alerts") + } + } - // Create alert rules for the dashboard - for _, alert := range db.Alerts { - alert.RuleGroup = *db.Dashboard.Title - alert.FolderUID = folder.UID - alert.Annotations["__dashboardUid__"] = *newDashboard.UID - alert.Annotations["__panelId__"] = panelIDByTitle(db.Dashboard, alert.Title) + if alertRuleExist(alertsRule, alert) { + // update alert rule if it already exists + alertToUpdate := getAlertRuleByTitle(alertsRule, alert.Title) + if alertToUpdate != nil { + _, _, errPutAlertRule := grafanaClient.UpdateAlertRule(*alertToUpdate.Uid, alert) + if errPutAlertRule != nil { + return errPutAlertRule + } + } + } else { + // create alert rule if it doesn't exist + _, _, errPostAlertRule := grafanaClient.PostAlertRule(alert) + if 
errPostAlertRule != nil { + return errPostAlertRule + } + } + } + } - _, _, errPostAlertRule := grafanaClient.PostAlertRule(alert) - if errPostAlertRule != nil { - return errPostAlertRule + // Update alert groups + if folder != nil { + for _, alertGroup := range o.AlertGroups { + _, _, errPostAlertGroup := grafanaClient.UpdateAlertRuleGroup(folder.UID, alertGroup) + if errPostAlertGroup != nil { + return errPostAlertGroup } } } @@ -107,8 +217,8 @@ func (db *Dashboard) DeployToGrafana(options *DeployOptions) error { } // Create contact points for the alerts - if db.ContactPoints != nil && len(db.ContactPoints) > 0 { - for _, contactPoint := range db.ContactPoints { + if o.ContactPoints != nil && len(o.ContactPoints) > 0 { + for _, contactPoint := range o.ContactPoints { errCreateOrUpdateContactPoint := grafanaClient.CreateOrUpdateContactPoint(contactPoint) if errCreateOrUpdateContactPoint != nil { return errCreateOrUpdateContactPoint @@ -117,8 +227,8 @@ func (db *Dashboard) DeployToGrafana(options *DeployOptions) error { } // Create notification policies for the alerts - if db.NotificationPolicies != nil && len(db.NotificationPolicies) > 0 { - for _, notificationPolicy := range db.NotificationPolicies { + if o.NotificationPolicies != nil && len(o.NotificationPolicies) > 0 { + for _, notificationPolicy := range o.NotificationPolicies { errAddNestedPolicy := grafanaClient.AddNestedPolicy(notificationPolicy) if errAddNestedPolicy != nil { return errAddNestedPolicy @@ -156,6 +266,19 @@ func DeleteDashboard(options *DeleteOptions) error { return errGetDashboard } + alertsRule, errGetAlertRules := grafanaClient.GetAlertRulesByDashboardUID(*db.UID) + if errGetAlertRules != nil { + return errGetAlertRules + } + + // delete existing alert rules for the dashboard if alerts are disabled + for _, rule := range alertsRule { + _, _, errDeleteAlertRule := grafanaClient.DeleteAlertRule(*rule.Uid) + if errDeleteAlertRule != nil { + return errDeleteAlertRule + } + } + _, errDelete := 
grafanaClient.DeleteDashboardByUID(*db.UID) if errDelete != nil { return errDelete diff --git a/observability-lib/grafana/dashboard_test.go b/observability-lib/grafana/dashboard_test.go index 4a46ed17a..ecac1d04a 100644 --- a/observability-lib/grafana/dashboard_test.go +++ b/observability-lib/grafana/dashboard_test.go @@ -3,7 +3,6 @@ package grafana_test import ( "testing" - "github.com/grafana/grafana-foundation-sdk/go/expr" "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" "github.com/stretchr/testify/require" ) @@ -32,7 +31,9 @@ func TestGenerateJSON(t *testing.T) { Legend: `{{account}}`, }, }, - AlertOptions: &grafana.AlertOptions{ + }, + AlertsOptions: []grafana.AlertOptions{ + { Summary: `ETH Balance is lower than threshold`, Description: `ETH Balance critically low at {{ index $values "A" }}`, RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", @@ -54,11 +55,9 @@ func TestGenerateJSON(t *testing.T) { RefID: "B", ThresholdExpression: &grafana.ThresholdExpression{ Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{2, 0}, - Type: expr.TypeThresholdTypeLt, - }, + ThresholdConditionsOptions: grafana.ThresholdConditionsOption{ + Params: []float64{2}, + Type: grafana.TypeThresholdTypeLt, }, }, }, @@ -67,12 +66,12 @@ func TestGenerateJSON(t *testing.T) { }, })) - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - json, err := db.GenerateJSON() + json, err := o.GenerateJSON() require.IsType(t, json, []byte{}) }) } diff --git a/observability-lib/grafana/panels.go b/observability-lib/grafana/panels.go index 4466a1085..818b41cdb 100644 --- a/observability-lib/grafana/panels.go +++ b/observability-lib/grafana/panels.go @@ -5,6 +5,7 @@ import ( "github.com/grafana/grafana-foundation-sdk/go/common" "github.com/grafana/grafana-foundation-sdk/go/dashboard" 
"github.com/grafana/grafana-foundation-sdk/go/gauge" + "github.com/grafana/grafana-foundation-sdk/go/heatmap" "github.com/grafana/grafana-foundation-sdk/go/logs" "github.com/grafana/grafana-foundation-sdk/go/prometheus" "github.com/grafana/grafana-foundation-sdk/go/stat" @@ -36,6 +37,7 @@ func newQuery(query Query) *prometheus.DataqueryBuilder { type LegendOptions struct { Placement common.LegendPlacement DisplayMode common.LegendDisplayMode + Calcs []string } func newLegend(options *LegendOptions) *common.VizLegendOptionsBuilder { @@ -49,8 +51,14 @@ func newLegend(options *LegendOptions) *common.VizLegendOptionsBuilder { builder := common.NewVizLegendOptionsBuilder(). ShowLegend(true). - Placement(options.Placement). - DisplayMode(options.DisplayMode) + Placement(options.Placement) + + if len(options.Calcs) > 0 { + options.DisplayMode = common.LegendDisplayModeTable + builder.Calcs(options.Calcs) + } + + builder.DisplayMode(options.DisplayMode) return builder } @@ -80,21 +88,62 @@ func newTransform(options *TransformOptions) dashboard.DataTransformerConfig { } } +type ToolTipOptions struct { + Mode common.TooltipDisplayMode + Sort common.SortOrder + MaxWidth *float64 + MaxHeight *float64 +} + +func newToolTip(options *ToolTipOptions) *common.VizTooltipOptionsBuilder { + if options.Mode == "" { + options.Mode = common.TooltipDisplayModeSingle + } + + if options.Sort == "" { + options.Sort = common.SortOrderNone + } + + builder := common.NewVizTooltipOptionsBuilder(). + Mode(options.Mode). 
+ Sort(options.Sort) + + if options.MaxWidth != nil { + builder.MaxWidth(*options.MaxWidth) + } + + if options.MaxHeight != nil { + builder.MaxHeight(*options.MaxHeight) + } + + return builder +} + type PanelOptions struct { - Datasource string - Title string - Description string - Span uint32 - Height uint32 - Decimals float64 - Unit string - NoValue string - Min *float64 - Max *float64 - Query []Query - Threshold *ThresholdOptions - Transform *TransformOptions - AlertOptions *AlertOptions + Datasource string + Title string + Description string + Span uint32 + Height uint32 + Decimals float64 + Unit string + NoValue string + Min *float64 + Max *float64 + Query []Query + Threshold *ThresholdOptions + Transform *TransformOptions + ColorScheme dashboard.FieldColorModeId +} + +type Panel struct { + statPanelBuilder *stat.PanelBuilder + timeSeriesPanelBuilder *timeseries.PanelBuilder + gaugePanelBuilder *gauge.PanelBuilder + tablePanelBuilder *table.PanelBuilder + logPanelBuilder *logs.PanelBuilder + heatmapBuilder *heatmap.PanelBuilder + alertBuilders []*alerting.RuleBuilder } // panel defaults @@ -125,15 +174,7 @@ type StatPanelOptions struct { GraphMode common.BigValueGraphMode TextMode common.BigValueTextMode Orientation common.VizOrientation -} - -type Panel struct { - statPanelBuilder *stat.PanelBuilder - timeSeriesPanelBuilder *timeseries.PanelBuilder - gaugePanelBuilder *gauge.PanelBuilder - tablePanelBuilder *table.PanelBuilder - logPanelBuilder *logs.PanelBuilder - alertBuilder *alerting.RuleBuilder + Mappings []dashboard.ValueMapping } func NewStatPanel(options *StatPanelOptions) *Panel { @@ -170,6 +211,7 @@ func NewStatPanel(options *StatPanelOptions) *Panel { TextMode(options.TextMode). Orientation(options.Orientation). JustifyMode(options.JustifyMode). + Mappings(options.Mappings). 
ReduceOptions(common.NewReduceDataOptionsBuilder().Calcs([]string{"last"})) if options.Min != nil { @@ -204,13 +246,8 @@ func NewStatPanel(options *StatPanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - statPanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) } return &Panel{ @@ -220,18 +257,17 @@ func NewStatPanel(options *StatPanelOptions) *Panel { type TimeSeriesPanelOptions struct { *PanelOptions + AlertsOptions []AlertOptions FillOpacity float64 ScaleDistribution common.ScaleDistribution LegendOptions *LegendOptions + ToolTipOptions *ToolTipOptions + ThresholdStyle common.GraphThresholdsStyleMode } func NewTimeSeriesPanel(options *TimeSeriesPanelOptions) *Panel { setDefaults(options.PanelOptions) - if options.FillOpacity == 0 { - options.FillOpacity = 2 - } - if options.ScaleDistribution == "" { options.ScaleDistribution = common.ScaleDistributionLinear } @@ -240,6 +276,10 @@ func NewTimeSeriesPanel(options *TimeSeriesPanelOptions) *Panel { options.LegendOptions = &LegendOptions{} } + if options.ToolTipOptions == nil { + options.ToolTipOptions = &ToolTipOptions{} + } + newPanel := timeseries.NewPanelBuilder(). Datasource(datasourceRef(options.Datasource)). Title(options.Title). @@ -253,7 +293,8 @@ func NewTimeSeriesPanel(options *TimeSeriesPanelOptions) *Panel { Legend(newLegend(options.LegendOptions)). ScaleDistribution(common.NewScaleDistributionConfigBuilder(). Type(options.ScaleDistribution), - ) + ). 
+ Tooltip(newToolTip(options.ToolTipOptions)) if options.Min != nil { newPanel.Min(*options.Min) @@ -269,23 +310,36 @@ func NewTimeSeriesPanel(options *TimeSeriesPanelOptions) *Panel { if options.Threshold != nil { newPanel.Thresholds(newThresholds(options.Threshold)) + + if options.ThresholdStyle != "" { + newPanel.ThresholdsStyle(common.NewGraphThresholdsStyleConfigBuilder().Mode(options.ThresholdStyle)) + } } if options.Transform != nil { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) + } - return &Panel{ - timeSeriesPanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), + var alertBuilders []*alerting.RuleBuilder + if options.AlertsOptions != nil && len(options.AlertsOptions) > 0 { + for _, alert := range options.AlertsOptions { + // this is used as an internal mechanism to set the panel title in the alert to associate panelId with alert + alert.PanelTitle = options.Title + // if name is provided use it, otherwise use panel title + if alert.Title == "" { + alert.Title = options.Title + } + alertBuilders = append(alertBuilders, NewAlertRule(&alert)) } } return &Panel{ timeSeriesPanelBuilder: newPanel, + alertBuilders: alertBuilders, } } @@ -303,7 +357,11 @@ func NewGaugePanel(options *GaugePanelOptions) *Panel { Span(options.Span). Height(options.Height). Decimals(options.Decimals). - Unit(options.Unit) + Unit(options.Unit). + ReduceOptions( + common.NewReduceDataOptionsBuilder(). 
+ Calcs([]string{"lastNotNull"}).Values(false), + ) if options.Min != nil { newPanel.Min(*options.Min) @@ -325,15 +383,6 @@ func NewGaugePanel(options *GaugePanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - gaugePanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } - } - return &Panel{ gaugePanelBuilder: newPanel, } @@ -376,13 +425,8 @@ func NewTablePanel(options *TablePanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - tablePanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) } return &Panel{ @@ -392,6 +436,7 @@ func NewTablePanel(options *TablePanelOptions) *Panel { type LogPanelOptions struct { *PanelOptions + PrettifyJSON bool } func NewLogPanel(options *LogPanelOptions) *Panel { @@ -403,7 +448,8 @@ func NewLogPanel(options *LogPanelOptions) *Panel { Description(options.Description). Span(options.Span). Height(options.Height). - NoValue(options.NoValue) + NoValue(options.NoValue). 
+ PrettifyLogMessage(options.PrettifyJSON) if options.Min != nil { newPanel.Min(*options.Min) @@ -425,16 +471,58 @@ func NewLogPanel(options *LogPanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - logPanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) } return &Panel{ logPanelBuilder: newPanel, } } + +type HeatmapPanelOptions struct { + *PanelOptions +} + +func NewHeatmapPanel(options *HeatmapPanelOptions) *Panel { + setDefaults(options.PanelOptions) + + newPanel := heatmap.NewPanelBuilder(). + Datasource(datasourceRef(options.Datasource)). + Title(options.Title). + Description(options.Description). + Span(options.Span). + Height(options.Height). + Decimals(options.Decimals). + Unit(options.Unit). + NoValue(options.NoValue) + + if options.Min != nil { + newPanel.Min(*options.Min) + } + + if options.Max != nil { + newPanel.Max(*options.Max) + } + + for _, q := range options.Query { + q.Format = prometheus.PromQueryFormatHeatmap + newPanel.WithTarget(newQuery(q)) + } + + if options.Threshold != nil { + newPanel.Thresholds(newThresholds(options.Threshold)) + } + + if options.Transform != nil { + newPanel.WithTransformation(newTransform(options.Transform)) + } + + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) + } + + return &Panel{ + heatmapBuilder: newPanel, + } +} diff --git a/observability-lib/grafana/variables.go b/observability-lib/grafana/variables.go index 5ad263368..445a071cb 100644 --- a/observability-lib/grafana/variables.go +++ b/observability-lib/grafana/variables.go @@ -1,38 +1,93 @@ package grafana import ( + "strings" + "github.com/grafana/grafana-foundation-sdk/go/cog" 
"github.com/grafana/grafana-foundation-sdk/go/dashboard" ) -type VariableOption struct { - Name string - Label string +type VariableOptionValues struct { } -type QueryVariableOptions struct { - *VariableOption - Datasource string - Query string - Multi bool - Regex string +type VariableOption struct { + Name string + Label string + Description string CurrentText string CurrentValue string - IncludeAll bool } -func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBuilder { - if options.CurrentText == "" { +type CustomVariableOptions struct { + *VariableOption + Values map[string]any +} + +func NewCustomVariable(options *CustomVariableOptions) *dashboard.CustomVariableBuilder { + if options.CurrentText == "" && options.CurrentValue == "" { options.CurrentText = "All" + options.CurrentValue = "$__all" + } + + variable := dashboard.NewCustomVariableBuilder(options.Name). + Label(options.Label). + Description(options.Description). + Current(dashboard.VariableOption{ + Selected: cog.ToPtr[bool](true), + Text: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentText)}, + Value: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentValue)}, + }) + + optionsList := []dashboard.VariableOption{ + { + Selected: cog.ToPtr[bool](true), + Text: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentText)}, + Value: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentValue)}, + }, + } + for key, value := range options.Values { + if key != options.CurrentText { + option := dashboard.VariableOption{ + Text: dashboard.StringOrArrayOfString{String: cog.ToPtr(key)}, + Value: dashboard.StringOrArrayOfString{String: cog.ToPtr(value.(string))}, + } + optionsList = append(optionsList, option) + } + } + variable.Options(optionsList) + + valuesString := "" + for key, value := range options.Values { + // Escape commas and colons in the value which are reserved characters for values string + cleanValue := 
strings.ReplaceAll(value.(string), ",", "\\,") + cleanValue = strings.ReplaceAll(cleanValue, ":", "\\:") + valuesString += key + " : " + cleanValue + " , " } + variable.Values(dashboard.StringOrMap{String: cog.ToPtr(strings.TrimSuffix(valuesString, ", "))}) + + return variable +} + +type QueryVariableOptions struct { + *VariableOption + Datasource string + Query string + Multi bool + Regex string + IncludeAll bool + QueryWithType map[string]any + Hide *dashboard.VariableHide +} - if options.CurrentValue == "" { +func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBuilder { + if options.CurrentText == "" && options.CurrentValue == "" { + options.CurrentText = "All" options.CurrentValue = "$__all" } variable := dashboard.NewQueryVariableBuilder(options.Name). Label(options.Label). - Query(dashboard.StringOrMap{String: cog.ToPtr[string](options.Query)}). + Description(options.Description). Datasource(datasourceRef(options.Datasource)). Current(dashboard.VariableOption{ Selected: cog.ToPtr[bool](true), @@ -42,6 +97,12 @@ func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBui Sort(dashboard.VariableSortAlphabeticalAsc). 
Multi(options.Multi) + if options.Query != "" { + variable.Query(dashboard.StringOrMap{String: cog.ToPtr[string](options.Query)}) + } else if options.QueryWithType != nil { + variable.Query(dashboard.StringOrMap{Map: options.QueryWithType}) + } + if options.Regex != "" { variable.Regex(options.Regex) } @@ -50,6 +111,10 @@ func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBui variable.IncludeAll(options.IncludeAll) } + if options.Hide != nil { + variable.Hide(*options.Hide) + } + return variable } @@ -59,12 +124,18 @@ type IntervalVariableOptions struct { } func NewIntervalVariable(options *IntervalVariableOptions) *dashboard.IntervalVariableBuilder { + if options.CurrentText == "" && options.CurrentValue == "" { + options.CurrentText = "All" + options.CurrentValue = "$__all" + } + return dashboard.NewIntervalVariableBuilder(options.Name). Label(options.Label). + Description(options.Description). Values(dashboard.StringOrMap{String: cog.ToPtr[string](options.Interval)}). 
Current(dashboard.VariableOption{ Selected: cog.ToPtr[bool](true), - Text: dashboard.StringOrArrayOfString{ArrayOfString: []string{"All"}}, - Value: dashboard.StringOrArrayOfString{ArrayOfString: []string{"$__all"}}, + Text: dashboard.StringOrArrayOfString{ArrayOfString: []string{options.CurrentText}}, + Value: dashboard.StringOrArrayOfString{ArrayOfString: []string{options.CurrentValue}}, }) } diff --git a/observability-lib/main.go b/observability-lib/main.go index 73a5d750d..0a1350eb7 100644 --- a/observability-lib/main.go +++ b/observability-lib/main.go @@ -1,26 +1,9 @@ package main import ( - "log" - - "github.com/spf13/cobra" - "github.com/smartcontractkit/chainlink-common/observability-lib/cmd" ) -var rootCmd = &cobra.Command{ - Use: "observability-lib", - Short: "observability-lib is a library for creating and deploying Grafana dashboards and alerts", -} - -func init() { - rootCmd.AddCommand(cmd.DeployCmd) - rootCmd.AddCommand(cmd.DeleteCmd) - rootCmd.AddCommand(cmd.GenerateCmd) -} - func main() { - if err := rootCmd.Execute(); err != nil { - log.Fatalln(err) - } + cmd.Execute() } diff --git a/pkg/beholder/auth.go b/pkg/beholder/auth.go new file mode 100644 index 000000000..ae944ab0f --- /dev/null +++ b/pkg/beholder/auth.go @@ -0,0 +1,27 @@ +package beholder + +import ( + "crypto/ed25519" + "fmt" +) + +// authHeaderKey is the name of the header that the node authenticator will use to send the auth token +var authHeaderKey = "X-Beholder-Node-Auth-Token" + +// authHeaderVersion is the version of the auth header format +var authHeaderVersion = "1" + +// BuildAuthHeaders creates the auth header value to be included on requests. 
+// The current format for the header is: +// +// :: +// +// where the byte value of is what's being signed +func BuildAuthHeaders(privKey ed25519.PrivateKey) map[string]string { + pubKey := privKey.Public().(ed25519.PublicKey) + messageBytes := pubKey + signature := ed25519.Sign(privKey, messageBytes) + headerValue := fmt.Sprintf("%s:%x:%x", authHeaderVersion, messageBytes, signature) + + return map[string]string{authHeaderKey: headerValue} +} diff --git a/pkg/beholder/auth_test.go b/pkg/beholder/auth_test.go new file mode 100644 index 000000000..fd0e2c86c --- /dev/null +++ b/pkg/beholder/auth_test.go @@ -0,0 +1,22 @@ +package beholder + +import ( + "crypto/ed25519" + "encoding/hex" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBuildAuthHeaders(t *testing.T) { + csaPrivKeyHex := "1ac84741fa51c633845fa65c06f37a700303619135630a01f2d22fb98eb1c54ecab39509e63cfaa81c70e2c907391f96803aacb00db5619a5ace5588b4b08159" + csaPrivKeyBytes, err := hex.DecodeString(csaPrivKeyHex) + assert.NoError(t, err) + csaPrivKey := ed25519.PrivateKey(csaPrivKeyBytes) + + expectedHeaders := map[string]string{ + "X-Beholder-Node-Auth-Token": "1:cab39509e63cfaa81c70e2c907391f96803aacb00db5619a5ace5588b4b08159:4403178e299e9acc5b48ae97de617d3975c5d431b794cfab1d23eda01c194119b2360f5f74cfb3e4f706237ab57a0ba88ffd3f8addbc1e5197b3d3e13a1fc409", + } + + assert.Equal(t, expectedHeaders, BuildAuthHeaders(csaPrivKey)) +} diff --git a/pkg/beholder/client.go b/pkg/beholder/client.go index 63a91922e..69e15c284 100644 --- a/pkg/beholder/client.go +++ b/pkg/beholder/client.go @@ -6,6 +6,7 @@ import ( "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc" "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" otellog "go.opentelemetry.io/otel/log" @@ -13,7 +14,6 @@ import ( sdklog 
"go.opentelemetry.io/otel/sdk/log" sdkmetric "go.opentelemetry.io/otel/sdk/metric" sdkresource "go.opentelemetry.io/otel/sdk/resource" - "go.opentelemetry.io/otel/sdk/trace" sdktrace "go.opentelemetry.io/otel/sdk/trace" oteltrace "go.opentelemetry.io/otel/trace" "google.golang.org/grpc/credentials" @@ -53,43 +53,80 @@ type Client struct { // NewClient creates a new Client with initialized OpenTelemetry components // To handle OpenTelemetry errors use [otel.SetErrorHandler](https://pkg.go.dev/go.opentelemetry.io/otel#SetErrorHandler) func NewClient(cfg Config) (*Client, error) { + if cfg.OtelExporterGRPCEndpoint != "" && cfg.OtelExporterHTTPEndpoint != "" { + return nil, errors.New("only one exporter endpoint should be set") + } + if cfg.OtelExporterGRPCEndpoint == "" && cfg.OtelExporterHTTPEndpoint == "" { + return nil, errors.New("at least one exporter endpoint should be set") + } + if cfg.OtelExporterHTTPEndpoint != "" { + factory := func(options ...otlploghttp.Option) (sdklog.Exporter, error) { + // note: context is unused internally + return otlploghttp.New(context.Background(), options...) //nolint + } + return newHTTPClient(cfg, factory) + } + factory := func(options ...otlploggrpc.Option) (sdklog.Exporter, error) { // note: context is unused internally return otlploggrpc.New(context.Background(), options...) 
//nolint } - return newClient(cfg, factory) + return newGRPCClient(cfg, factory) } // Used for testing to override the default exporter type otlploggrpcFactory func(options ...otlploggrpc.Option) (sdklog.Exporter, error) -func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { +func newGRPCClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, err := newOtelResource(cfg) - noop := NewNoopClient() if err != nil { - return noop, err + return nil, err } creds := insecure.NewCredentials() if !cfg.InsecureConnection && cfg.CACertFile != "" { creds, err = credentials.NewClientTLSFromFile(cfg.CACertFile, "") if err != nil { - return noop, err + return nil, err } } - sharedLogExporter, err := otlploggrpcNew( + opts := []otlploggrpc.Option{ otlploggrpc.WithTLSCredentials(creds), otlploggrpc.WithEndpoint(cfg.OtelExporterGRPCEndpoint), - ) + otlploggrpc.WithHeaders(cfg.AuthHeaders), + } + if cfg.LogRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlploggrpc.WithRetry(otlploggrpc.RetryConfig{ + Enabled: cfg.LogRetryConfig.Enabled(), + InitialInterval: cfg.LogRetryConfig.GetInitialInterval(), + MaxInterval: cfg.LogRetryConfig.GetMaxInterval(), + MaxElapsedTime: cfg.LogRetryConfig.GetMaxElapsedTime(), + })) + } + sharedLogExporter, err := otlploggrpcNew(opts...) 
if err != nil { - return noop, err + return nil, err } // Logger var loggerProcessor sdklog.Processor if cfg.LogBatchProcessor { + batchProcessorOpts := []sdklog.BatchProcessorOption{} + if cfg.LogExportTimeout > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportTimeout(cfg.LogExportTimeout)) // Default is 30s + } + if cfg.LogExportMaxBatchSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportMaxBatchSize(cfg.LogExportMaxBatchSize)) // Default is 512, must be <= maxQueueSize + } + if cfg.LogExportInterval > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportInterval(cfg.LogExportInterval)) // Default is 1s + } + if cfg.LogMaxQueueSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithMaxQueueSize(cfg.LogMaxQueueSize)) // Default is 2048 + } loggerProcessor = sdklog.NewBatchProcessor( sharedLogExporter, - sdklog.WithExportTimeout(cfg.LogExportTimeout), // Default is 30s + batchProcessorOpts..., ) } else { loggerProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) @@ -102,7 +139,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, ) if err != nil { - return noop, err + return nil, err } loggerProvider := sdklog.NewLoggerProvider( sdklog.WithResource(loggerResource), @@ -113,23 +150,36 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { // Tracer tracerProvider, err := newTracerProvider(cfg, baseResource, creds) if err != nil { - return noop, err + return nil, err } tracer := tracerProvider.Tracer(defaultPackageName) // Meter meterProvider, err := newMeterProvider(cfg, baseResource, creds) if err != nil { - return noop, err + return nil, err } meter := meterProvider.Meter(defaultPackageName) // Message Emitter var messageLogProcessor sdklog.Processor if cfg.EmitterBatchProcessor { + batchProcessorOpts := []sdklog.BatchProcessorOption{} + if cfg.EmitterExportTimeout > 0 { + batchProcessorOpts = 
append(batchProcessorOpts, sdklog.WithExportTimeout(cfg.EmitterExportTimeout)) // Default is 30s + } + if cfg.EmitterExportMaxBatchSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportMaxBatchSize(cfg.EmitterExportMaxBatchSize)) // Default is 512, must be <= maxQueueSize + } + if cfg.EmitterExportInterval > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportInterval(cfg.EmitterExportInterval)) // Default is 1s + } + if cfg.EmitterMaxQueueSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithMaxQueueSize(cfg.EmitterMaxQueueSize)) // Default is 2048 + } messageLogProcessor = sdklog.NewBatchProcessor( sharedLogExporter, - sdklog.WithExportTimeout(cfg.EmitterExportTimeout), // Default is 30s + batchProcessorOpts..., ) } else { messageLogProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) @@ -143,7 +193,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, ) if err != nil { - return noop, err + return nil, err } messageLoggerProvider := sdklog.NewLoggerProvider( @@ -174,7 +224,15 @@ func (c Client) Close() (err error) { } // Returns a new Client with the same configuration but with a different package name +// Deprecated: Use ForName func (c Client) ForPackage(name string) Client { + return c.ForName(name) +} + +// ForName returns a new Client with the same configuration but with a different name. +// For global package-scoped telemetry, use the package name. +// For injected component-scoped telemetry, use a fully qualified name that uniquely identifies this instance. 
+func (c Client) ForName(name string) Client { // Logger logger := c.LoggerProvider.Logger(name) // Tracer @@ -210,6 +268,21 @@ func newOtelResource(cfg Config) (resource *sdkresource.Resource, err error) { if err != nil { return nil, err } + + // Add csa public key resource attribute + csaPublicKeyHex := "not-configured" + if len(cfg.AuthPublicKeyHex) > 0 { + csaPublicKeyHex = cfg.AuthPublicKeyHex + } + csaPublicKeyAttr := attribute.String("csa_public_key", csaPublicKeyHex) + resource, err = sdkresource.Merge( + sdkresource.NewSchemaless(csaPublicKeyAttr), + resource, + ) + if err != nil { + return nil, err + } + // Add custom resource attributes resource, err = sdkresource.Merge( sdkresource.NewSchemaless(cfg.ResourceAttributes...), @@ -247,16 +320,31 @@ type shutdowner interface { func newTracerProvider(config Config, resource *sdkresource.Resource, creds credentials.TransportCredentials) (*sdktrace.TracerProvider, error) { ctx := context.Background() - exporter, err := otlptracegrpc.New(ctx, + exporterOpts := []otlptracegrpc.Option{ otlptracegrpc.WithTLSCredentials(creds), otlptracegrpc.WithEndpoint(config.OtelExporterGRPCEndpoint), - ) + otlptracegrpc.WithHeaders(config.AuthHeaders), + } + if config.TraceRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + exporterOpts = append(exporterOpts, otlptracegrpc.WithRetry(otlptracegrpc.RetryConfig{ + Enabled: config.TraceRetryConfig.Enabled(), + InitialInterval: config.TraceRetryConfig.GetInitialInterval(), + MaxInterval: config.TraceRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.TraceRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is used internally + exporter, err := otlptracegrpc.New(ctx, exporterOpts...) 
if err != nil { return nil, err } - + batcherOpts := []sdktrace.BatchSpanProcessorOption{} + if config.TraceBatchTimeout > 0 { + batcherOpts = append(batcherOpts, sdktrace.WithBatchTimeout(config.TraceBatchTimeout)) // Default is 5s + } opts := []sdktrace.TracerProviderOption{ - sdktrace.WithBatcher(exporter, trace.WithBatchTimeout(config.TraceBatchTimeout)), // Default is 5s + sdktrace.WithBatcher(exporter, batcherOpts...), sdktrace.WithResource(resource), sdktrace.WithSampler( sdktrace.ParentBased( @@ -272,16 +360,25 @@ func newTracerProvider(config Config, resource *sdkresource.Resource, creds cred func newMeterProvider(config Config, resource *sdkresource.Resource, creds credentials.TransportCredentials) (*sdkmetric.MeterProvider, error) { ctx := context.Background() - - exporter, err := otlpmetricgrpc.New( - ctx, + opts := []otlpmetricgrpc.Option{ otlpmetricgrpc.WithTLSCredentials(creds), otlpmetricgrpc.WithEndpoint(config.OtelExporterGRPCEndpoint), - ) + otlpmetricgrpc.WithHeaders(config.AuthHeaders), + } + if config.MetricRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlpmetricgrpc.WithRetry(otlpmetricgrpc.RetryConfig{ + Enabled: config.MetricRetryConfig.Enabled(), + InitialInterval: config.MetricRetryConfig.GetInitialInterval(), + MaxInterval: config.MetricRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.MetricRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is unused internally + exporter, err := otlpmetricgrpc.New(ctx, opts...) 
if err != nil { return nil, err } - mp := sdkmetric.NewMeterProvider( sdkmetric.WithReader( sdkmetric.NewPeriodicReader( @@ -289,6 +386,7 @@ func newMeterProvider(config Config, resource *sdkresource.Resource, creds crede sdkmetric.WithInterval(config.MetricReaderInterval), // Default is 10s )), sdkmetric.WithResource(resource), + sdkmetric.WithView(config.MetricViews...), ) return mp, nil } diff --git a/pkg/beholder/client_test.go b/pkg/beholder/client_test.go index aad3f475f..c76e9dfdd 100644 --- a/pkg/beholder/client_test.go +++ b/pkg/beholder/client_test.go @@ -2,14 +2,16 @@ package beholder import ( "context" - "fmt" + "errors" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" otellog "go.opentelemetry.io/otel/log" sdklog "go.opentelemetry.io/otel/sdk/log" @@ -49,11 +51,37 @@ func TestClient(t *testing.T) { "byte_key_1": []byte("byte_val_1"), "str_slice_key_1": []string{"str_val_1", "str_val_2"}, "nil_key_1": nil, + "beholder_domain": "TestDomain", // Required field + "beholder_entity": "TestEntity", // Required field "beholder_data_schema": "/schemas/ids/1001", // Required field, URI } } defaultMessageBody := []byte("body bytes") + mustNewGRPCClient := func(t *testing.T, exporterMock *mocks.OTLPExporter) *Client { + // Override exporter factory which is used by Client + exporterFactory := func(...otlploggrpc.Option) (sdklog.Exporter, error) { + return exporterMock, nil + } + client, err := newGRPCClient(TestDefaultConfig(), exporterFactory) + if err != nil { + t.Fatalf("Error creating beholder client: %v", err) + } + return client + } + + mustNewHTTPClient := func(t *testing.T, exporterMock *mocks.OTLPExporter) *Client { + // Override exporter factory which is used by Client + exporterFactory := func(...otlploghttp.Option) 
(sdklog.Exporter, error) { + return exporterMock, nil + } + client, err := newHTTPClient(TestDefaultConfigHTTPClient(), exporterFactory) + if err != nil { + t.Fatalf("Error creating beholder client: %v", err) + } + return client + } + testCases := []struct { name string makeCustomAttributes func() map[string]any @@ -62,9 +90,24 @@ func TestClient(t *testing.T) { exporterMockErrorCount int exporterOutputExpected bool messageGenerator func(client *Client, messageBody []byte, customAttributes map[string]any) + mustNewGrpcClient func(*testing.T, *mocks.OTLPExporter) *Client }{ { - name: "Test Emit", + name: "Test Emit (GRPC Client)", + makeCustomAttributes: defaultCustomAttributes, + messageBody: defaultMessageBody, + messageCount: 10, + exporterMockErrorCount: 0, + exporterOutputExpected: true, + messageGenerator: func(client *Client, messageBody []byte, customAttributes map[string]any) { + err := client.Emitter.Emit(tests.Context(t), messageBody, customAttributes) + assert.NoError(t, err) + }, + mustNewGrpcClient: mustNewGRPCClient, + }, + + { + name: "Test Emit (HTTP Client)", makeCustomAttributes: defaultCustomAttributes, messageBody: defaultMessageBody, messageCount: 10, @@ -74,6 +117,7 @@ func TestClient(t *testing.T) { err := client.Emitter.Emit(tests.Context(t), messageBody, customAttributes) assert.NoError(t, err) }, + mustNewGrpcClient: mustNewHTTPClient, }, } @@ -82,29 +126,23 @@ func TestClient(t *testing.T) { exporterMock := mocks.NewOTLPExporter(t) defer exporterMock.AssertExpectations(t) - // Override exporter factory which is used by Client - exporterFactory := func(...otlploggrpc.Option) (sdklog.Exporter, error) { - return exporterMock, nil - } - client, err := newClient(TestDefaultConfig(), exporterFactory) - if err != nil { - t.Fatalf("Error creating beholder client: %v", err) - } + client := tc.mustNewGrpcClient(t, exporterMock) + otel.SetErrorHandler(otelMustNotErr(t)) // Number of exported messages exportedMessageCount := 0 // Simulate exporter 
error if configured if tc.exporterMockErrorCount > 0 { - exporterMock.On("Export", mock.Anything, mock.Anything).Return(fmt.Errorf("an error occurred")).Times(tc.exporterMockErrorCount) + exporterMock.On("Export", mock.Anything, mock.Anything).Return(errors.New("an error occurred")).Times(tc.exporterMockErrorCount) } customAttributes := tc.makeCustomAttributes() if tc.exporterOutputExpected { exporterMock.On("Export", mock.Anything, mock.Anything).Return(nil).Times(tc.messageCount). Run(func(args mock.Arguments) { - assert.IsType(t, args.Get(1), []sdklog.Record{}, "Record type mismatch") + assert.IsType(t, []sdklog.Record{}, args.Get(1), "Record type mismatch") records := args.Get(1).([]sdklog.Record) - assert.Equal(t, 1, len(records), "batching is disabled, expecte 1 record") + assert.Len(t, records, 1, "batching is disabled, expecte 1 record") record := records[0] assert.Equal(t, tc.messageBody, record.Body().AsBytes(), "Record body mismatch") actualAttributeKeys := map[string]struct{}{} @@ -117,7 +155,7 @@ func TestClient(t *testing.T) { } expectedKv := OtelAttr(key, expectedValue) equal := kv.Value.Equal(expectedKv.Value) - assert.True(t, equal, fmt.Sprintf("Record attributes mismatch for key %v", key)) + assert.True(t, equal, "Record attributes mismatch for key %v", key) return true }) for key := range customAttributes { @@ -138,7 +176,7 @@ func TestClient(t *testing.T) { func TestEmitterMessageValidation(t *testing.T) { getEmitter := func(exporterMock *mocks.OTLPExporter) Emitter { - client, err := newClient( + client, err := newGRPCClient( TestDefaultConfig(), // Override exporter factory which is used by Client func(...otlploggrpc.Option) (sdklog.Exporter, error) { @@ -167,15 +205,69 @@ func TestEmitterMessageValidation(t *testing.T) { { name: "Invalid URI", attrs: Attributes{ + "beholder_domain": "TestDomain", + "beholder_entity": "TestEntity", "beholder_data_schema": "example-schema", }, exporterCalledTimes: 0, expectedError: 
"'Metadata.BeholderDataSchema' Error:Field validation for 'BeholderDataSchema' failed on the 'uri' tag", }, { - name: "Valid URI", + name: "Invalid Beholder domain (double underscore)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity", + "beholder_domain": "Test__Domain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderDomain' Error:Field validation for 'BeholderDomain' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder domain (special characters)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity", + "beholder_domain": "TestDomain*$", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderDomain' Error:Field validation for 'BeholderDomain' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder entity (double underscore)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "Test__Entity", + "beholder_domain": "TestDomain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderEntity' Error:Field validation for 'BeholderEntity' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder entity (special characters)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity*$", + "beholder_domain": "TestDomain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderEntity' Error:Field validation for 'BeholderEntity' failed on the 'domain_entity' tag", + }, + { + name: "Valid Attributes", + exporterCalledTimes: 1, + attrs: Attributes{ + "beholder_domain": "TestDomain", + "beholder_entity": "TestEntity", + "beholder_data_schema": "/example-schema/versions/1", + }, + expectedError: "", + }, + { + name: "Valid Attributes (special characters)", exporterCalledTimes: 1, attrs: Attributes{ + "beholder_domain": "Test.Domain_42-1", + "beholder_entity": 
"Test.Entity_42-1", "beholder_data_schema": "/example-schema/versions/1", }, expectedError: "", @@ -194,9 +286,9 @@ func TestEmitterMessageValidation(t *testing.T) { err := emitter.Emit(tests.Context(t), message.Body, tc.attrs) // Assert expectations if tc.expectedError != "" { - assert.ErrorContains(t, err, tc.expectedError) + require.ErrorContains(t, err, tc.expectedError) } else { - assert.NoError(t, err) + require.NoError(t, err) } if tc.exporterCalledTimes > 0 { exporterMock.AssertExpectations(t) @@ -213,10 +305,10 @@ func TestClient_Close(t *testing.T) { defer exporterMock.AssertExpectations(t) client, err := NewStdoutClient() - assert.NoError(t, err) + require.NoError(t, err) err = client.Close() - assert.NoError(t, err) + require.NoError(t, err) exporterMock.AssertExpectations(t) } @@ -226,7 +318,7 @@ func TestClient_ForPackage(t *testing.T) { defer exporterMock.AssertExpectations(t) var b strings.Builder client, err := NewWriterClient(&b) - assert.NoError(t, err) + require.NoError(t, err) clientForTest := client.ForPackage("TestClient_ForPackage") // Log @@ -252,3 +344,40 @@ func TestClient_ForPackage(t *testing.T) { func otelMustNotErr(t *testing.T) otel.ErrorHandlerFunc { return func(err error) { t.Fatalf("otel error: %v", err) } } + +func TestNewClient(t *testing.T) { + t.Run("both endpoints set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterGRPCEndpoint: "grpc-endpoint", + OtelExporterHTTPEndpoint: "http-endpoint", + }) + require.Error(t, err) + assert.Nil(t, client) + assert.Equal(t, "only one exporter endpoint should be set", err.Error()) + }) + + t.Run("no endpoints set", func(t *testing.T) { + client, err := NewClient(Config{}) + require.Error(t, err) + assert.Nil(t, client) + assert.Equal(t, "at least one exporter endpoint should be set", err.Error()) + }) + + t.Run("GRPC endpoint set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterGRPCEndpoint: "grpc-endpoint", + }) + require.NoError(t, err) + 
assert.NotNil(t, client) + assert.IsType(t, &Client{}, client) + }) + + t.Run("HTTP endpoint set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterHTTPEndpoint: "http-endpoint", + }) + require.NoError(t, err) + assert.NotNil(t, client) + assert.IsType(t, &Client{}, client) + }) +} diff --git a/pkg/beholder/config.go b/pkg/beholder/config.go index b80021b44..c01256b1c 100644 --- a/pkg/beholder/config.go +++ b/pkg/beholder/config.go @@ -4,6 +4,7 @@ import ( "time" otelattr "go.opentelemetry.io/otel/attribute" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" sdktrace "go.opentelemetry.io/otel/sdk/trace" ) @@ -11,25 +12,62 @@ type Config struct { InsecureConnection bool CACertFile string OtelExporterGRPCEndpoint string + OtelExporterHTTPEndpoint string // OTel Resource ResourceAttributes []otelattr.KeyValue // Message Emitter - EmitterExportTimeout time.Duration - // Batch processing is enabled by default - // Disable it only for testing - EmitterBatchProcessor bool + EmitterExportTimeout time.Duration + EmitterExportInterval time.Duration + EmitterExportMaxBatchSize int + EmitterMaxQueueSize int + EmitterBatchProcessor bool // Enabled by default. Disable only for testing. + // OTel Trace TraceSampleRatio float64 TraceBatchTimeout time.Duration TraceSpanExporter sdktrace.SpanExporter // optional additional exporter + TraceRetryConfig *RetryConfig + // OTel Metric MetricReaderInterval time.Duration + MetricRetryConfig *RetryConfig + MetricViews []sdkmetric.View + // OTel Log - LogExportTimeout time.Duration - // Batch processing is enabled by default - // Disable it only for testing - LogBatchProcessor bool + LogExportTimeout time.Duration + LogExportInterval time.Duration + LogExportMaxBatchSize int + LogMaxQueueSize int + LogBatchProcessor bool // Enabled by default. Disable only for testing. 
+ // Retry config for shared log exporter, used by Emitter and Logger + LogRetryConfig *RetryConfig + + // Auth + AuthPublicKeyHex string + AuthHeaders map[string]string +} + +type RetryConfig struct { + // InitialInterval the time to wait after the first failure before + // retrying. + InitialInterval time.Duration + // MaxInterval is the upper bound on backoff interval. Once this value is + // reached the delay between consecutive retries will always be + // `MaxInterval`. + MaxInterval time.Duration + // MaxElapsedTime is the maximum amount of time (including retries) spent + // trying to send a request/batch. Once this value is reached, the data + // is discarded. + // Set to zero to disable retry + MaxElapsedTime time.Duration +} + +// Same defaults as used by the OTel SDK +var defaultRetryConfig = RetryConfig{ + InitialInterval: 5 * time.Second, + MaxInterval: 30 * time.Second, + MaxElapsedTime: 1 * time.Minute, // Retry is enabled } const ( @@ -48,16 +86,28 @@ func DefaultConfig() Config { // Resource ResourceAttributes: defaultOtelAttributes, // Message Emitter - EmitterExportTimeout: 1 * time.Second, - EmitterBatchProcessor: true, + EmitterExportTimeout: 30 * time.Second, + EmitterExportMaxBatchSize: 512, + EmitterExportInterval: 1 * time.Second, + EmitterMaxQueueSize: 2048, + EmitterBatchProcessor: true, + // OTel message log exporter retry config + LogRetryConfig: defaultRetryConfig.Copy(), // Trace TraceSampleRatio: 1, TraceBatchTimeout: 1 * time.Second, + // OTel trace exporter retry config + TraceRetryConfig: defaultRetryConfig.Copy(), // Metric MetricReaderInterval: 1 * time.Second, + // OTel metric exporter retry config + MetricRetryConfig: defaultRetryConfig.Copy(), // Log - LogExportTimeout: 1 * time.Second, - LogBatchProcessor: true, + LogExportTimeout: 30 * time.Second, + LogExportMaxBatchSize: 512, + LogExportInterval: 1 * time.Second, + LogMaxQueueSize: 2048, + LogBatchProcessor: true, } } @@ -66,5 +116,54 @@ func TestDefaultConfig() Config { 
// Should be only disabled for testing config.EmitterBatchProcessor = false config.LogBatchProcessor = false + // Retries are disabled for testing + config.LogRetryConfig.MaxElapsedTime = 0 // Retry is disabled + config.TraceRetryConfig.MaxElapsedTime = 0 // Retry is disabled + config.MetricRetryConfig.MaxElapsedTime = 0 // Retry is disabled + return config +} + +func TestDefaultConfigHTTPClient() Config { + config := DefaultConfig() + // Should be only disabled for testing + config.EmitterBatchProcessor = false + config.LogBatchProcessor = false + config.OtelExporterGRPCEndpoint = "" + config.OtelExporterHTTPEndpoint = "localhost:4318" return config } + +func (c *RetryConfig) Copy() *RetryConfig { + newConfig := *c + return &newConfig +} + +// Calculate if retry is enabled +func (c *RetryConfig) Enabled() bool { + if c == nil { + return false + } + return c.InitialInterval > 0 && c.MaxInterval > 0 && c.MaxElapsedTime > 0 +} + +// Implement getters for fields to avoid nil pointer dereference in case the config is not set +func (c *RetryConfig) GetInitialInterval() time.Duration { + if c == nil { + return 0 + } + return c.InitialInterval +} + +func (c *RetryConfig) GetMaxInterval() time.Duration { + if c == nil { + return 0 + } + return c.MaxInterval +} + +func (c *RetryConfig) GetMaxElapsedTime() time.Duration { + if c == nil { + return 0 + } + return c.MaxElapsedTime +} diff --git a/pkg/beholder/config_test.go b/pkg/beholder/config_test.go index 5bc81c7e6..3b4fa86f1 100644 --- a/pkg/beholder/config_test.go +++ b/pkg/beholder/config_test.go @@ -18,24 +18,44 @@ func ExampleConfig() { InsecureConnection: true, CACertFile: "", OtelExporterGRPCEndpoint: "localhost:4317", + OtelExporterHTTPEndpoint: "localhost:4318", // Resource ResourceAttributes: []otelattr.KeyValue{ otelattr.String("package_name", packageName), otelattr.String("sender", "beholderclient"), }, // Message Emitter - EmitterExportTimeout: 1 * time.Second, - EmitterBatchProcessor: true, + 
EmitterExportTimeout: 1 * time.Second, + EmitterExportMaxBatchSize: 512, + EmitterExportInterval: 1 * time.Second, + EmitterMaxQueueSize: 2048, + EmitterBatchProcessor: true, + // OTel message log exporter retry config + LogRetryConfig: nil, // Trace TraceSampleRatio: 1, TraceBatchTimeout: 1 * time.Second, + // OTel trace exporter retry config + TraceRetryConfig: nil, // Metric MetricReaderInterval: 1 * time.Second, + // OTel metric exporter retry config + MetricRetryConfig: nil, // Log - LogExportTimeout: 1 * time.Second, - LogBatchProcessor: true, + LogExportTimeout: 1 * time.Second, + LogExportMaxBatchSize: 512, + LogExportInterval: 1 * time.Second, + LogMaxQueueSize: 2048, + LogBatchProcessor: true, } - fmt.Printf("%+v", config) + fmt.Printf("%+v\n", config) + config.LogRetryConfig = &beholder.RetryConfig{ + InitialInterval: 5 * time.Second, + MaxInterval: 30 * time.Second, + MaxElapsedTime: 1 * time.Minute, // Set to zero to disable retry + } + fmt.Printf("%+v\n", *config.LogRetryConfig) // Output: - // {InsecureConnection:true CACertFile: OtelExporterGRPCEndpoint:localhost:4317 ResourceAttributes:[{Key:package_name Value:{vtype:4 numeric:0 stringly:beholder slice:}} {Key:sender Value:{vtype:4 numeric:0 stringly:beholderclient slice:}}] EmitterExportTimeout:1s EmitterBatchProcessor:true TraceSampleRatio:1 TraceBatchTimeout:1s TraceSpanExporter: MetricReaderInterval:1s LogExportTimeout:1s LogBatchProcessor:true} + // {InsecureConnection:true CACertFile: OtelExporterGRPCEndpoint:localhost:4317 OtelExporterHTTPEndpoint:localhost:4318 ResourceAttributes:[{Key:package_name Value:{vtype:4 numeric:0 stringly:beholder slice:}} {Key:sender Value:{vtype:4 numeric:0 stringly:beholderclient slice:}}] EmitterExportTimeout:1s EmitterExportInterval:1s EmitterExportMaxBatchSize:512 EmitterMaxQueueSize:2048 EmitterBatchProcessor:true TraceSampleRatio:1 TraceBatchTimeout:1s TraceSpanExporter: TraceRetryConfig: MetricReaderInterval:1s MetricRetryConfig: MetricViews:[] 
LogExportTimeout:1s LogExportInterval:1s LogExportMaxBatchSize:512 LogMaxQueueSize:2048 LogBatchProcessor:true LogRetryConfig: AuthPublicKeyHex: AuthHeaders:map[]} + // {InitialInterval:5s MaxInterval:30s MaxElapsedTime:1m0s} } diff --git a/pkg/beholder/example_test.go b/pkg/beholder/example_test.go index 04c895104..2045c3bb8 100644 --- a/pkg/beholder/example_test.go +++ b/pkg/beholder/example_test.go @@ -45,6 +45,8 @@ func ExampleNewClient() { for range 10 { err := beholder.GetEmitter().Emit(context.Background(), payloadBytes, "beholder_data_schema", "/custom-message/versions/1", // required + "beholder_domain", "ExampleDomain", // required + "beholder_entity", "ExampleEntity", // required "beholder_data_type", "custom_message", "foo", "bar", ) @@ -105,6 +107,8 @@ func ExampleNewNoopClient() { err := beholder.GetEmitter().Emit(context.Background(), []byte("test message"), "beholder_data_schema", "/custom-message/versions/1", // required + "beholder_domain", "ExampleDomain", // required + "beholder_entity", "ExampleEntity", // required ) if err != nil { log.Printf("Error emitting message: %v", err) diff --git a/pkg/beholder/global_test.go b/pkg/beholder/global_test.go index 1dbc9e373..2bb5c51f8 100644 --- a/pkg/beholder/global_test.go +++ b/pkg/beholder/global_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel" otelattribute "go.opentelemetry.io/otel/attribute" @@ -76,7 +77,7 @@ func TestClient_SetGlobalOtelProviders(t *testing.T) { var b strings.Builder client, err := beholder.NewWriterClient(&b) - assert.NoError(t, err) + require.NoError(t, err) // Set global Otel Client beholder.SetClient(client) diff --git a/pkg/beholder/httpclient.go b/pkg/beholder/httpclient.go new file mode 100644 index 000000000..7427af5d8 --- /dev/null +++ b/pkg/beholder/httpclient.go @@ -0,0 +1,266 @@ +package beholder + +import ( + "context" + "crypto/tls" + "crypto/x509" + "errors" + "os" + + 
"go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + sdklog "go.opentelemetry.io/otel/sdk/log" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + sdkresource "go.opentelemetry.io/otel/sdk/resource" + sdktrace "go.opentelemetry.io/otel/sdk/trace" +) + +// Used for testing to override the default exporter +type otlploghttpFactory func(options ...otlploghttp.Option) (sdklog.Exporter, error) + +func newCertFromFile(certFile string) (*x509.CertPool, error) { + b, err := os.ReadFile(certFile) + if err != nil { + return nil, err + } + cp := x509.NewCertPool() + if !cp.AppendCertsFromPEM(b) { + return nil, errors.New("credentials: failed to append certificates") + } + return cp, nil +} + +func newHTTPClient(cfg Config, otlploghttpNew otlploghttpFactory) (*Client, error) { + baseResource, err := newOtelResource(cfg) + if err != nil { + return nil, err + } + var tlsConfig *tls.Config + if !cfg.InsecureConnection { + tlsConfig = &tls.Config{ + MinVersion: tls.VersionTLS12, + } + if cfg.CACertFile != "" { + rootCAs, e := newCertFromFile(cfg.CACertFile) + if e != nil { + return nil, e + } + tlsConfig.RootCAs = rootCAs + } + } + tlsConfigOption := otlploghttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlploghttp.WithTLSClientConfig(tlsConfig) + } + opts := []otlploghttp.Option{ + tlsConfigOption, + otlploghttp.WithEndpoint(cfg.OtelExporterHTTPEndpoint), + otlploghttp.WithHeaders(cfg.AuthHeaders), + } + if cfg.LogRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlploghttp.WithRetry(otlploghttp.RetryConfig{ + Enabled: cfg.LogRetryConfig.Enabled(), + InitialInterval: cfg.LogRetryConfig.GetInitialInterval(), + MaxInterval: cfg.LogRetryConfig.GetMaxInterval(), + MaxElapsedTime: cfg.LogRetryConfig.GetMaxElapsedTime(), + })) + } 
+ sharedLogExporter, err := otlploghttpNew(opts...) + if err != nil { + return nil, err + } + + // Logger + var loggerProcessor sdklog.Processor + if cfg.LogBatchProcessor { + batchProcessorOpts := []sdklog.BatchProcessorOption{} + if cfg.LogExportTimeout > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportTimeout(cfg.LogExportTimeout)) // Default is 30s + } + if cfg.LogExportMaxBatchSize > 0 { + + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportMaxBatchSize(cfg.LogExportMaxBatchSize)) // Default is 512, must be <= maxQueueSize + } + if cfg.LogExportInterval > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportInterval(cfg.LogExportInterval)) // Default is 1s + } + if cfg.LogMaxQueueSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithMaxQueueSize(cfg.LogMaxQueueSize)) // Default is 2048 + } + loggerProcessor = sdklog.NewBatchProcessor( + sharedLogExporter, + batchProcessorOpts..., + ) + } else { + loggerProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) + } + loggerAttributes := []attribute.KeyValue{ + attribute.String("beholder_data_type", "zap_log_message"), + } + loggerResource, err := sdkresource.Merge( + sdkresource.NewSchemaless(loggerAttributes...), + baseResource, + ) + if err != nil { + return nil, err + } + loggerProvider := sdklog.NewLoggerProvider( + sdklog.WithResource(loggerResource), + sdklog.WithProcessor(loggerProcessor), + ) + logger := loggerProvider.Logger(defaultPackageName) + + // Tracer + tracerProvider, err := newHTTPTracerProvider(cfg, baseResource, tlsConfig) + if err != nil { + return nil, err + } + tracer := tracerProvider.Tracer(defaultPackageName) + + // Meter + meterProvider, err := newHTTPMeterProvider(cfg, baseResource, tlsConfig) + if err != nil { + return nil, err + } + meter := meterProvider.Meter(defaultPackageName) + + // Message Emitter + var messageLogProcessor sdklog.Processor + if cfg.EmitterBatchProcessor { + batchProcessorOpts := 
[]sdklog.BatchProcessorOption{} + if cfg.EmitterExportTimeout > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportTimeout(cfg.EmitterExportTimeout)) // Default is 30s + } + if cfg.EmitterExportMaxBatchSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportMaxBatchSize(cfg.EmitterExportMaxBatchSize)) // Default is 512, must be <= maxQueueSize + } + if cfg.EmitterExportInterval > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithExportInterval(cfg.EmitterExportInterval)) // Default is 1s + } + if cfg.EmitterMaxQueueSize > 0 { + batchProcessorOpts = append(batchProcessorOpts, sdklog.WithMaxQueueSize(cfg.EmitterMaxQueueSize)) // Default is 2048 + } + messageLogProcessor = sdklog.NewBatchProcessor( + sharedLogExporter, + batchProcessorOpts..., + ) + } else { + messageLogProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) + } + + messageAttributes := []attribute.KeyValue{ + attribute.String("beholder_data_type", "custom_message"), + } + messageLoggerResource, err := sdkresource.Merge( + sdkresource.NewSchemaless(messageAttributes...), + baseResource, + ) + if err != nil { + return nil, err + } + + messageLoggerProvider := sdklog.NewLoggerProvider( + sdklog.WithResource(messageLoggerResource), + sdklog.WithProcessor(messageLogProcessor), + ) + messageLogger := messageLoggerProvider.Logger(defaultPackageName) + + emitter := messageEmitter{ + messageLogger: messageLogger, + } + + onClose := func() (err error) { + for _, provider := range []shutdowner{messageLoggerProvider, loggerProvider, tracerProvider, meterProvider, messageLoggerProvider} { + err = errors.Join(err, provider.Shutdown(context.Background())) + } + return + } + return &Client{cfg, logger, tracer, meter, emitter, loggerProvider, tracerProvider, meterProvider, messageLoggerProvider, onClose}, nil +} + +func newHTTPTracerProvider(config Config, resource *sdkresource.Resource, tlsConfig *tls.Config) (*sdktrace.TracerProvider, error) { + ctx := 
context.Background() + + tlsConfigOption := otlptracehttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlptracehttp.WithTLSClientConfig(tlsConfig) + } + exporterOpts := []otlptracehttp.Option{ + tlsConfigOption, + otlptracehttp.WithEndpoint(config.OtelExporterHTTPEndpoint), + otlptracehttp.WithHeaders(config.AuthHeaders), + } + if config.TraceRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + exporterOpts = append(exporterOpts, otlptracehttp.WithRetry(otlptracehttp.RetryConfig{ + Enabled: config.TraceRetryConfig.Enabled(), + InitialInterval: config.TraceRetryConfig.GetInitialInterval(), + MaxInterval: config.TraceRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.TraceRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is unused internally + exporter, err := otlptracehttp.New(ctx, exporterOpts...) + if err != nil { + return nil, err + } + batcherOpts := []sdktrace.BatchSpanProcessorOption{} + if config.TraceBatchTimeout > 0 { + batcherOpts = append(batcherOpts, sdktrace.WithBatchTimeout(config.TraceBatchTimeout)) // Default is 5s + } + opts := []sdktrace.TracerProviderOption{ + sdktrace.WithBatcher(exporter, batcherOpts...), + sdktrace.WithResource(resource), + sdktrace.WithSampler( + sdktrace.ParentBased( + sdktrace.TraceIDRatioBased(config.TraceSampleRatio), + ), + ), + } + if config.TraceSpanExporter != nil { + opts = append(opts, sdktrace.WithBatcher(config.TraceSpanExporter)) + } + return sdktrace.NewTracerProvider(opts...), nil +} + +func newHTTPMeterProvider(config Config, resource *sdkresource.Resource, tlsConfig *tls.Config) (*sdkmetric.MeterProvider, error) { + ctx := context.Background() + + tlsConfigOption := otlpmetrichttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlpmetrichttp.WithTLSClientConfig(tlsConfig) + } + opts := []otlpmetrichttp.Option{ + tlsConfigOption, + otlpmetrichttp.WithEndpoint(config.OtelExporterHTTPEndpoint), + 
 otlpmetrichttp.WithHeaders(config.AuthHeaders), + } + if config.MetricRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlpmetrichttp.WithRetry(otlpmetrichttp.RetryConfig{ + Enabled: config.MetricRetryConfig.Enabled(), + InitialInterval: config.MetricRetryConfig.GetInitialInterval(), + MaxInterval: config.MetricRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.MetricRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is unused internally + exporter, err := otlpmetrichttp.New(ctx, opts...) + if err != nil { + return nil, err + } + + mp := sdkmetric.NewMeterProvider( + sdkmetric.WithReader( + sdkmetric.NewPeriodicReader( + exporter, + sdkmetric.WithInterval(config.MetricReaderInterval), // Default is 10s + )), + sdkmetric.WithResource(resource), + sdkmetric.WithView(config.MetricViews...), + ) + return mp, nil +} diff --git a/pkg/beholder/internal/exporter.go b/pkg/beholder/internal/exporter.go index 271077a5c..033854dcc 100644 --- a/pkg/beholder/internal/exporter.go +++ b/pkg/beholder/internal/exporter.go @@ -4,12 +4,17 @@ import ( "context" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" + sdklog "go.opentelemetry.io/otel/sdk/log" ) var _ sdklog.Exporter = (*otlploggrpc.Exporter)(nil) var _ OTLPExporter = (*otlploggrpc.Exporter)(nil) +var _ sdklog.Exporter = (*otlploghttp.Exporter)(nil) +var _ OTLPExporter = (*otlploghttp.Exporter)(nil) + // Copy of sdklog.Exporter interface, used for mocking type OTLPExporter interface { Export(ctx context.Context, records []sdklog.Record) error diff --git a/pkg/beholder/message.go b/pkg/beholder/message.go index 2b1d89d3a..7cb6f1bdb 100644 --- a/pkg/beholder/message.go +++ b/pkg/beholder/message.go @@ -1,7 +1,10 @@ package beholder import ( + "errors" "fmt" + "regexp" + "strings" "github.com/go-playground/validator/v10" "go.opentelemetry.io/otel/attribute" @@ -16,6 +19,8 @@ type 
Message struct { type Metadata struct { // REQUIRED FIELDS // Schema Registry URI to fetch schema + BeholderDomain string `validate:"required,domain_entity"` + BeholderEntity string `validate:"required,domain_entity"` BeholderDataSchema string `validate:"required,uri"` // OPTIONAL FIELDS @@ -55,6 +60,8 @@ func (m Metadata) Attributes() Attributes { "workflow_owner_address": m.WorkflowOwnerAddress, "workflow_spec_id": m.WorkflowSpecID, "workflow_execution_id": m.WorkflowExecutionID, + "beholder_domain": m.BeholderDomain, + "beholder_entity": m.BeholderEntity, "beholder_data_schema": m.BeholderDataSchema, "capability_contract_address": m.CapabilityContractAddress, "capability_id": m.CapabilityID, @@ -199,6 +206,10 @@ func (m *Metadata) FromAttributes(attrs Attributes) *Metadata { m.WorkflowSpecID = v.(string) case "workflow_execution_id": m.WorkflowExecutionID = v.(string) + case "beholder_domain": + m.BeholderDomain = v.(string) + case "beholder_entity": + m.BeholderEntity = v.(string) case "beholder_data_schema": m.BeholderDataSchema = v.(string) case "capability_contract_address": @@ -222,17 +233,44 @@ func NewMetadata(attrs Attributes) *Metadata { return m } -func (m *Metadata) Validate() error { +// validDomainAndEntityRegex allows for alphanumeric characters and ._- +var validDomainAndEntityRegex = regexp.MustCompile(`^[a-zA-Z0-9._-]+$`) + +func NewMetadataValidator() (*validator.Validate, error) { validate := validator.New() + err := validate.RegisterValidation("domain_entity", func(fl validator.FieldLevel) bool { + str, isStr := fl.Field().Interface().(string) + if !isStr { + return false + } + if strings.Contains(str, "__") { + return false + } + if !validDomainAndEntityRegex.MatchString(str) { + return false + } + return true + }) + if err != nil { + return nil, err + } + return validate, nil +} + +func (m *Metadata) Validate() error { + validate, err := NewMetadataValidator() + if err != nil { + return err + } return validate.Struct(m) } func (e Message) 
Validate() error { if e.Body == nil { - return fmt.Errorf("message body is required") + return errors.New("message body is required") } if len(e.Attrs) == 0 { - return fmt.Errorf("message attributes are required") + return errors.New("message attributes are required") } metadata := NewMetadata(e.Attrs) return metadata.Validate() diff --git a/pkg/beholder/message_test.go b/pkg/beholder/message_test.go index 1f8f990fb..266908733 100644 --- a/pkg/beholder/message_test.go +++ b/pkg/beholder/message_test.go @@ -6,7 +6,6 @@ import ( "strings" "testing" - "github.com/go-playground/validator/v10" "github.com/stretchr/testify/assert" otellog "go.opentelemetry.io/otel/log" @@ -110,6 +109,8 @@ func testMetadata() beholder.Metadata { WorkflowOwnerAddress: "test_owner_address", WorkflowSpecID: "test_spec_id", WorkflowExecutionID: "test_execution_id", + BeholderDomain: "TestDomain", // required field + BeholderEntity: "TestEntity", // required field BeholderDataSchema: "/schemas/ids/test_schema", // required field, URI CapabilityContractAddress: "test_contract_address", CapabilityID: "test_capability_id", @@ -123,14 +124,20 @@ func ExampleMetadata() { fmt.Printf("%#v\n", m) fmt.Println(m.Attributes()) // Output: - // beholder.Metadata{BeholderDataSchema:"/schemas/ids/test_schema", NodeVersion:"v1.0.0", NodeCsaKey:"test_key", NodeCsaSignature:"test_signature", DonID:"test_don_id", NetworkName:[]string{"test_network"}, WorkflowID:"test_workflow_id", WorkflowName:"test_workflow_name", WorkflowOwnerAddress:"test_owner_address", WorkflowSpecID:"test_spec_id", WorkflowExecutionID:"test_execution_id", CapabilityContractAddress:"test_contract_address", CapabilityID:"test_capability_id", CapabilityVersion:"test_capability_version", CapabilityName:"test_capability_name", NetworkChainID:"test_chain_id"} - // map[beholder_data_schema:/schemas/ids/test_schema capability_contract_address:test_contract_address capability_id:test_capability_id capability_name:test_capability_name 
capability_version:test_capability_version don_id:test_don_id network_chain_id:test_chain_id network_name:[test_network] node_csa_key:test_key node_csa_signature:test_signature node_version:v1.0.0 workflow_execution_id:test_execution_id workflow_id:test_workflow_id workflow_name:test_workflow_name workflow_owner_address:test_owner_address workflow_spec_id:test_spec_id] + // beholder.Metadata{BeholderDomain:"TestDomain", BeholderEntity:"TestEntity", BeholderDataSchema:"/schemas/ids/test_schema", NodeVersion:"v1.0.0", NodeCsaKey:"test_key", NodeCsaSignature:"test_signature", DonID:"test_don_id", NetworkName:[]string{"test_network"}, WorkflowID:"test_workflow_id", WorkflowName:"test_workflow_name", WorkflowOwnerAddress:"test_owner_address", WorkflowSpecID:"test_spec_id", WorkflowExecutionID:"test_execution_id", CapabilityContractAddress:"test_contract_address", CapabilityID:"test_capability_id", CapabilityVersion:"test_capability_version", CapabilityName:"test_capability_name", NetworkChainID:"test_chain_id"} + // map[beholder_data_schema:/schemas/ids/test_schema beholder_domain:TestDomain beholder_entity:TestEntity capability_contract_address:test_contract_address capability_id:test_capability_id capability_name:test_capability_name capability_version:test_capability_version don_id:test_don_id network_chain_id:test_chain_id network_name:[test_network] node_csa_key:test_key node_csa_signature:test_signature node_version:v1.0.0 workflow_execution_id:test_execution_id workflow_id:test_workflow_id workflow_name:test_workflow_name workflow_owner_address:test_owner_address workflow_spec_id:test_spec_id] } -func ExampleValidate() { - validate := validator.New() +func ExampleMetadata_Validate() { + validate, err := beholder.NewMetadataValidator() + if err != nil { + fmt.Println(err) + } - metadata := beholder.Metadata{} + metadata := beholder.Metadata{ + BeholderDomain: "TestDomain", + BeholderEntity: "TestEntity", + } if err := validate.Struct(metadata); err != nil { 
fmt.Println(err) } @@ -174,7 +181,7 @@ func TestMessage_OtelAttributes(t *testing.T) { return strings.Compare(a.Key, b.Key) }) - assert.Equal(t, 3, len(otelAttrs)) + assert.Len(t, otelAttrs, 3) assert.Equal(t, "key_int", otelAttrs[0].Key) assert.Equal(t, int64(1), otelAttrs[0].Value.AsInt64()) assert.Equal(t, "key_string", otelAttrs[1].Key) diff --git a/pkg/beholder/noop_test.go b/pkg/beholder/noop_test.go index ee1fb7209..7258a0208 100644 --- a/pkg/beholder/noop_test.go +++ b/pkg/beholder/noop_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel/attribute" otellog "go.opentelemetry.io/otel/log" "go.opentelemetry.io/otel/trace" @@ -22,7 +23,7 @@ func TestNoopClient(t *testing.T) { err := noopClient.Emitter.Emit(tests.Context(t), []byte("test"), "key1", "value1", ) - assert.NoError(t, err) + require.NoError(t, err) // Logger noopClient.Logger.Emit(tests.Context(t), otellog.Record{}) @@ -38,7 +39,7 @@ func TestNoopClient(t *testing.T) { if err != nil { log.Fatalf("failed to create new gauge") } - assert.NoError(t, err) + require.NoError(t, err) // Use the counter and gauge for metrics within application logic counter.Add(tests.Context(t), 1) diff --git a/pkg/beholder/pb/base_message.pb.go b/pkg/beholder/pb/base_message.pb.go index 773d2c873..c9a69c3b0 100644 --- a/pkg/beholder/pb/base_message.pb.go +++ b/pkg/beholder/pb/base_message.pb.go @@ -7,7 +7,6 @@ package pb import ( - pb "github.com/smartcontractkit/chainlink-common/pkg/values/pb" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" @@ -23,7 +22,8 @@ const ( // BaseMessage is a basic custom message, allowing the consumer to send // a string msg with some key-value pairs for labels. 
Consumers can consume -// BaseMessage directly or extend it by addding use-case specific fields +// BaseMessage directly or extend it by adding use-case specific fields +// NOTE: do not compose protos for Beholder until INFOPLAT-1386 is completed type BaseMessage struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -32,7 +32,7 @@ type BaseMessage struct { Msg string `protobuf:"bytes,1,opt,name=msg,proto3" json:"msg,omitempty"` // https://protobuf.dev/programming-guides/proto3/#maps // In go: if Value is empty for a key, nothing will be serialized - Labels *pb.Map `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *BaseMessage) Reset() { @@ -74,7 +74,7 @@ func (x *BaseMessage) GetMsg() string { return "" } -func (x *BaseMessage) GetLabels() *pb.Map { +func (x *BaseMessage) GetLabels() map[string]string { if x != nil { return x.Labels } @@ -86,17 +86,20 @@ var File_beholder_pb_base_message_proto protoreflect.FileDescriptor var file_beholder_pb_base_message_proto_rawDesc = []byte{ 0x0a, 0x1e, 0x62, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x62, 0x2f, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x02, 0x70, 0x62, 0x1a, 0x16, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x2f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x44, 0x0a, 0x0b, - 0x42, 0x61, 0x73, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6d, - 0x73, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6d, 0x73, 0x67, 0x12, 0x23, 0x0a, - 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x6c, 0x61, 0x62, 
0x65, - 0x6c, 0x73, 0x42, 0x3f, 0x5a, 0x3d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, - 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x62, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x2f, - 0x70, 0x62, 0x2f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x12, 0x02, 0x70, 0x62, 0x22, 0x8f, 0x01, 0x0a, 0x0b, 0x42, 0x61, 0x73, 0x65, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x73, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x6d, 0x73, 0x67, 0x12, 0x33, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x62, 0x2e, 0x42, 0x61, 0x73, 0x65, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, + 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x3f, 0x5a, 0x3d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x62, 0x65, 0x68, 0x6f, 0x6c, + 0x64, 0x65, 0x72, 0x2f, 0x70, 0x62, 0x2f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -111,13 +114,13 @@ func file_beholder_pb_base_message_proto_rawDescGZIP() []byte { return file_beholder_pb_base_message_proto_rawDescData } -var 
file_beholder_pb_base_message_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_beholder_pb_base_message_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_beholder_pb_base_message_proto_goTypes = []interface{}{ (*BaseMessage)(nil), // 0: pb.BaseMessage - (*pb.Map)(nil), // 1: values.Map + nil, // 1: pb.BaseMessage.LabelsEntry } var file_beholder_pb_base_message_proto_depIdxs = []int32{ - 1, // 0: pb.BaseMessage.labels:type_name -> values.Map + 1, // 0: pb.BaseMessage.labels:type_name -> pb.BaseMessage.LabelsEntry 1, // [1:1] is the sub-list for method output_type 1, // [1:1] is the sub-list for method input_type 1, // [1:1] is the sub-list for extension type_name @@ -150,7 +153,7 @@ func file_beholder_pb_base_message_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_beholder_pb_base_message_proto_rawDesc, NumEnums: 0, - NumMessages: 1, + NumMessages: 2, NumExtensions: 0, NumServices: 0, }, diff --git a/pkg/beholder/pb/base_message.proto b/pkg/beholder/pb/base_message.proto index 3d39f8d7a..0913a77ee 100644 --- a/pkg/beholder/pb/base_message.proto +++ b/pkg/beholder/pb/base_message.proto @@ -1,17 +1,16 @@ syntax = "proto3"; -import "values/pb/values.proto"; - option go_package = "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb/"; package pb; // BaseMessage is a basic custom message, allowing the consumer to send // a string msg with some key-value pairs for labels. 
 Consumers can consume -// BaseMessage directly or extend it by addding use-case specific fields +// BaseMessage directly or extend it by adding use-case specific fields +// NOTE: do not compose protos for Beholder until INFOPLAT-1386 is completed message BaseMessage { string msg=1; // https://protobuf.dev/programming-guides/proto3/#maps // In go: if Value is empty for a key, nothing will be serialized - values.Map labels = 2; + map<string, string> labels = 2; } diff --git a/pkg/capabilities/actions/readcontract/action_builders_generated.go b/pkg/capabilities/actions/readcontract/action_builders_generated.go new file mode 100644 index 000000000..dad3f4d49 --- /dev/null +++ b/pkg/capabilities/actions/readcontract/action_builders_generated.go @@ -0,0 +1,90 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +package readcontract + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func (cfg Config) New(w *sdk.WorkflowSpecFactory, ref string, input ActionInput) OutputCap { + + def := sdk.StepDefinition{ + ID: "read-contract-action@1.0.0", Ref: ref, + Inputs: input.ToSteps(), + Config: map[string]any{ + "ContractAddress": cfg.ContractAddress, + "ContractName": cfg.ContractName, + "ContractReaderConfig": cfg.ContractReaderConfig, + "ReadIdentifier": cfg.ReadIdentifier, + }, + CapabilityType: capabilities.CapabilityTypeAction, + } + + step := sdk.Step[Output]{Definition: def} + raw := step.AddTo(w) + return OutputWrapper(raw) +} + +// OutputWrapper allows access to field from an sdk.CapDefinition[Output] +func OutputWrapper(raw sdk.CapDefinition[Output]) OutputCap { + wrapped, ok := raw.(OutputCap) + if ok { + return wrapped + } + return &outputCap{CapDefinition: raw} +} + +type OutputCap interface { + sdk.CapDefinition[Output] + LatestValue() sdk.CapDefinition[any] + private() +} + +type outputCap struct { + sdk.CapDefinition[Output] +}
+ +func (*outputCap) private() {} +func (c *outputCap) LatestValue() sdk.CapDefinition[any] { + return sdk.AccessField[Output, any](c.CapDefinition, "LatestValue") +} + +func ConstantOutput(value Output) OutputCap { + return &outputCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewOutputFromFields( + latestValue sdk.CapDefinition[any]) OutputCap { + return &simpleOutput{ + CapDefinition: sdk.ComponentCapDefinition[Output]{ + "LatestValue": latestValue.Ref(), + }, + latestValue: latestValue, + } +} + +type simpleOutput struct { + sdk.CapDefinition[Output] + latestValue sdk.CapDefinition[any] +} + +func (c *simpleOutput) LatestValue() sdk.CapDefinition[any] { + return c.latestValue +} + +func (c *simpleOutput) private() {} + +type ActionInput struct { + ConfidenceLevel sdk.CapDefinition[string] + Params sdk.CapDefinition[InputParams] +} + +func (input ActionInput) ToSteps() sdk.StepInputs { + return sdk.StepInputs{ + Mapping: map[string]any{ + "ConfidenceLevel": input.ConfidenceLevel.Ref(), + "Params": input.Params.Ref(), + }, + } +} diff --git a/pkg/capabilities/actions/readcontract/readcontract_action-schema.json b/pkg/capabilities/actions/readcontract/readcontract_action-schema.json new file mode 100644 index 000000000..de94037d6 --- /dev/null +++ b/pkg/capabilities/actions/readcontract/readcontract_action-schema.json @@ -0,0 +1,58 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/smartcontractkit/chainlink-common/pkg/capabilities/actions/readcontract/read-contract-action@1.0.0", + "$defs": { + "Config": { + "type": "object", + "properties": { + "ContractReaderConfig": { + "type": "string" + }, + "ReadIdentifier": { + "type": "string" + }, + "ContractAddress": { + "type": "string" + }, + "ContractName": { + "type": "string" + } + }, + "required": ["ContractReaderConfig", "ReadIdentifier", "ContractAddress", "ContractName"] + }, + "Input": { + "type": "object", + "properties": { + "ConfidenceLevel": { + 
"type": "string" + }, + "Params": { + "type": "object", + "additionalProperties": true + } + }, + "required": ["ConfidenceLevel", "Params"] + }, + "Output": { + "type": "object", + "properties": { + "LatestValue": { + "type": ["object", "string", "boolean", "null", "array"] + } + }, + "required": ["LatestValue"] + } + }, + "type": "object", + "properties": { + "Config": { + "$ref": "#/$defs/Config" + }, + "Inputs": { + "$ref": "#/$defs/Input" + }, + "Outputs": { + "$ref": "#/$defs/Output" + } + } +} \ No newline at end of file diff --git a/pkg/capabilities/actions/readcontract/readcontract_action_generated.go b/pkg/capabilities/actions/readcontract/readcontract_action_generated.go new file mode 100644 index 000000000..8f19f8da4 --- /dev/null +++ b/pkg/capabilities/actions/readcontract/readcontract_action_generated.go @@ -0,0 +1,115 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +package readcontract + +import ( + "encoding/json" + "fmt" +) + +type Action struct { + // Config corresponds to the JSON schema field "Config". + Config *Config `json:"Config,omitempty" yaml:"Config,omitempty" mapstructure:"Config,omitempty"` + + // Inputs corresponds to the JSON schema field "Inputs". + Inputs *Input `json:"Inputs,omitempty" yaml:"Inputs,omitempty" mapstructure:"Inputs,omitempty"` + + // Outputs corresponds to the JSON schema field "Outputs". + Outputs *Output `json:"Outputs,omitempty" yaml:"Outputs,omitempty" mapstructure:"Outputs,omitempty"` +} + +type Config struct { + // ContractAddress corresponds to the JSON schema field "ContractAddress". + ContractAddress string `json:"ContractAddress" yaml:"ContractAddress" mapstructure:"ContractAddress"` + + // ContractName corresponds to the JSON schema field "ContractName". + ContractName string `json:"ContractName" yaml:"ContractName" mapstructure:"ContractName"` + + // ContractReaderConfig corresponds to the JSON schema field + // "ContractReaderConfig". 
+ ContractReaderConfig string `json:"ContractReaderConfig" yaml:"ContractReaderConfig" mapstructure:"ContractReaderConfig"` + + // ReadIdentifier corresponds to the JSON schema field "ReadIdentifier". + ReadIdentifier string `json:"ReadIdentifier" yaml:"ReadIdentifier" mapstructure:"ReadIdentifier"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Config) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["ContractAddress"]; raw != nil && !ok { + return fmt.Errorf("field ContractAddress in Config: required") + } + if _, ok := raw["ContractName"]; raw != nil && !ok { + return fmt.Errorf("field ContractName in Config: required") + } + if _, ok := raw["ContractReaderConfig"]; raw != nil && !ok { + return fmt.Errorf("field ContractReaderConfig in Config: required") + } + if _, ok := raw["ReadIdentifier"]; raw != nil && !ok { + return fmt.Errorf("field ReadIdentifier in Config: required") + } + type Plain Config + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Config(plain) + return nil +} + +type Input struct { + // ConfidenceLevel corresponds to the JSON schema field "ConfidenceLevel". + ConfidenceLevel string `json:"ConfidenceLevel" yaml:"ConfidenceLevel" mapstructure:"ConfidenceLevel"` + + // Params corresponds to the JSON schema field "Params". + Params InputParams `json:"Params" yaml:"Params" mapstructure:"Params"` +} + +type InputParams map[string]interface{} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *Input) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["ConfidenceLevel"]; raw != nil && !ok { + return fmt.Errorf("field ConfidenceLevel in Input: required") + } + if _, ok := raw["Params"]; raw != nil && !ok { + return fmt.Errorf("field Params in Input: required") + } + type Plain Input + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Input(plain) + return nil +} + +type Output struct { + // LatestValue corresponds to the JSON schema field "LatestValue". + LatestValue interface{} `json:"LatestValue" yaml:"LatestValue" mapstructure:"LatestValue"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Output) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["LatestValue"]; raw != nil && !ok { + return fmt.Errorf("field LatestValue in Output: required") + } + type Plain Output + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Output(plain) + return nil +} diff --git a/pkg/capabilities/actions/readcontract/readcontracttest/action_mock_generated.go b/pkg/capabilities/actions/readcontract/readcontracttest/action_mock_generated.go new file mode 100644 index 000000000..ca5f1b321 --- /dev/null +++ b/pkg/capabilities/actions/readcontract/readcontracttest/action_mock_generated.go @@ -0,0 +1,27 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. 
+ +package readcontracttest + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/actions/readcontract" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" +) + +// Action registers a new capability mock with the runner +// if another mock is registered for the same capability with for a step, it will take priority for that step. +func Action(runner *testutils.Runner, fn func(input readcontract.Input) (readcontract.Output, error)) *testutils.Mock[readcontract.Input, readcontract.Output] { + mock := testutils.MockCapability[readcontract.Input, readcontract.Output]("read-contract-action@1.0.0", fn) + runner.MockCapability("read-contract-action@1.0.0", nil, mock) + return mock +} + +// ActionForStep registers a new capability mock with the runner, but only for a given step. +// if another mock was registered for the same capability without a step, this mock will take priority for that step. +func ActionForStep(runner *testutils.Runner, step string, mockFn func(input readcontract.Input) (readcontract.Output, error)) *testutils.Mock[readcontract.Input, readcontract.Output] { + fn := mockFn + mock := testutils.MockCapability[readcontract.Input, readcontract.Output]("read-contract-action@1.0.0", fn) + runner.MockCapability("read-contract-action@1.0.0", &step, mock) + return mock +} diff --git a/pkg/capabilities/capabilities.go b/pkg/capabilities/capabilities.go index 4c4e6c215..f1b6cdcab 100644 --- a/pkg/capabilities/capabilities.go +++ b/pkg/capabilities/capabilities.go @@ -65,12 +65,17 @@ type RequestMetadata struct { WorkflowName string WorkflowDonID uint32 WorkflowDonConfigVersion uint32 - ReferenceID string + // The step reference ID of the workflow + ReferenceID string + // Use DecodedWorkflowName if the human readable name needs to be exposed, such as for logging purposes. 
+ DecodedWorkflowName string } type RegistrationMetadata struct { WorkflowID string WorkflowOwner string + // The step reference ID of the workflow + ReferenceID string } // CapabilityRequest is a struct for the Execute request of a capability. @@ -345,6 +350,12 @@ type RemoteTargetConfig struct { RequestHashExcludedAttributes []string } +type RemoteExecutableConfig struct { + RequestHashExcludedAttributes []string + RegistrationRefresh time.Duration + RegistrationExpiry time.Duration +} + // NOTE: consider splitting this config into values stored in Registry (KS-118) // and values defined locally by Capability owners. func (c *RemoteTriggerConfig) ApplyDefaults() { @@ -368,8 +379,21 @@ func (c *RemoteTriggerConfig) ApplyDefaults() { } } +func (c *RemoteExecutableConfig) ApplyDefaults() { + if c == nil { + return + } + if c.RegistrationRefresh == 0 { + c.RegistrationRefresh = DefaultRegistrationRefresh + } + if c.RegistrationExpiry == 0 { + c.RegistrationExpiry = DefaultRegistrationExpiry + } +} + type CapabilityConfiguration struct { - DefaultConfig *values.Map - RemoteTriggerConfig *RemoteTriggerConfig - RemoteTargetConfig *RemoteTargetConfig + DefaultConfig *values.Map + RemoteTriggerConfig *RemoteTriggerConfig + RemoteTargetConfig *RemoteTargetConfig + RemoteExecutableConfig *RemoteExecutableConfig } diff --git a/pkg/capabilities/capabilities_test.go b/pkg/capabilities/capabilities_test.go index a623d1320..9e0e2a4f5 100644 --- a/pkg/capabilities/capabilities_test.go +++ b/pkg/capabilities/capabilities_test.go @@ -149,3 +149,11 @@ func Test_MustNewCapabilityInfo(t *testing.T) { ) }) } + +func Test_RemoteExecutableConfig_ApplyDefaults(t *testing.T) { + rec := &RemoteExecutableConfig{} + rec.ApplyDefaults() + + assert.Equal(t, DefaultRegistrationRefresh, rec.RegistrationRefresh) + assert.Equal(t, DefaultRegistrationExpiry, rec.RegistrationExpiry) +} diff --git a/pkg/capabilities/cli/cmd/built_in_generators.go b/pkg/capabilities/cli/cmd/built_in_generators.go new 
file mode 100644 index 000000000..7f9a960d0 --- /dev/null +++ b/pkg/capabilities/cli/cmd/built_in_generators.go @@ -0,0 +1,16 @@ +package cmd + +import _ "embed" + +//go:embed go_workflow_builder.go.tmpl +var goWorkflowTemplate string + +//go:embed go_mock_capability_builder.go.tmpl +var goWorkflowTestTemplate string + +func AddDefaultGoTemplates(to map[string]TemplateAndCondition, includeMocks bool) { + to["{{if .BaseName}}{{.BaseName|ToSnake}}_builders{{ else }}wrappers{{ end }}_generated.go"] = BaseGenerate{TemplateValue: goWorkflowTemplate} + if includeMocks { + to["{{.Package}}test/{{.BaseName|ToSnake}}_mock_generated.go"] = TestHelperGenerate{TemplateValue: goWorkflowTestTemplate} + } +} diff --git a/pkg/capabilities/cli/cmd/field.go b/pkg/capabilities/cli/cmd/field.go index ca9297ec8..37f946393 100644 --- a/pkg/capabilities/cli/cmd/field.go +++ b/pkg/capabilities/cli/cmd/field.go @@ -5,4 +5,9 @@ type Field struct { NumSlice int IsPrimitive bool ConfigName string + SkipCap bool +} + +func (f Field) WrapCap() bool { + return !f.SkipCap && !f.IsPrimitive && f.NumSlice == 0 } diff --git a/pkg/capabilities/cli/cmd/generate-types/main.go b/pkg/capabilities/cli/cmd/generate-types/main.go index b0b9d2e7e..f78267e0d 100644 --- a/pkg/capabilities/cli/cmd/generate-types/main.go +++ b/pkg/capabilities/cli/cmd/generate-types/main.go @@ -1,7 +1,6 @@ package main import ( - _ "embed" "flag" "fmt" "os" @@ -11,12 +10,6 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd" ) -//go:embed go_workflow_builder.go.tmpl -var goWorkflowTemplate string - -//go:embed go_mock_capability_builder.go.tmpl -var goWorkflowTestTemplate string - var dir = flag.String("dir", "", fmt.Sprintf("Directory to search for %s files, if a file is provided, the directory it is in will be used", cmd.CapabilitySchemaFilePattern.String())) var localPrefix = flag.String("local_prefix", "github.com/smartcontractkit", "The local prefix to use when formatting go files") var 
extraUrls = flag.String("extra_urls", "", "Comma separated list of extra URLs to fetch schemas from") @@ -48,12 +41,10 @@ func run(dir string) error { extras = strings.Split(*extraUrls, ",") } + templates := map[string]cmd.TemplateAndCondition{} + cmd.AddDefaultGoTemplates(templates, true) + return cmd.GenerateTypes(dir, *localPrefix, extras, []cmd.WorkflowHelperGenerator{ - &cmd.TemplateWorkflowGeneratorHelper{ - Templates: map[string]cmd.TemplateAndCondition{ - "{{.BaseName|ToSnake}}_builders_generated.go": cmd.BaseGenerate{TemplateValue: goWorkflowTemplate}, - "{{.Package}}test/{{.BaseName|ToSnake}}_mock_generated.go": cmd.TestHelperGenerate{TemplateValue: goWorkflowTestTemplate}, - }, - }, + &cmd.TemplateWorkflowGeneratorHelper{Templates: templates}, }) } diff --git a/pkg/capabilities/cli/cmd/generate-user-types/main.go b/pkg/capabilities/cli/cmd/generate-user-types/main.go new file mode 100644 index 000000000..49e1ea0ea --- /dev/null +++ b/pkg/capabilities/cli/cmd/generate-user-types/main.go @@ -0,0 +1,98 @@ +package main + +import ( + "flag" + "fmt" + "strings" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd" +) + +var localPrefix = flag.String( + "local_prefix", + "github.com/smartcontractkit", + "The local prefix to use when formatting go files.", +) + +var types = flag.String( + "types", + "", + "Comma separated list of types to generate for. 
If empty, all types created in the package will be generated."+ + " if set, other types in the same package will automatically be added to the skip_cap list", +) + +var skipCap = flag.String( + "skip_cap", + "", + "Comma separated list of types (including the import name), or impute to not expect a capability definition to exist for"+ + " By default, this generator assumes that all types referenced (aside from primitives) will either be generated with this call or already have Cap type", +) + +var dir = flag.String("dir", ".", "The input directory, defaults to the running directory") + +func main() { + flag.Parse() + templates := map[string]cmd.TemplateAndCondition{} + cmd.AddDefaultGoTemplates(templates, false) + helpers := []cmd.WorkflowHelperGenerator{&cmd.TemplateWorkflowGeneratorHelper{Templates: templates}} + + info := cmd.UserGenerationInfo{ + Dir: *dir, + LocalPrefix: *localPrefix, + Helpers: helpers, + GenForStruct: genForStruct(), + } + + if err := cmd.GenerateUserTypes(info); err != nil { + panic(err) + } +} + +func genForStruct() func(string) bool { + skipGen := buildSkipGen() + genPackageType := buildGenPkgType() + return func(s string) bool { + if skipGen[s] { + return false + } + + pkgAndStruct := strings.Split(s, ".") + + switch len(pkgAndStruct) { + case 1: + return genPackageType(pkgAndStruct[0]) + + case 2: + if skipGen[pkgAndStruct[0]] { + return false + } + default: + panic(fmt.Sprintf("invalid type %s", s)) + } + + return true + } +} + +func buildSkipGen() map[string]bool { + skipGen := map[string]bool{} + for _, skip := range strings.Split(*skipCap, ",") { + skipGen[skip] = true + } + return skipGen +} + +func buildGenPkgType() func(string) bool { + genPkgType := func(_ string) bool { return true } + if *types != "" { + genPkg := map[string]bool{} + for _, t := range strings.Split(*types, ",") { + genPkg[t] = true + } + genPkgType = func(s string) bool { + return genPkg[s] + } + } + + return genPkgType +} diff --git 
a/pkg/capabilities/cli/cmd/generate_types.go b/pkg/capabilities/cli/cmd/generate_types.go index 2db528263..9a1648c98 100644 --- a/pkg/capabilities/cli/cmd/generate_types.go +++ b/pkg/capabilities/cli/cmd/generate_types.go @@ -58,20 +58,39 @@ func generateFromSchema(schemaPath, localPrefix string, cfgInfo ConfigInfo, help allFiles[file] = content typeInfo := cfgInfo.SchemaToTypeInfo[schemaPath] - structs, err := generatedInfoFromSrc(content, getCapID(typeInfo), typeInfo) + err = generateSchemaTypes(schemaPath, localPrefix, content, typeInfo, helpers, allFiles) if err != nil { return err } + return nil +} + +func generateSchemaTypes(schemaPath string, localPrefix string, content string, typeInfo TypeInfo, helpers []WorkflowHelperGenerator, allFiles map[string]string) error { + fullPkg, err := packageFromSchemaID(typeInfo.SchemaID) + if err != nil { + return err + } + + generatedInfo, err := generatedInfoFromSrc(content, fullPkg, getCapID(typeInfo), typeInfo, func(string) bool { + return true + }) + if err != nil { + return err + } + + return generateFromGoSrc(generatedInfo, path.Dir(schemaPath), localPrefix, helpers, allFiles) +} - if err = generateHelpers(helpers, structs, allFiles); err != nil { +func generateFromGoSrc(generatedInfo GeneratedInfo, dir, localPrefix string, helpers []WorkflowHelperGenerator, allFiles map[string]string) error { + if err := generateHelpers(helpers, generatedInfo, allFiles); err != nil { return err } - if err = codegen.WriteFiles(path.Dir(schemaPath), localPrefix, toolName, allFiles); err != nil { + if err := codegen.WriteFiles(dir, localPrefix, toolName, allFiles); err != nil { return err } - fmt.Println("Generated types for", schemaPath) + fmt.Println("Generated types for", dir) return nil } @@ -102,7 +121,7 @@ func schemaFilesFromDir(dir string) ([]string, error) { schemaPaths = append(schemaPaths, path) return nil }); err != nil { - return nil, fmt.Errorf("error walking the directory %v: %v", dir, err) + return nil, 
fmt.Errorf("error walking the directory %v: %w", dir, err) } return schemaPaths, nil } diff --git a/pkg/capabilities/cli/cmd/generate_user_types.go b/pkg/capabilities/cli/cmd/generate_user_types.go new file mode 100644 index 000000000..47a1ba0a7 --- /dev/null +++ b/pkg/capabilities/cli/cmd/generate_user_types.go @@ -0,0 +1,64 @@ +package cmd + +import ( + "errors" + "os" + "path" + "strings" +) + +func GenerateUserTypes(info UserGenerationInfo) error { + dir, err := os.ReadDir(info.Dir) + if err != nil { + return err + } + + generatedInfo := GeneratedInfo{} + err = errors.Join() + for i, file := range dir { + fileName := file.Name() + if file.IsDir() || !strings.HasSuffix(fileName, ".go") { + continue + } + + rawContent, err2 := os.ReadFile(path.Join(info.Dir, fileName)) + if err2 != nil { + err = errors.Join(err, err2) + } + + content := string(rawContent) + if strings.HasPrefix(content, "// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT.") { + continue + } + + typeInfo := TypeInfo{CapabilityType: "common"} + + fileGeneratedInfo, err2 := generatedInfoFromSrc(content, "", getCapID(typeInfo), typeInfo, info.GenForStruct) + + if err2 != nil { + err = errors.Join(err, err2) + continue + } + + if i == 0 { + generatedInfo = fileGeneratedInfo + } else { + for name, strct := range fileGeneratedInfo.Types { + generatedInfo.Types[name] = strct + } + } + } + + if err != nil { + return errors.Join(err) + } + + return generateFromGoSrc(generatedInfo, info.Dir, info.LocalPrefix, info.Helpers, map[string]string{}) +} + +type UserGenerationInfo struct { + Dir string + LocalPrefix string + Helpers []WorkflowHelperGenerator + GenForStruct func(string) bool +} diff --git a/pkg/capabilities/cli/cmd/generate_user_types_test.go b/pkg/capabilities/cli/cmd/generate_user_types_test.go new file mode 100644 index 000000000..0d9a3ee9c --- /dev/null +++ b/pkg/capabilities/cli/cmd/generate_user_types_test.go @@ -0,0 +1,59 @@ +package cmd_test + 
+import ( + "os" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +//go:generate go run github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/generate-user-types -dir ./testdata/fixtures/usercode/pkg -skip_cap time.Time +//go:generate go run github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/generate-user-types -dir ./testdata/fixtures/usercode/pkg2 -types OtherPackage + +func TestGenerateUserTypes(t *testing.T) { + t.Parallel() + + t.Run("generated types work as expected", func(t *testing.T) { + onlyVerifySyntax(func() { + myVal := pkg.ConstantMyType(pkg.MyType{I: 10}) + // verify both types were generated from different files + pkg.ConstantMyType2(pkg.MyType2{I: 10}) + + var tmp sdk.CapDefinition[pkg.MyType] = myVal // nolint + _ = tmp + + other := pkg2.ConstantOtherPackage(pkg2.OtherPackage{X: "x", Z: "z"}) //nolint + other = myVal.O() // nolint + _ = other + + var s sdk.CapDefinition[string] = myVal.S() // nolint + _ = s + }) + }) + + t.Run("specifying types to generate ignores other types", func(t *testing.T) { + content, err := os.ReadFile("./testdata/fixtures/usercode/pkg2/wrappers_generated.go") + require.NoError(t, err) + + require.False(t, strings.Contains(string(content), "NotWrappedCap")) + }) + + t.Run("Wrapping wrapped type is no-op", func(t *testing.T) { + original := pkg.NewMyTypeFromFields( + sdk.ConstantDefinition(1), + pkg.ConstantMyNestedType(pkg.MyNestedType{}), + pkg2.ConstantOtherPackage(pkg2.OtherPackage{}), + sdk.ConstantDefinition(""), + sdk.ConstantDefinition(time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)), + ) + + wrapped := pkg.MyTypeWrapper(original) + require.Same(t, original, wrapped) + }) +} diff 
--git a/pkg/capabilities/cli/cmd/generated_info.go b/pkg/capabilities/cli/cmd/generated_info.go index 84d72dcb5..766e5ec45 100644 --- a/pkg/capabilities/cli/cmd/generated_info.go +++ b/pkg/capabilities/cli/cmd/generated_info.go @@ -2,12 +2,7 @@ package cmd import ( "fmt" - "go/ast" - "go/parser" - "go/token" - "reflect" "strings" - "unicode" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" ) @@ -36,44 +31,20 @@ func (g GeneratedInfo) RootType() Struct { } } -func generatedInfoFromSrc(src string, capID *string, typeInfo TypeInfo) (GeneratedInfo, error) { - fset := token.NewFileSet() +func generatedInfoFromSrc( + src, fullPkg string, capID *string, typeInfo TypeInfo, includeType func(name string) bool) (GeneratedInfo, error) { + reader := GoStructReader{IncludeType: includeType} - // Parse the source code string - node, err := parser.ParseFile(fset, "", src, parser.AllErrors) + generatedStructs, pkg, extraImports, err := reader.Read(src) if err != nil { return GeneratedInfo{}, err } - pkg := node.Name.Name - - generatedStructs := map[string]Struct{} - var extraImports []string - ast.Inspect(node, func(n ast.Node) bool { - return inspectNode(n, fset, src, generatedStructs, &extraImports) - }) root := generatedStructs[typeInfo.RootType] input, config := extractInputAndConfig(generatedStructs, typeInfo, root) output := root.Outputs["Outputs"] - fullPkg := typeInfo.SchemaID - - // drop protocol - index := strings.Index(typeInfo.SchemaID, "//") - if index != -1 { - fullPkg = fullPkg[index+2:] - } - - // drop the capability name and version - index = strings.LastIndex(fullPkg, "/") - if index == -1 { - return GeneratedInfo{}, - fmt.Errorf("invalid schema ID: %s must end in /capability_name and optioanlly a version", typeInfo.SchemaID) - } - - fullPkg = fullPkg[:index] - return GeneratedInfo{ Package: pkg, Config: config, @@ -89,6 +60,25 @@ func generatedInfoFromSrc(src string, capID *string, typeInfo TypeInfo) (Generat }, nil } +func 
packageFromSchemaID(schemaID string) (string, error) { + fullPkg := schemaID + + // drop protocol + index := strings.Index(fullPkg, "//") + if index != -1 { + fullPkg = fullPkg[index+2:] + } + + // drop the capability name and version + index = strings.LastIndex(fullPkg, "/") + if index == -1 { + return "", fmt.Errorf("invalid schema ID: %s must end in /capability_name and optionally a version", schemaID) + } + + fullPkg = fullPkg[:index] + return fullPkg, nil +} + func extractInputAndConfig(generatedStructs map[string]Struct, typeInfo TypeInfo, root Struct) (*Struct, Struct) { delete(generatedStructs, typeInfo.RootType) inputField, ok := root.Outputs["Inputs"] @@ -124,76 +114,6 @@ func extractInputAndConfig(generatedStructs map[string]Struct, typeInfo TypeInfo return input, config } -func inspectNode(n ast.Node, fset *token.FileSet, src string, rawInfo map[string]Struct, extraImports *[]string) bool { - if ts, ok := n.(*ast.TypeSpec); ok { - s := Struct{ - Name: strings.TrimSpace(ts.Name.Name), - Outputs: map[string]Field{}, - } - - if structType, ok := ts.Type.(*ast.StructType); ok { - for _, field := range structType.Fields.List { - start := fset.Position(field.Type.Pos()).Offset - end := fset.Position(field.Type.End()).Offset - typeStr := src[start:end] - if typeStr == "interface{}" { - typeStr = "any" - } - f := Field{} - - if field.Tag != nil { - // This is safe because the generator used to create the structs from jsonschema - // will always have json tag if there's tags on the field, per configuration. - // The substring removes the quotes around that tag. 
- tag := reflect.StructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) - jsonTag := tag.Get("json") - if jsonTag != "" { - jsonName := strings.Split(jsonTag, ",")[0] - if jsonName != "" { - f.ConfigName = jsonName - } - } - } - - f.Type = typeStr - if f.ConfigName == "" { - f.ConfigName = field.Names[0].Name - } - - for strings.HasPrefix(f.Type, "[]") { - f.NumSlice++ - f.Type = f.Type[2:] - } - - f.Type = strings.TrimPrefix(f.Type, "*") - t := f.Type - for t[0] == '*' { - t = t[1:] - } - - f.IsPrimitive = unicode.IsLower(rune(t[0])) - s.Outputs[field.Names[0].Name] = f - } - } - - // artifact used for deserializing - if s.Name != "Plain" { - rawInfo[ts.Name.Name] = s - } - } else if imp, ok := n.(*ast.ImportSpec); ok { - switch imp.Path.Value { - case `"reflect"`, `"fmt"`, `"encoding/json"`, `"regexp"`: - default: - importStr := imp.Path.Value - if imp.Name != nil { - importStr = imp.Name.Name + " " + importStr - } - *extraImports = append(*extraImports, importStr) - } - } - return true -} - func lastAfterDot(s string) string { parts := strings.Split(s, ".") return parts[len(parts)-1] diff --git a/pkg/capabilities/cli/cmd/generator_test.go b/pkg/capabilities/cli/cmd/generator_test.go index d95eb5f78..782297473 100644 --- a/pkg/capabilities/cli/cmd/generator_test.go +++ b/pkg/capabilities/cli/cmd/generator_test.go @@ -53,6 +53,8 @@ func TestTypeGeneration(t *testing.T) { var expectedOutput sdk.CapDefinition[string] //nolint expectedOutput = trigger.CoolOutput() _ = expectedOutput + + trigger = basictrigger.ConstantTriggerOutputs(basictrigger.TriggerOutputs{}) //nolint }) }) @@ -76,6 +78,8 @@ func TestTypeGeneration(t *testing.T) { var expectedOutput sdk.CapDefinition[string] //nolint expectedOutput = action.AdaptedThing() _ = expectedOutput + + action = basicaction.ConstantActionOutputs(basicaction.ActionOutputs{}) //nolint }) }) @@ -103,6 +107,8 @@ func TestTypeGeneration(t *testing.T) { var expectedSigsField sdk.CapDefinition[[]string] //nolint expectedSigsField 
= consensus.Sigs() _ = expectedSigsField + + consensus = basicconsensus.ConstantConsensusOutputs(basicconsensus.ConsensusOutputs{}) //nolint }) }) @@ -247,7 +253,7 @@ func TestTypeGeneration(t *testing.T) { }) t.Run("casing is respected from the json schema", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Owner: "owner", Name: "name"}) + workflow := sdk.NewWorkflowSpecFactory() ai := basicaction.ActionConfig{CamelCaseInSchemaForTesting: "foo", SnakeCaseInSchemaForTesting: 12}. New(workflow, "ref", basicaction.ActionInput{InputThing: sdk.ConstantDefinition[bool](true)}) spec, _ := workflow.Spec() diff --git a/pkg/capabilities/cli/cmd/generate-types/go_mock_capability_builder.go.tmpl b/pkg/capabilities/cli/cmd/go_mock_capability_builder.go.tmpl similarity index 100% rename from pkg/capabilities/cli/cmd/generate-types/go_mock_capability_builder.go.tmpl rename to pkg/capabilities/cli/cmd/go_mock_capability_builder.go.tmpl diff --git a/pkg/capabilities/cli/cmd/go_reader.go b/pkg/capabilities/cli/cmd/go_reader.go new file mode 100644 index 000000000..aad2e9ad6 --- /dev/null +++ b/pkg/capabilities/cli/cmd/go_reader.go @@ -0,0 +1,160 @@ +package cmd + +import ( + "go/ast" + "go/parser" + "go/token" + "reflect" + "strings" + "unicode" +) + +type GoStructReader struct { + IncludeType func(name string) bool +} + +func (g *GoStructReader) Read(src string) (map[string]Struct, string, []string, error) { + fset := token.NewFileSet() + + // Parse the source code string + node, err := parser.ParseFile(fset, "", src, parser.AllErrors) + if err != nil { + return nil, "", nil, err + } + + structs := g.gatherStructs(node, fset, src) + return structs, node.Name.Name, g.gatherImports(node, structs), nil +} + +func (g *GoStructReader) gatherStructs(node *ast.File, fset *token.FileSet, src string) map[string]Struct { + generatedStructs := map[string]Struct{} + for _, decl := range node.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != 
token.TYPE { + continue + } + + for _, spec := range gd.Specs { + if strct := g.getStructFromSpec(spec, fset, src); strct != nil { + generatedStructs[strct.Name] = *strct + } + } + } + return generatedStructs +} + +func (g *GoStructReader) getStructFromSpec(spec ast.Spec, fset *token.FileSet, src string) *Struct { + ts, ok := spec.(*ast.TypeSpec) + if !ok { + return nil + } + + name := ts.Name.Name + if !g.IncludeType(name) { + return nil + } + + switch declType := ts.Type.(type) { + case *ast.StructType: + return g.structFromGoStruct(name, declType, fset, src) + case *ast.MapType, *ast.Ident: + return &Struct{Name: name} + default: + return nil + } +} + +func (g *GoStructReader) structFromGoStruct(name string, structType *ast.StructType, fset *token.FileSet, src string) *Struct { + s := Struct{ + Name: strings.TrimSpace(name), + Outputs: map[string]Field{}, + } + + for _, field := range structType.Fields.List { + start := fset.Position(field.Type.Pos()).Offset + end := fset.Position(field.Type.End()).Offset + typeStr := src[start:end] + if typeStr == "interface{}" { + typeStr = "any" + } + + f := Field{ + Type: typeStr, + ConfigName: g.configName(field), + SkipCap: !g.IncludeType(typeStr), + } + + for strings.HasPrefix(f.Type, "[]") { + f.NumSlice++ + f.Type = f.Type[2:] + } + + f.Type = strings.TrimPrefix(f.Type, "*") + t := f.Type + for t[0] == '*' { + t = t[1:] + } + + importLoc := strings.Index(t, ".") + if importLoc != -1 { + t = t[importLoc+1:] + } + f.IsPrimitive = unicode.IsLower(rune(t[0])) + s.Outputs[field.Names[0].Name] = f + } + + return &s +} + +func (g *GoStructReader) configName(field *ast.Field) string { + defaultName := field.Names[0].Name + if field.Tag == nil { + return defaultName + } + + // Tags have string values, so we need to strip the quotes + tag := reflect.StructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) + jsonTag := tag.Get("json") + if jsonTag != "" { + jsonName := strings.Split(jsonTag, ",")[0] + if jsonName != "" { + return 
jsonName + } + } + + return defaultName +} + +func (g *GoStructReader) gatherImports(node *ast.File, structs map[string]Struct) []string { + requiredImports := map[string]bool{} + for _, strct := range structs { + for _, field := range strct.Outputs { + parts := strings.Split(field.Type, ".") + if len(parts) > 1 { + requiredImports[parts[0]] = true + } + } + } + + var allValues []string + var imports []string + var check []bool + for _, imp := range node.Imports { + var importName string + if imp.Name != nil { + importName = imp.Name.Name + } else { + importParts := strings.Split(imp.Path.Value, "/") + importName = importParts[len(importParts)-1] + } + importName = strings.Trim(importName, "\"") + + allValues = append(allValues, importName) + check = append(check, requiredImports[importName]) + if requiredImports[importName] { + imports = append(imports, imp.Path.Value) + } + } + + return imports +} diff --git a/pkg/capabilities/cli/cmd/generate-types/go_workflow_builder.go.tmpl b/pkg/capabilities/cli/cmd/go_workflow_builder.go.tmpl similarity index 79% rename from pkg/capabilities/cli/cmd/generate-types/go_workflow_builder.go.tmpl rename to pkg/capabilities/cli/cmd/go_workflow_builder.go.tmpl index d68a3d75b..8a2fab611 100644 --- a/pkg/capabilities/cli/cmd/generate-types/go_workflow_builder.go.tmpl +++ b/pkg/capabilities/cli/cmd/go_workflow_builder.go.tmpl @@ -33,7 +33,8 @@ func (cfg {{.Config.Name}}) New(w *sdk.WorkflowSpecFactory, {{- if not .ID }}id {{- if eq .CapabilityType "target" }} step.AddTo(w) {{- else if eq 0 .RootNumSlice }} - return {{.RootType.Name}}CapFromStep(w, step) + raw := step.AddTo(w) + return {{.RootType.Name}}Wrapper(raw) {{- else }} return step.AddTo(w) {{- end }} @@ -41,54 +42,60 @@ func (cfg {{.Config.Name}}) New(w *sdk.WorkflowSpecFactory, {{- if not .ID }}id {{- end }} {{ range $key, $value := .Types }} + +// {{$key}}Wrapper allows access to field from an sdk.CapDefinition[{{$key}}] +func {{$key}}Wrapper(raw 
sdk.CapDefinition[{{$key}}]) {{$key}}Cap { + wrapped, ok := raw.({{$key}}Cap) + if ok { + return wrapped + } + + {{- if .Outputs }} + return &{{$key|LowerFirst}}Cap{CapDefinition: raw} + {{- else }} + return {{$key}}Cap(raw) + {{- end }} +} + {{- if .Outputs }} type {{$key}}Cap interface { sdk.CapDefinition[{{ $key }}] {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} - {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] - {{- else }} + {{- if or $type.WrapCap }} {{$fieldName}}() {{ $type.Type }}Cap + {{- else }} + {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] {{- end }} {{- end }} private() } -{{ if ne $.CapabilityType "target" }} -// {{$key}}CapFromStep should only be called from generated code to assure type safety -func {{$key}}CapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[{{$key}}]) {{$key}}Cap { - raw := step.AddTo(w) - return &{{$key|LowerFirst}}{CapDefinition: raw} -} -{{ end }} - -type {{$key|LowerFirst}} struct { +type {{$key|LowerFirst}}Cap struct { sdk.CapDefinition[{{ $key }}] } -func (*{{$key|LowerFirst}}) private() {} +func (*{{$key|LowerFirst}}Cap) private() {} {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} -func (c *{{$key|LowerFirst}}) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { - return sdk.AccessField[{{$value.Name}}, {{Repeat "[]" $type.NumSlice}}{{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}") -} + {{- if or $type.WrapCap }} +func (c *{{$key|LowerFirst}}Cap) {{$fieldName}}() {{ $type.Type }}Cap { + return {{ $type.Type }}Wrapper(sdk.AccessField[{{$value.Name}}, {{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}")) {{- else }} -func (c *{{$key|LowerFirst}}) {{$fieldName}}() {{ $type.Type }}Cap { - {{- if $type.Type|HasOutputs }} - return &{{ $type.Type | LowerFirst }}{ CapDefinition: sdk.AccessField[{{$value.Name}}, 
{{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}")} - {{- else }} - return {{ $type.Type }}Cap(sdk.AccessField[{{$value.Name}}, {{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}")) - {{- end }} -} +func (c *{{$key|LowerFirst}}Cap) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { + return sdk.AccessField[{{$value.Name}}, {{Repeat "[]" $type.NumSlice}}{{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}") {{- end }} +} {{- end }} +func Constant{{$key}}(value {{$key}}) {{$key}}Cap { + return &{{$key|LowerFirst}}Cap{CapDefinition: sdk.ConstantDefinition(value)} +} + func New{{$key}}FromFields({{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} - {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}], - {{- else }} + {{- if or $type.WrapCap }} {{$fieldName|LowerFirst}} {{ $type.Type }}Cap, + {{- else }} + {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}], {{- end }} {{- end }}) {{$key}}Cap { return &simple{{$key}}{ CapDefinition: sdk.ComponentCapDefinition[{{$value.Name}}]{ {{- range $fieldName, $type := .Outputs }} @@ -104,19 +111,19 @@ func New{{$key}}FromFields({{- range $fieldName, $type := .Outputs }} type simple{{$key}} struct { sdk.CapDefinition[{{ $key }}] {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} - {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] - {{- else }} + {{- if $type.WrapCap }} {{$fieldName|LowerFirst}} {{ $type.Type }}Cap + {{- else }} + {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] {{- end }} {{- end }} } {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} -func (c *simple{{$key}}) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { - {{- else }} + {{- if or 
$type.WrapCap }} func (c *simple{{$key}}) {{$fieldName}}() {{ $type.Type }}Cap { + {{- else }} +func (c *simple{{$key}}) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { {{- end }} return c.{{$fieldName|LowerFirst}} } diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go index b70987ede..d46d5d7c7 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go @@ -17,7 +17,17 @@ func (cfg MapActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Map } step := sdk.Step[MapActionOutputs]{Definition: def} - return MapActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return MapActionOutputsWrapper(raw) +} + +// MapActionOutputsWrapper allows access to field from an sdk.CapDefinition[MapActionOutputs] +func MapActionOutputsWrapper(raw sdk.CapDefinition[MapActionOutputs]) MapActionOutputsCap { + wrapped, ok := raw.(MapActionOutputsCap) + if ok { + return wrapped + } + return &mapActionOutputsCap{CapDefinition: raw} } type MapActionOutputsCap interface { @@ -26,19 +36,17 @@ type MapActionOutputsCap interface { private() } -// MapActionOutputsCapFromStep should only be called from generated code to assure type safety -func MapActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[MapActionOutputs]) MapActionOutputsCap { - raw := step.AddTo(w) - return &mapActionOutputs{CapDefinition: raw} +type mapActionOutputsCap struct { + sdk.CapDefinition[MapActionOutputs] } -type mapActionOutputs struct { - sdk.CapDefinition[MapActionOutputs] +func (*mapActionOutputsCap) private() {} +func (c *mapActionOutputsCap) Payload() MapActionOutputsPayloadCap { + return 
MapActionOutputsPayloadWrapper(sdk.AccessField[MapActionOutputs, MapActionOutputsPayload](c.CapDefinition, "payload")) } -func (*mapActionOutputs) private() {} -func (c *mapActionOutputs) Payload() MapActionOutputsPayloadCap { - return MapActionOutputsPayloadCap(sdk.AccessField[MapActionOutputs, MapActionOutputsPayload](c.CapDefinition, "payload")) +func ConstantMapActionOutputs(value MapActionOutputs) MapActionOutputsCap { + return &mapActionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewMapActionOutputsFromFields( @@ -62,6 +70,15 @@ func (c *simpleMapActionOutputs) Payload() MapActionOutputsPayloadCap { func (c *simpleMapActionOutputs) private() {} +// MapActionOutputsPayloadWrapper allows access to field from an sdk.CapDefinition[MapActionOutputsPayload] +func MapActionOutputsPayloadWrapper(raw sdk.CapDefinition[MapActionOutputsPayload]) MapActionOutputsPayloadCap { + wrapped, ok := raw.(MapActionOutputsPayloadCap) + if ok { + return wrapped + } + return MapActionOutputsPayloadCap(raw) +} + type MapActionOutputsPayloadCap sdk.CapDefinition[MapActionOutputsPayload] type MapActionInput struct { diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go index f5e449ad0..ee3c64a96 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go @@ -22,25 +22,32 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action return step.AddTo(w) } +// ActionOutputsElemWrapper allows access to field from an sdk.CapDefinition[ActionOutputsElem] +func ActionOutputsElemWrapper(raw sdk.CapDefinition[ActionOutputsElem]) ActionOutputsElemCap { + wrapped, ok := raw.(ActionOutputsElemCap) + if ok { + return wrapped + } + return 
&actionOutputsElemCap{CapDefinition: raw} +} + type ActionOutputsElemCap interface { sdk.CapDefinition[ActionOutputsElem] Results() ActionOutputsElemResultsCap private() } -// ActionOutputsElemCapFromStep should only be called from generated code to assure type safety -func ActionOutputsElemCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputsElem]) ActionOutputsElemCap { - raw := step.AddTo(w) - return &actionOutputsElem{CapDefinition: raw} +type actionOutputsElemCap struct { + sdk.CapDefinition[ActionOutputsElem] } -type actionOutputsElem struct { - sdk.CapDefinition[ActionOutputsElem] +func (*actionOutputsElemCap) private() {} +func (c *actionOutputsElemCap) Results() ActionOutputsElemResultsCap { + return ActionOutputsElemResultsWrapper(sdk.AccessField[ActionOutputsElem, ActionOutputsElemResults](c.CapDefinition, "results")) } -func (*actionOutputsElem) private() {} -func (c *actionOutputsElem) Results() ActionOutputsElemResultsCap { - return &actionOutputsElemResults{CapDefinition: sdk.AccessField[ActionOutputsElem, ActionOutputsElemResults](c.CapDefinition, "results")} +func ConstantActionOutputsElem(value ActionOutputsElem) ActionOutputsElemCap { + return &actionOutputsElemCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewActionOutputsElemFromFields( @@ -64,27 +71,34 @@ func (c *simpleActionOutputsElem) Results() ActionOutputsElemResultsCap { func (c *simpleActionOutputsElem) private() {} +// ActionOutputsElemResultsWrapper allows access to field from an sdk.CapDefinition[ActionOutputsElemResults] +func ActionOutputsElemResultsWrapper(raw sdk.CapDefinition[ActionOutputsElemResults]) ActionOutputsElemResultsCap { + wrapped, ok := raw.(ActionOutputsElemResultsCap) + if ok { + return wrapped + } + return &actionOutputsElemResultsCap{CapDefinition: raw} +} + type ActionOutputsElemResultsCap interface { sdk.CapDefinition[ActionOutputsElemResults] AdaptedThing() sdk.CapDefinition[string] private() } -// ActionOutputsElemResultsCapFromStep 
should only be called from generated code to assure type safety -func ActionOutputsElemResultsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputsElemResults]) ActionOutputsElemResultsCap { - raw := step.AddTo(w) - return &actionOutputsElemResults{CapDefinition: raw} -} - -type actionOutputsElemResults struct { +type actionOutputsElemResultsCap struct { sdk.CapDefinition[ActionOutputsElemResults] } -func (*actionOutputsElemResults) private() {} -func (c *actionOutputsElemResults) AdaptedThing() sdk.CapDefinition[string] { +func (*actionOutputsElemResultsCap) private() {} +func (c *actionOutputsElemResultsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[ActionOutputsElemResults, string](c.CapDefinition, "adapted_thing") } +func ConstantActionOutputsElemResults(value ActionOutputsElemResults) ActionOutputsElemResultsCap { + return &actionOutputsElemResultsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewActionOutputsElemResultsFromFields( adaptedThing sdk.CapDefinition[string]) ActionOutputsElemResultsCap { return &simpleActionOutputsElemResults{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go index dc36bfd92..2b40fea6d 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go @@ -20,7 +20,17 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action } step := sdk.Step[ActionOutputs]{Definition: def} - return ActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ActionOutputsWrapper(raw) +} + +// ActionOutputsWrapper allows access to field from an sdk.CapDefinition[ActionOutputs] +func ActionOutputsWrapper(raw sdk.CapDefinition[ActionOutputs]) ActionOutputsCap { + 
wrapped, ok := raw.(ActionOutputsCap) + if ok { + return wrapped + } + return &actionOutputsCap{CapDefinition: raw} } type ActionOutputsCap interface { @@ -29,21 +39,19 @@ type ActionOutputsCap interface { private() } -// ActionOutputsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputs]) ActionOutputsCap { - raw := step.AddTo(w) - return &actionOutputs{CapDefinition: raw} -} - -type actionOutputs struct { +type actionOutputsCap struct { sdk.CapDefinition[ActionOutputs] } -func (*actionOutputs) private() {} -func (c *actionOutputs) AdaptedThing() sdk.CapDefinition[string] { +func (*actionOutputsCap) private() {} +func (c *actionOutputsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[ActionOutputs, string](c.CapDefinition, "adapted_thing") } +func ConstantActionOutputs(value ActionOutputs) ActionOutputsCap { + return &actionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewActionOutputsFromFields( adaptedThing sdk.CapDefinition[string]) ActionOutputsCap { return &simpleActionOutputs{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go index 938709be3..f8bee1ebb 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go @@ -20,7 +20,17 @@ func (cfg ConsensusConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Con } step := sdk.Step[ConsensusOutputs]{Definition: def} - return ConsensusOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ConsensusOutputsWrapper(raw) +} + +// ConsensusOutputsWrapper allows access to field from an sdk.CapDefinition[ConsensusOutputs] +func 
ConsensusOutputsWrapper(raw sdk.CapDefinition[ConsensusOutputs]) ConsensusOutputsCap { + wrapped, ok := raw.(ConsensusOutputsCap) + if ok { + return wrapped + } + return &consensusOutputsCap{CapDefinition: raw} } type ConsensusOutputsCap interface { @@ -30,24 +40,22 @@ type ConsensusOutputsCap interface { private() } -// ConsensusOutputsCapFromStep should only be called from generated code to assure type safety -func ConsensusOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ConsensusOutputs]) ConsensusOutputsCap { - raw := step.AddTo(w) - return &consensusOutputs{CapDefinition: raw} -} - -type consensusOutputs struct { +type consensusOutputsCap struct { sdk.CapDefinition[ConsensusOutputs] } -func (*consensusOutputs) private() {} -func (c *consensusOutputs) Consensus() sdk.CapDefinition[[]string] { +func (*consensusOutputsCap) private() {} +func (c *consensusOutputsCap) Consensus() sdk.CapDefinition[[]string] { return sdk.AccessField[ConsensusOutputs, []string](c.CapDefinition, "consensus") } -func (c *consensusOutputs) Sigs() sdk.CapDefinition[[]string] { +func (c *consensusOutputsCap) Sigs() sdk.CapDefinition[[]string] { return sdk.AccessField[ConsensusOutputs, []string](c.CapDefinition, "sigs") } +func ConstantConsensusOutputs(value ConsensusOutputs) ConsensusOutputsCap { + return &consensusOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewConsensusOutputsFromFields( consensus sdk.CapDefinition[[]string], sigs sdk.CapDefinition[[]string]) ConsensusOutputsCap { diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go index 350408a3d..ba92f4162 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go @@ -20,7 +20,17 @@ func 
(cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) TriggerOutputsCap { } step := sdk.Step[TriggerOutputs]{Definition: def} - return TriggerOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return TriggerOutputsWrapper(raw) +} + +// TriggerOutputsWrapper allows access to field from an sdk.CapDefinition[TriggerOutputs] +func TriggerOutputsWrapper(raw sdk.CapDefinition[TriggerOutputs]) TriggerOutputsCap { + wrapped, ok := raw.(TriggerOutputsCap) + if ok { + return wrapped + } + return &triggerOutputsCap{CapDefinition: raw} } type TriggerOutputsCap interface { @@ -29,21 +39,19 @@ type TriggerOutputsCap interface { private() } -// TriggerOutputsCapFromStep should only be called from generated code to assure type safety -func TriggerOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[TriggerOutputs]) TriggerOutputsCap { - raw := step.AddTo(w) - return &triggerOutputs{CapDefinition: raw} -} - -type triggerOutputs struct { +type triggerOutputsCap struct { sdk.CapDefinition[TriggerOutputs] } -func (*triggerOutputs) private() {} -func (c *triggerOutputs) CoolOutput() sdk.CapDefinition[string] { +func (*triggerOutputsCap) private() {} +func (c *triggerOutputsCap) CoolOutput() sdk.CapDefinition[string] { return sdk.AccessField[TriggerOutputs, string](c.CapDefinition, "cool_output") } +func ConstantTriggerOutputs(value TriggerOutputs) TriggerOutputsCap { + return &triggerOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewTriggerOutputsFromFields( coolOutput sdk.CapDefinition[string]) TriggerOutputsCap { return &simpleTriggerOutputs{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go index 61a18d965..754c529de 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go +++ 
b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go @@ -4,7 +4,7 @@ package externalreferenceaction import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" - referenceaction "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) @@ -20,7 +20,8 @@ func (cfg SomeConfig) New(w *sdk.WorkflowSpecFactory, ref string, input ActionIn } step := sdk.Step[referenceaction.SomeOutputs]{Definition: def} - return referenceaction.SomeOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return referenceaction.SomeOutputsWrapper(raw) } type ActionInput = referenceaction.ActionInput diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go index 37ff271fc..e00c9b842 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go @@ -5,7 +5,7 @@ package externalreferenceactiontest import ( - referenceaction "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" ) diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go 
b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go index 9cd7e5aef..0d419feba 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go @@ -17,7 +17,17 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action } step := sdk.Step[ActionOutputs]{Definition: def} - return ActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ActionOutputsWrapper(raw) +} + +// ActionOutputsWrapper allows access to field from an sdk.CapDefinition[ActionOutputs] +func ActionOutputsWrapper(raw sdk.CapDefinition[ActionOutputs]) ActionOutputsCap { + wrapped, ok := raw.(ActionOutputsCap) + if ok { + return wrapped + } + return &actionOutputsCap{CapDefinition: raw} } type ActionOutputsCap interface { @@ -26,19 +36,17 @@ type ActionOutputsCap interface { private() } -// ActionOutputsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputs]) ActionOutputsCap { - raw := step.AddTo(w) - return &actionOutputs{CapDefinition: raw} +type actionOutputsCap struct { + sdk.CapDefinition[ActionOutputs] } -type actionOutputs struct { - sdk.CapDefinition[ActionOutputs] +func (*actionOutputsCap) private() {} +func (c *actionOutputsCap) Payload() ActionOutputsPayloadCap { + return ActionOutputsPayloadWrapper(sdk.AccessField[ActionOutputs, ActionOutputsPayload](c.CapDefinition, "payload")) } -func (*actionOutputs) private() {} -func (c *actionOutputs) Payload() ActionOutputsPayloadCap { - return ActionOutputsPayloadCap(sdk.AccessField[ActionOutputs, ActionOutputsPayload](c.CapDefinition, "payload")) +func ConstantActionOutputs(value ActionOutputs) ActionOutputsCap { + return &actionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewActionOutputsFromFields( @@ 
-62,6 +70,15 @@ func (c *simpleActionOutputs) Payload() ActionOutputsPayloadCap { func (c *simpleActionOutputs) private() {} +// ActionOutputsPayloadWrapper allows access to field from an sdk.CapDefinition[ActionOutputsPayload] +func ActionOutputsPayloadWrapper(raw sdk.CapDefinition[ActionOutputsPayload]) ActionOutputsPayloadCap { + wrapped, ok := raw.(ActionOutputsPayloadCap) + if ok { + return wrapped + } + return ActionOutputsPayloadCap(raw) +} + type ActionOutputsPayloadCap sdk.CapDefinition[ActionOutputsPayload] type ActionInput struct { diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go index f2d1d9731..4d7051a63 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go @@ -19,7 +19,17 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action } step := sdk.Step[ActionOutputs]{Definition: def} - return ActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ActionOutputsWrapper(raw) +} + +// ActionOutputsWrapper allows access to field from an sdk.CapDefinition[ActionOutputs] +func ActionOutputsWrapper(raw sdk.CapDefinition[ActionOutputs]) ActionOutputsCap { + wrapped, ok := raw.(ActionOutputsCap) + if ok { + return wrapped + } + return &actionOutputsCap{CapDefinition: raw} } type ActionOutputsCap interface { @@ -28,19 +38,17 @@ type ActionOutputsCap interface { private() } -// ActionOutputsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputs]) ActionOutputsCap { - raw := step.AddTo(w) - return &actionOutputs{CapDefinition: raw} +type actionOutputsCap struct { + sdk.CapDefinition[ActionOutputs] } -type 
actionOutputs struct { - sdk.CapDefinition[ActionOutputs] +func (*actionOutputsCap) private() {} +func (c *actionOutputsCap) Results() ActionOutputsResultsCap { + return ActionOutputsResultsWrapper(sdk.AccessField[ActionOutputs, ActionOutputsResults](c.CapDefinition, "results")) } -func (*actionOutputs) private() {} -func (c *actionOutputs) Results() ActionOutputsResultsCap { - return &actionOutputsResults{CapDefinition: sdk.AccessField[ActionOutputs, ActionOutputsResults](c.CapDefinition, "results")} +func ConstantActionOutputs(value ActionOutputs) ActionOutputsCap { + return &actionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewActionOutputsFromFields( @@ -64,27 +72,34 @@ func (c *simpleActionOutputs) Results() ActionOutputsResultsCap { func (c *simpleActionOutputs) private() {} +// ActionOutputsResultsWrapper allows access to field from an sdk.CapDefinition[ActionOutputsResults] +func ActionOutputsResultsWrapper(raw sdk.CapDefinition[ActionOutputsResults]) ActionOutputsResultsCap { + wrapped, ok := raw.(ActionOutputsResultsCap) + if ok { + return wrapped + } + return &actionOutputsResultsCap{CapDefinition: raw} +} + type ActionOutputsResultsCap interface { sdk.CapDefinition[ActionOutputsResults] AdaptedThing() sdk.CapDefinition[string] private() } -// ActionOutputsResultsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsResultsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputsResults]) ActionOutputsResultsCap { - raw := step.AddTo(w) - return &actionOutputsResults{CapDefinition: raw} -} - -type actionOutputsResults struct { +type actionOutputsResultsCap struct { sdk.CapDefinition[ActionOutputsResults] } -func (*actionOutputsResults) private() {} -func (c *actionOutputsResults) AdaptedThing() sdk.CapDefinition[string] { +func (*actionOutputsResultsCap) private() {} +func (c *actionOutputsResultsCap) AdaptedThing() sdk.CapDefinition[string] { return 
sdk.AccessField[ActionOutputsResults, string](c.CapDefinition, "adapted_thing") } +func ConstantActionOutputsResults(value ActionOutputsResults) ActionOutputsResultsCap { + return &actionOutputsResultsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewActionOutputsResultsFromFields( adaptedThing sdk.CapDefinition[string]) ActionOutputsResultsCap { return &simpleActionOutputsResults{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go index db2d9b59d..4d399fb69 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go @@ -20,7 +20,17 @@ func (cfg SomeConfig) New(w *sdk.WorkflowSpecFactory, ref string, input ActionIn } step := sdk.Step[SomeOutputs]{Definition: def} - return SomeOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return SomeOutputsWrapper(raw) +} + +// SomeOutputsWrapper allows access to field from an sdk.CapDefinition[SomeOutputs] +func SomeOutputsWrapper(raw sdk.CapDefinition[SomeOutputs]) SomeOutputsCap { + wrapped, ok := raw.(SomeOutputsCap) + if ok { + return wrapped + } + return &someOutputsCap{CapDefinition: raw} } type SomeOutputsCap interface { @@ -29,21 +39,19 @@ type SomeOutputsCap interface { private() } -// SomeOutputsCapFromStep should only be called from generated code to assure type safety -func SomeOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SomeOutputs]) SomeOutputsCap { - raw := step.AddTo(w) - return &someOutputs{CapDefinition: raw} -} - -type someOutputs struct { +type someOutputsCap struct { sdk.CapDefinition[SomeOutputs] } -func (*someOutputs) private() {} -func (c *someOutputs) AdaptedThing() sdk.CapDefinition[string] { +func (*someOutputsCap) private() {} +func (c 
*someOutputsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[SomeOutputs, string](c.CapDefinition, "adapted_thing") } +func ConstantSomeOutputs(value SomeOutputs) SomeOutputsCap { + return &someOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSomeOutputsFromFields( adaptedThing sdk.CapDefinition[string]) SomeOutputsCap { return &simpleSomeOutputs{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types.go new file mode 100644 index 000000000..e41fa97d1 --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types.go @@ -0,0 +1,20 @@ +package pkg + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" +) + +type MyType struct { + Nested MyNestedType + I int + S string + T time.Time + O pkg2.OtherPackage +} + +type MyNestedType struct { + II int + SS string +} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types_2.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types_2.go new file mode 100644 index 000000000..c123504ff --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types_2.go @@ -0,0 +1,17 @@ +package pkg + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" +) + +// A second file is used to make sure that all files in the package are collapsed into one correctly. 
+ +type MyType2 struct { + Nested MyNestedType + I int + S string + T time.Time + O pkg2.OtherPackage +} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/wrappers_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/wrappers_generated.go new file mode 100644 index 000000000..6384a42ee --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/wrappers_generated.go @@ -0,0 +1,256 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +package pkg + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +// MyNestedTypeWrapper allows access to field from an sdk.CapDefinition[MyNestedType] +func MyNestedTypeWrapper(raw sdk.CapDefinition[MyNestedType]) MyNestedTypeCap { + wrapped, ok := raw.(MyNestedTypeCap) + if ok { + return wrapped + } + return &myNestedTypeCap{CapDefinition: raw} +} + +type MyNestedTypeCap interface { + sdk.CapDefinition[MyNestedType] + II() sdk.CapDefinition[int] + SS() sdk.CapDefinition[string] + private() +} + +type myNestedTypeCap struct { + sdk.CapDefinition[MyNestedType] +} + +func (*myNestedTypeCap) private() {} +func (c *myNestedTypeCap) II() sdk.CapDefinition[int] { + return sdk.AccessField[MyNestedType, int](c.CapDefinition, "II") +} +func (c *myNestedTypeCap) SS() sdk.CapDefinition[string] { + return sdk.AccessField[MyNestedType, string](c.CapDefinition, "SS") +} + +func ConstantMyNestedType(value MyNestedType) MyNestedTypeCap { + return &myNestedTypeCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewMyNestedTypeFromFields( + iI sdk.CapDefinition[int], + sS sdk.CapDefinition[string]) MyNestedTypeCap { + return &simpleMyNestedType{ + CapDefinition: sdk.ComponentCapDefinition[MyNestedType]{ + "II": iI.Ref(), + "SS": sS.Ref(), + }, + iI: iI, + sS: sS, + } +} + +type 
simpleMyNestedType struct { + sdk.CapDefinition[MyNestedType] + iI sdk.CapDefinition[int] + sS sdk.CapDefinition[string] +} + +func (c *simpleMyNestedType) II() sdk.CapDefinition[int] { + return c.iI +} +func (c *simpleMyNestedType) SS() sdk.CapDefinition[string] { + return c.sS +} + +func (c *simpleMyNestedType) private() {} + +// MyTypeWrapper allows access to field from an sdk.CapDefinition[MyType] +func MyTypeWrapper(raw sdk.CapDefinition[MyType]) MyTypeCap { + wrapped, ok := raw.(MyTypeCap) + if ok { + return wrapped + } + return &myTypeCap{CapDefinition: raw} +} + +type MyTypeCap interface { + sdk.CapDefinition[MyType] + I() sdk.CapDefinition[int] + Nested() MyNestedTypeCap + O() pkg2.OtherPackageCap + S() sdk.CapDefinition[string] + T() sdk.CapDefinition[time.Time] + private() +} + +type myTypeCap struct { + sdk.CapDefinition[MyType] +} + +func (*myTypeCap) private() {} +func (c *myTypeCap) I() sdk.CapDefinition[int] { + return sdk.AccessField[MyType, int](c.CapDefinition, "I") +} +func (c *myTypeCap) Nested() MyNestedTypeCap { + return MyNestedTypeWrapper(sdk.AccessField[MyType, MyNestedType](c.CapDefinition, "Nested")) +} +func (c *myTypeCap) O() pkg2.OtherPackageCap { + return pkg2.OtherPackageWrapper(sdk.AccessField[MyType, pkg2.OtherPackage](c.CapDefinition, "O")) +} +func (c *myTypeCap) S() sdk.CapDefinition[string] { + return sdk.AccessField[MyType, string](c.CapDefinition, "S") +} +func (c *myTypeCap) T() sdk.CapDefinition[time.Time] { + return sdk.AccessField[MyType, time.Time](c.CapDefinition, "T") +} + +func ConstantMyType(value MyType) MyTypeCap { + return &myTypeCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewMyTypeFromFields( + i sdk.CapDefinition[int], + nested MyNestedTypeCap, + o pkg2.OtherPackageCap, + s sdk.CapDefinition[string], + t sdk.CapDefinition[time.Time]) MyTypeCap { + return &simpleMyType{ + CapDefinition: sdk.ComponentCapDefinition[MyType]{ + "I": i.Ref(), + "Nested": nested.Ref(), + "O": o.Ref(), + "S": s.Ref(), 
+ "T": t.Ref(), + }, + i: i, + nested: nested, + o: o, + s: s, + t: t, + } +} + +type simpleMyType struct { + sdk.CapDefinition[MyType] + i sdk.CapDefinition[int] + nested MyNestedTypeCap + o pkg2.OtherPackageCap + s sdk.CapDefinition[string] + t sdk.CapDefinition[time.Time] +} + +func (c *simpleMyType) I() sdk.CapDefinition[int] { + return c.i +} +func (c *simpleMyType) Nested() MyNestedTypeCap { + return c.nested +} +func (c *simpleMyType) O() pkg2.OtherPackageCap { + return c.o +} +func (c *simpleMyType) S() sdk.CapDefinition[string] { + return c.s +} +func (c *simpleMyType) T() sdk.CapDefinition[time.Time] { + return c.t +} + +func (c *simpleMyType) private() {} + +// MyType2Wrapper allows access to field from an sdk.CapDefinition[MyType2] +func MyType2Wrapper(raw sdk.CapDefinition[MyType2]) MyType2Cap { + wrapped, ok := raw.(MyType2Cap) + if ok { + return wrapped + } + return &myType2Cap{CapDefinition: raw} +} + +type MyType2Cap interface { + sdk.CapDefinition[MyType2] + I() sdk.CapDefinition[int] + Nested() MyNestedTypeCap + O() pkg2.OtherPackageCap + S() sdk.CapDefinition[string] + T() sdk.CapDefinition[time.Time] + private() +} + +type myType2Cap struct { + sdk.CapDefinition[MyType2] +} + +func (*myType2Cap) private() {} +func (c *myType2Cap) I() sdk.CapDefinition[int] { + return sdk.AccessField[MyType2, int](c.CapDefinition, "I") +} +func (c *myType2Cap) Nested() MyNestedTypeCap { + return MyNestedTypeWrapper(sdk.AccessField[MyType2, MyNestedType](c.CapDefinition, "Nested")) +} +func (c *myType2Cap) O() pkg2.OtherPackageCap { + return pkg2.OtherPackageWrapper(sdk.AccessField[MyType2, pkg2.OtherPackage](c.CapDefinition, "O")) +} +func (c *myType2Cap) S() sdk.CapDefinition[string] { + return sdk.AccessField[MyType2, string](c.CapDefinition, "S") +} +func (c *myType2Cap) T() sdk.CapDefinition[time.Time] { + return sdk.AccessField[MyType2, time.Time](c.CapDefinition, "T") +} + +func ConstantMyType2(value MyType2) MyType2Cap { + return 
&myType2Cap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewMyType2FromFields( + i sdk.CapDefinition[int], + nested MyNestedTypeCap, + o pkg2.OtherPackageCap, + s sdk.CapDefinition[string], + t sdk.CapDefinition[time.Time]) MyType2Cap { + return &simpleMyType2{ + CapDefinition: sdk.ComponentCapDefinition[MyType2]{ + "I": i.Ref(), + "Nested": nested.Ref(), + "O": o.Ref(), + "S": s.Ref(), + "T": t.Ref(), + }, + i: i, + nested: nested, + o: o, + s: s, + t: t, + } +} + +type simpleMyType2 struct { + sdk.CapDefinition[MyType2] + i sdk.CapDefinition[int] + nested MyNestedTypeCap + o pkg2.OtherPackageCap + s sdk.CapDefinition[string] + t sdk.CapDefinition[time.Time] +} + +func (c *simpleMyType2) I() sdk.CapDefinition[int] { + return c.i +} +func (c *simpleMyType2) Nested() MyNestedTypeCap { + return c.nested +} +func (c *simpleMyType2) O() pkg2.OtherPackageCap { + return c.o +} +func (c *simpleMyType2) S() sdk.CapDefinition[string] { + return c.s +} +func (c *simpleMyType2) T() sdk.CapDefinition[time.Time] { + return c.t +} + +func (c *simpleMyType2) private() {} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/custom_type_2.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/custom_type_2.go new file mode 100644 index 000000000..e839d3a52 --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/custom_type_2.go @@ -0,0 +1,11 @@ +package pkg2 + +type OtherPackage struct { + X string + Z string + Nr NotWrapped +} + +type NotWrapped struct { + A string +} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/wrappers_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/wrappers_generated.go new file mode 100644 index 000000000..b6993f7c6 --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/wrappers_generated.go @@ -0,0 +1,78 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. 
+ +package pkg2 + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +// OtherPackageWrapper allows access to field from an sdk.CapDefinition[OtherPackage] +func OtherPackageWrapper(raw sdk.CapDefinition[OtherPackage]) OtherPackageCap { + wrapped, ok := raw.(OtherPackageCap) + if ok { + return wrapped + } + return &otherPackageCap{CapDefinition: raw} +} + +type OtherPackageCap interface { + sdk.CapDefinition[OtherPackage] + Nr() sdk.CapDefinition[NotWrapped] + X() sdk.CapDefinition[string] + Z() sdk.CapDefinition[string] + private() +} + +type otherPackageCap struct { + sdk.CapDefinition[OtherPackage] +} + +func (*otherPackageCap) private() {} +func (c *otherPackageCap) Nr() sdk.CapDefinition[NotWrapped] { + return sdk.AccessField[OtherPackage, NotWrapped](c.CapDefinition, "Nr") +} +func (c *otherPackageCap) X() sdk.CapDefinition[string] { + return sdk.AccessField[OtherPackage, string](c.CapDefinition, "X") +} +func (c *otherPackageCap) Z() sdk.CapDefinition[string] { + return sdk.AccessField[OtherPackage, string](c.CapDefinition, "Z") +} + +func ConstantOtherPackage(value OtherPackage) OtherPackageCap { + return &otherPackageCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewOtherPackageFromFields( + nr sdk.CapDefinition[NotWrapped], + x sdk.CapDefinition[string], + z sdk.CapDefinition[string]) OtherPackageCap { + return &simpleOtherPackage{ + CapDefinition: sdk.ComponentCapDefinition[OtherPackage]{ + "Nr": nr.Ref(), + "X": x.Ref(), + "Z": z.Ref(), + }, + nr: nr, + x: x, + z: z, + } +} + +type simpleOtherPackage struct { + sdk.CapDefinition[OtherPackage] + nr sdk.CapDefinition[NotWrapped] + x sdk.CapDefinition[string] + z sdk.CapDefinition[string] +} + +func (c *simpleOtherPackage) Nr() sdk.CapDefinition[NotWrapped] { + return c.nr +} +func (c *simpleOtherPackage) X() sdk.CapDefinition[string] { + return c.x +} +func (c *simpleOtherPackage) Z() sdk.CapDefinition[string] { + return c.z +} + +func (c *simpleOtherPackage) 
private() {} diff --git a/pkg/capabilities/consensus/ocr3/aggregators/identical.go b/pkg/capabilities/consensus/ocr3/aggregators/identical.go index 389390882..aa05e7cf4 100644 --- a/pkg/capabilities/consensus/ocr3/aggregators/identical.go +++ b/pkg/capabilities/consensus/ocr3/aggregators/identical.go @@ -13,12 +13,12 @@ import ( ocrcommon "github.com/smartcontractkit/libocr/commontypes" ) +// Aggregates by the most frequent observation for each index of a data set type identicalAggregator struct { - config aggregatorConfig - lggr logger.Logger + config identicalAggConfig } -type aggregatorConfig struct { +type identicalAggConfig struct { // Length of the list of observations that each node is expected to provide. // Aggregator's output (i.e. EncodableOutcome) will be a values.Map with the same // number of elements and keyed by indices 0,1,2,... (unless KeyOverrides are provided). @@ -103,7 +103,7 @@ func (a *identicalAggregator) collectHighestCounts(counters []map[[32]byte]*coun } func NewIdenticalAggregator(config values.Map) (*identicalAggregator, error) { - parsedConfig, err := ParseConfig(config) + parsedConfig, err := ParseConfigIdenticalAggregator(config) if err != nil { return nil, fmt.Errorf("failed to parse config (%+v): %w", config, err) } @@ -112,10 +112,10 @@ func NewIdenticalAggregator(config values.Map) (*identicalAggregator, error) { }, nil } -func ParseConfig(config values.Map) (aggregatorConfig, error) { - parsedConfig := aggregatorConfig{} +func ParseConfigIdenticalAggregator(config values.Map) (identicalAggConfig, error) { + parsedConfig := identicalAggConfig{} if err := config.UnwrapTo(&parsedConfig); err != nil { - return aggregatorConfig{}, err + return identicalAggConfig{}, err } if parsedConfig.ExpectedObservationsLen == 0 { parsedConfig.ExpectedObservationsLen = 1 diff --git a/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go b/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go index 711b1ab25..95688e894 100644 --- 
a/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go +++ b/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go @@ -13,7 +13,7 @@ import ( ) func TestDataFeedsAggregator_Aggregate(t *testing.T) { - config := getConfig(t, nil) + config := getConfigIdenticalAggregator(t, nil) agg, err := aggregators.NewIdenticalAggregator(*config) require.NoError(t, err) @@ -37,7 +37,7 @@ func TestDataFeedsAggregator_Aggregate(t *testing.T) { } func TestDataFeedsAggregator_Aggregate_OverrideWithKeys(t *testing.T) { - config := getConfig(t, []string{"outcome"}) + config := getConfigIdenticalAggregator(t, []string{"outcome"}) agg, err := aggregators.NewIdenticalAggregator(*config) require.NoError(t, err) @@ -61,7 +61,7 @@ func TestDataFeedsAggregator_Aggregate_OverrideWithKeys(t *testing.T) { } func TestDataFeedsAggregator_Aggregate_NoConsensus(t *testing.T) { - config := getConfig(t, []string{"outcome"}) + config := getConfigIdenticalAggregator(t, []string{"outcome"}) agg, err := aggregators.NewIdenticalAggregator(*config) require.NoError(t, err) @@ -81,7 +81,7 @@ func TestDataFeedsAggregator_Aggregate_NoConsensus(t *testing.T) { require.ErrorContains(t, err, "can't reach consensus on observations with index 0") } -func getConfig(t *testing.T, overrideKeys []string) *values.Map { +func getConfigIdenticalAggregator(t *testing.T, overrideKeys []string) *values.Map { unwrappedConfig := map[string]any{ "expectedObservationsLen": len(overrideKeys), "keyOverrides": overrideKeys, diff --git a/pkg/capabilities/consensus/ocr3/aggregators/reduce_aggregator.go b/pkg/capabilities/consensus/ocr3/aggregators/reduce_aggregator.go new file mode 100644 index 000000000..7b331d37e --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/aggregators/reduce_aggregator.go @@ -0,0 +1,623 @@ +package aggregators + +import ( + "bytes" + "crypto/sha256" + "errors" + "fmt" + "math" + "math/big" + "sort" + "strconv" + "time" + + "github.com/shopspring/decimal" + "google.golang.org/protobuf/proto" 
+ + ocrcommon "github.com/smartcontractkit/libocr/commontypes" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/types" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink-common/pkg/values/pb" +) + +const ( + AGGREGATION_METHOD_MEDIAN = "median" + AGGREGATION_METHOD_MODE = "mode" + // DEVIATION_TYPE_NONE is no deviation check + DEVIATION_TYPE_NONE = "none" + // DEVIATION_TYPE_ANY is any difference from the previous value to the next value + DEVIATION_TYPE_ANY = "any" + // DEVIATION_TYPE_PERCENT is a numeric percentage difference + DEVIATION_TYPE_PERCENT = "percent" + // DEVIATION_TYPE_ABSOLUTE is a numeric unsigned difference + DEVIATION_TYPE_ABSOLUTE = "absolute" + REPORT_FORMAT_MAP = "map" + REPORT_FORMAT_ARRAY = "array" + REPORT_FORMAT_VALUE = "value" + MODE_QUORUM_OCR = "ocr" + MODE_QUORUM_ANY = "any" + + DEFAULT_REPORT_FORMAT = REPORT_FORMAT_MAP + DEFAULT_OUTPUT_FIELD_NAME = "Reports" + DEFAULT_MODE_QUORUM = MODE_QUORUM_ANY +) + +type ReduceAggConfig struct { + // Configuration on how to aggregate one or more data points + Fields []AggregationField `mapstructure:"fields" required:"true"` + // The top level field name that report data is put into + OutputFieldName string `mapstructure:"outputFieldName" json:"outputFieldName" default:"Reports"` + // The structure surrounding the report data that is put on to "OutputFieldName" + ReportFormat string `mapstructure:"reportFormat" json:"reportFormat" default:"map" jsonschema:"enum=map,enum=array,enum=value"` + // Optional key name, that when given will contain a nested map with designated Fields moved into it + // If given, one or more fields must be given SubMapField: true + SubMapKey string `mapstructure:"subMapKey" json:"subMapKey" default:""` +} + +type AggregationField struct { + // An optional check to only report when the difference from the previous report exceeds a 
certain threshold. + // Can only be used when the field is of a numeric type: string, decimal, int64, big.Int, time.Time, float64 + // If no deviation is provided on any field, there will always be a report once minimum observations are reached. + Deviation decimal.Decimal `mapstructure:"-" json:"-"` + DeviationString string `mapstructure:"deviation" json:"deviation,omitempty"` + // The format of the deviation being provided + // * percent - a percentage deviation + // * absolute - an unsigned numeric difference + // * none - no deviation check + // * any - any difference from the previous value to the next value + DeviationType string `mapstructure:"deviationType" json:"deviationType,omitempty" jsonschema:"enum=percent,enum=absolute,enum=none,enum=any"` + // The key to find a data point within the input data + // If omitted, the entire input will be used + InputKey string `mapstructure:"inputKey" json:"inputKey"` + // How the data set should be aggregated to a single value + // * median - take the centermost value of the sorted data set of observations. can only be used on numeric types. not a true median, because no average if two middle values. + // * mode - take the most frequent value. if tied, use the "first". use "ModeQuorom" to configure the minimum number of seen values. + Method string `mapstructure:"method" json:"method" jsonschema:"enum=median,enum=mode" required:"true"` + // When using Method=mode, this will configure the minimum number of values that must be seen + // * ocr - (default) enforces that the number of matching values must be at least f+1, otherwise consensus fails + // * any - do not enforce any limit on the minimum viable count. this may result in unexpected answers if every observation is unique. 
+ ModeQuorum string `mapstructure:"modeQuorum" json:"modeQuorum,omitempty" jsonschema:"enum=ocr,enum=any" default:"ocr"` + // The key that the aggregated data is put under + // If omitted, the InputKey will be used + OutputKey string `mapstructure:"outputKey" json:"outputKey"` + // If enabled, this field will be moved from the top level map + // into a nested map on the key defined by "SubMapKey" + SubMapField bool `mapstructure:"subMapField" json:"subMapField,omitempty"` +} + +type reduceAggregator struct { + config ReduceAggConfig +} + +var _ types.Aggregator = (*reduceAggregator)(nil) + +// Condenses multiple observations into a single encodable outcome +func (a *reduceAggregator) Aggregate(lggr logger.Logger, previousOutcome *types.AggregationOutcome, observations map[ocrcommon.OracleID][]values.Value, f int) (*types.AggregationOutcome, error) { + if len(observations) < 2*f+1 { + return nil, fmt.Errorf("not enough observations, have %d want %d", len(observations), 2*f+1) + } + + currentState, err := a.initializeCurrentState(lggr, previousOutcome) + if err != nil { + return nil, err + } + + report := map[string]any{} + shouldReport := false + + for _, field := range a.config.Fields { + vals := a.extractValues(lggr, observations, field.InputKey) + + // only proceed if every field has reached the minimum number of observations + if len(vals) < 2*f+1 { + return nil, fmt.Errorf("not enough observations provided %s, have %d want %d", field.InputKey, len(vals), 2*f+1) + } + + singleValue, err := reduce(field.Method, vals, f, field.ModeQuorum) + if err != nil { + return nil, fmt.Errorf("unable to reduce on method %s, err: %s", field.Method, err.Error()) + } + + shouldReportField, err := a.shouldReport(lggr, field, singleValue, currentState) + if err != nil { + return nil, fmt.Errorf("unable to determine if should report, err: %s", err.Error()) + } + + if shouldReportField || field.DeviationType == DEVIATION_TYPE_NONE { + (*currentState)[field.OutputKey] = singleValue + 
} + + if shouldReportField { + shouldReport = true + } + + if len(field.OutputKey) > 0 { + report[field.OutputKey] = singleValue + } else { + report[field.InputKey] = singleValue + } + } + + // if SubMapKey is provided, move fields in a nested map + if len(a.config.SubMapKey) > 0 { + subMap := map[string]any{} + for _, field := range a.config.Fields { + if field.SubMapField { + if len(field.OutputKey) > 0 { + subMap[field.OutputKey] = report[field.OutputKey] + delete(report, field.OutputKey) + } else { + subMap[field.InputKey] = report[field.InputKey] + delete(report, field.InputKey) + } + } + } + report[a.config.SubMapKey] = subMap + } + + // if none of the AggregationFields define deviation, always report + hasNoDeviation := true + for _, field := range a.config.Fields { + if field.DeviationType != DEVIATION_TYPE_NONE { + hasNoDeviation = false + break + } + } + if hasNoDeviation { + lggr.Debugw("no deviation defined, reporting") + shouldReport = true + } + + stateValuesMap, err := values.WrapMap(currentState) + if err != nil { + return nil, fmt.Errorf("aggregate state wrapmap error: %s", err.Error()) + } + stateBytes, err := proto.MarshalOptions{Deterministic: true}.Marshal(values.ProtoMap(stateValuesMap)) + if err != nil { + return nil, fmt.Errorf("aggregate state proto marshal error: %s", err.Error()) + } + + toWrap, err := formatReport(report, a.config.ReportFormat) + if err != nil { + return nil, fmt.Errorf("aggregate formatReport error: %s", err.Error()) + } + reportValuesMap, err := values.NewMap(map[string]any{ + a.config.OutputFieldName: toWrap, + }) + if err != nil { + return nil, fmt.Errorf("aggregate new map error: %s", err.Error()) + } + reportProtoMap := values.Proto(reportValuesMap).GetMapValue() + + lggr.Debugw("Aggregation complete", "shouldReport", shouldReport) + + return &types.AggregationOutcome{ + EncodableOutcome: reportProtoMap, + Metadata: stateBytes, + ShouldReport: shouldReport, + }, nil +} + +func (a *reduceAggregator) 
shouldReport(lggr logger.Logger, field AggregationField, singleValue values.Value, currentState *map[string]values.Value) (bool, error) { + if field.DeviationType == DEVIATION_TYPE_NONE { + return false, nil + } + + oldValue := (*currentState)[field.OutputKey] + // this means its the first round and the field has not been initialised + if oldValue == nil { + return true, nil + } + + if field.DeviationType == DEVIATION_TYPE_ANY { + unwrappedOldValue, err := oldValue.Unwrap() + if err != nil { + return false, err + } + + unwrappedSingleValue, err := singleValue.Unwrap() + if err != nil { + return false, err + } + + // we will only report in case of a change in value + switch v := unwrappedOldValue.(type) { + case []byte: + if !bytes.Equal(v, unwrappedSingleValue.([]byte)) { + return true, nil + } + case map[string]interface{}, []any: + marshalledOldValue, err := proto.MarshalOptions{Deterministic: true}.Marshal(values.Proto(oldValue)) + if err != nil { + return false, err + } + + marshalledSingleValue, err := proto.MarshalOptions{Deterministic: true}.Marshal(values.Proto(singleValue)) + if err != nil { + return false, err + } + if !bytes.Equal(marshalledOldValue, marshalledSingleValue) { + return true, nil + } + default: + if unwrappedOldValue != unwrappedSingleValue { + return true, nil + } + } + + return false, nil + } + + currDeviation, err := deviation(field.DeviationType, oldValue, singleValue) + if err != nil { + return false, fmt.Errorf("unable to determine deviation %s", err.Error()) + } + + if currDeviation.GreaterThan(field.Deviation) { + lggr.Debugw("checked deviation", "key", field.OutputKey, "deviationType", field.DeviationType, "currentDeviation", currDeviation.String(), "targetDeviation", field.Deviation.String(), "shouldReport", true) + return true, nil + } + + return false, nil +} + +func (a *reduceAggregator) initializeCurrentState(lggr logger.Logger, previousOutcome *types.AggregationOutcome) (*map[string]values.Value, error) { + currentState := 
map[string]values.Value{} + + if previousOutcome != nil { + pb := &pb.Map{} + err := proto.Unmarshal(previousOutcome.Metadata, pb) + if err != nil { + return nil, fmt.Errorf("initializeCurrentState Unmarshal error: %w", err) + } + mv, err := values.FromMapValueProto(pb) + if err != nil { + return nil, fmt.Errorf("initializeCurrentState FromMapValueProto error: %w", err) + } + currentState = mv.Underlying + } + + lggr.Debugw("current state initialized", "state", currentState, "previousOutcome", previousOutcome) + return ¤tState, nil +} + +func (a *reduceAggregator) extractValues(lggr logger.Logger, observations map[ocrcommon.OracleID][]values.Value, aggregationKey string) (vals []values.Value) { + for nodeID, nodeObservations := range observations { + // we only expect a single observation per node + if len(nodeObservations) == 0 || nodeObservations[0] == nil { + lggr.Warnf("node %d contributed with empty observations", nodeID) + continue + } + if len(nodeObservations) > 1 { + lggr.Warnf("node %d contributed with more than one observation", nodeID) + continue + } + + val, err := nodeObservations[0].Unwrap() + if err != nil { + lggr.Warnf("node %d contributed a Value that could not be unwrapped", nodeID) + continue + } + + // if the observation data is a complex type, extract the value using the inputKey + // values are then re-wrapped here to handle aggregating against Value types + // which is used for mode aggregation + switch val := val.(type) { + case map[string]interface{}: + _, ok := val[aggregationKey] + if !ok { + continue + } + + rewrapped, err := values.Wrap(val[aggregationKey]) + if err != nil { + lggr.Warnf("unable to wrap value %s", val[aggregationKey]) + continue + } + vals = append(vals, rewrapped) + case []interface{}: + i, err := strconv.Atoi(aggregationKey) + if err != nil { + lggr.Warnf("aggregation key %s could not be used to index a list type", aggregationKey) + continue + } + rewrapped, err := values.Wrap(val[i]) + if err != nil { + 
lggr.Warnf("unable to wrap value %s", val[i]) + continue + } + vals = append(vals, rewrapped) + default: + // not a complex type, use raw value + if len(aggregationKey) == 0 { + vals = append(vals, nodeObservations[0]) + } else { + lggr.Warnf("aggregation key %s provided, but value is not an indexable type", aggregationKey) + } + } + } + + return vals +} + +func reduce(method string, items []values.Value, f int, modeQuorum string) (values.Value, error) { + switch method { + case AGGREGATION_METHOD_MEDIAN: + return median(items) + case AGGREGATION_METHOD_MODE: + value, count, err := mode(items) + if err != nil { + return value, err + } + err = modeHasQuorum(modeQuorum, count, f) + if err != nil { + return value, err + } + return value, err + default: + // invariant, config should be validated + return nil, fmt.Errorf("unsupported aggregation method %s", method) + } +} + +func median(items []values.Value) (values.Value, error) { + if len(items) == 0 { + // invariant, as long as f > 0 there should be items + return nil, errors.New("items cannot be empty") + } + err := sortAsDecimal(items) + if err != nil { + return nil, err + } + return items[(len(items)-1)/2], nil +} + +func sortAsDecimal(items []values.Value) error { + var err error + sort.Slice(items, func(i, j int) bool { + decimalI, errI := toDecimal(items[i]) + if errI != nil { + err = errI + } + decimalJ, errJ := toDecimal(items[j]) + if errJ != nil { + err = errJ + } + return decimalI.GreaterThan(decimalJ) + }) + if err != nil { + return err + } + return nil +} + +func toDecimal(item values.Value) (decimal.Decimal, error) { + unwrapped, err := item.Unwrap() + if err != nil { + return decimal.NewFromInt(0), err + } + + switch v := unwrapped.(type) { + case string: + deci, err := decimal.NewFromString(unwrapped.(string)) + if err != nil { + return decimal.NewFromInt(0), err + } + return deci, nil + case decimal.Decimal: + return unwrapped.(decimal.Decimal), nil + case int64: + return 
decimal.NewFromInt(unwrapped.(int64)), nil + case *big.Int: + big := unwrapped.(*big.Int) + return decimal.NewFromBigInt(big, 10), nil + case time.Time: + return decimal.NewFromInt(unwrapped.(time.Time).Unix()), nil + case float64: + return decimal.NewFromFloat(unwrapped.(float64)), nil + default: + // unsupported type + return decimal.NewFromInt(0), fmt.Errorf("unable to convert type %T to decimal", v) + } +} + +func mode(items []values.Value) (values.Value, int, error) { + if len(items) == 0 { + // invariant, as long as f > 0 there should be items + return nil, 0, errors.New("items cannot be empty") + } + + counts := make(map[[32]byte]*counter) + for _, item := range items { + marshalled, err := proto.MarshalOptions{Deterministic: true}.Marshal(values.Proto(item)) + if err != nil { + // invariant: values should always be able to be proto marshalled + return nil, 0, err + } + sha := sha256.Sum256(marshalled) + elem, ok := counts[sha] + if !ok { + counts[sha] = &counter{ + fullObservation: item, + count: 1, + } + } else { + elem.count++ + } + } + + var maxCount int + for _, ctr := range counts { + if ctr.count > maxCount { + maxCount = ctr.count + } + } + + var modes []values.Value + for _, ctr := range counts { + if ctr.count == maxCount { + modes = append(modes, ctr.fullObservation) + } + } + + // If more than one mode found, choose first + + return modes[0], maxCount, nil +} + +func modeHasQuorum(quorumType string, count int, f int) error { + switch quorumType { + case MODE_QUORUM_ANY: + return nil + case MODE_QUORUM_OCR: + if count < f+1 { + return fmt.Errorf("mode quorum not reached. 
have: %d, want: %d", count, f+1) + } + return nil + default: + // invariant, config should be validated + return fmt.Errorf("unsupported mode quorum %s", quorumType) + } +} + +func deviation(method string, previousValue values.Value, nextValue values.Value) (decimal.Decimal, error) { + prevDeci, err := toDecimal(previousValue) + if err != nil { + return decimal.NewFromInt(0), err + } + nextDeci, err := toDecimal(nextValue) + if err != nil { + return decimal.NewFromInt(0), err + } + + diff := prevDeci.Sub(nextDeci).Abs() + + switch method { + case DEVIATION_TYPE_ABSOLUTE: + return diff, nil + case DEVIATION_TYPE_PERCENT: + if prevDeci.Cmp(decimal.NewFromInt(0)) == 0 { + if diff.Cmp(decimal.NewFromInt(0)) == 0 { + return decimal.NewFromInt(0), nil + } + return decimal.NewFromInt(math.MaxInt), nil + } + return diff.Div(prevDeci), nil + default: + return decimal.NewFromInt(0), fmt.Errorf("unsupported deviation method %s", method) + } +} + +func formatReport(report map[string]any, format string) (any, error) { + switch format { + case REPORT_FORMAT_ARRAY: + return []map[string]any{report}, nil + case REPORT_FORMAT_MAP: + return report, nil + case REPORT_FORMAT_VALUE: + for _, value := range report { + return value, nil + } + // invariant: validation enforces only one output value + return nil, errors.New("value format must contain at least one output") + default: + return nil, errors.New("unsupported report format") + } +} + +func isOneOf(toCheck string, options []string) bool { + for _, option := range options { + if toCheck == option { + return true + } + } + return false +} + +func NewReduceAggregator(config values.Map) (types.Aggregator, error) { + parsedConfig, err := ParseConfigReduceAggregator(config) + if err != nil { + return nil, fmt.Errorf("failed to parse config (%+v): %w", config, err) + } + return &reduceAggregator{ + config: parsedConfig, + }, nil +} + +func ParseConfigReduceAggregator(config values.Map) (ReduceAggConfig, error) { + parsedConfig := 
ReduceAggConfig{} + if err := config.UnwrapTo(&parsedConfig); err != nil { + return ReduceAggConfig{}, err + } + + // validations & fill defaults + if len(parsedConfig.Fields) == 0 { + return ReduceAggConfig{}, errors.New("reduce aggregator must contain config for Fields to aggregate") + } + if len(parsedConfig.OutputFieldName) == 0 { + parsedConfig.OutputFieldName = DEFAULT_OUTPUT_FIELD_NAME + } + if len(parsedConfig.ReportFormat) == 0 { + parsedConfig.ReportFormat = DEFAULT_REPORT_FORMAT + } + if len(parsedConfig.Fields) > 1 && parsedConfig.ReportFormat == REPORT_FORMAT_VALUE { + return ReduceAggConfig{}, errors.New("report type of value can only have one field") + } + hasSubMapField := false + outputKeyCount := make(map[any]bool) + for i, field := range parsedConfig.Fields { + if (parsedConfig.ReportFormat == REPORT_FORMAT_ARRAY || parsedConfig.ReportFormat == REPORT_FORMAT_MAP) && len(field.OutputKey) == 0 { + return ReduceAggConfig{}, fmt.Errorf("report type %s or %s must have an OutputKey to put the result under", REPORT_FORMAT_ARRAY, REPORT_FORMAT_MAP) + } + if len(field.DeviationType) == 0 { + field.DeviationType = DEVIATION_TYPE_NONE + parsedConfig.Fields[i].DeviationType = DEVIATION_TYPE_NONE + } + if !isOneOf(field.DeviationType, []string{DEVIATION_TYPE_ABSOLUTE, DEVIATION_TYPE_PERCENT, DEVIATION_TYPE_NONE, DEVIATION_TYPE_ANY}) { + return ReduceAggConfig{}, fmt.Errorf("invalid config DeviationType. received: %s. 
options: [%s, %s, %s]", field.DeviationType, DEVIATION_TYPE_ABSOLUTE, DEVIATION_TYPE_PERCENT, DEVIATION_TYPE_NONE) + } + if !isOneOf(field.DeviationType, []string{DEVIATION_TYPE_NONE, DEVIATION_TYPE_ANY}) && len(field.DeviationString) == 0 { + return ReduceAggConfig{}, errors.New("aggregation field deviation must contain DeviationString amount") + } + if field.DeviationType != DEVIATION_TYPE_NONE && len(field.DeviationString) > 0 { + deci, err := decimal.NewFromString(field.DeviationString) + if err != nil { + return ReduceAggConfig{}, fmt.Errorf("reduce aggregator could not parse deviation decimal from string %s", field.DeviationString) + } + parsedConfig.Fields[i].Deviation = deci + } + if len(field.Method) == 0 || !isOneOf(field.Method, []string{AGGREGATION_METHOD_MEDIAN, AGGREGATION_METHOD_MODE}) { + return ReduceAggConfig{}, fmt.Errorf("aggregation field must contain a method. options: [%s, %s]", AGGREGATION_METHOD_MEDIAN, AGGREGATION_METHOD_MODE) + } + if field.Method == AGGREGATION_METHOD_MODE && len(field.ModeQuorum) == 0 { + field.ModeQuorum = MODE_QUORUM_OCR + parsedConfig.Fields[i].ModeQuorum = MODE_QUORUM_OCR + } + if field.Method == AGGREGATION_METHOD_MODE && !isOneOf(field.ModeQuorum, []string{MODE_QUORUM_ANY, MODE_QUORUM_OCR}) { + return ReduceAggConfig{}, fmt.Errorf("mode quorum must be one of options: [%s, %s]", MODE_QUORUM_ANY, MODE_QUORUM_OCR) + } + if len(field.DeviationString) > 0 && isOneOf(field.DeviationType, []string{DEVIATION_TYPE_NONE, DEVIATION_TYPE_ANY}) { + return ReduceAggConfig{}, fmt.Errorf("aggregation field cannot have deviation with a deviation type of %s", field.DeviationType) + } + if field.SubMapField { + hasSubMapField = true + } + if outputKeyCount[field.OutputKey] { + return ReduceAggConfig{}, errors.New("multiple fields have the same outputkey, which will overwrite each other") + } + outputKeyCount[field.OutputKey] = true + } + if len(parsedConfig.SubMapKey) > 0 && !hasSubMapField { + return ReduceAggConfig{}, 
fmt.Errorf("sub Map key %s given, but no fields are marked as sub map fields", parsedConfig.SubMapKey) + } + if hasSubMapField && len(parsedConfig.SubMapKey) == 0 { + return ReduceAggConfig{}, errors.New("fields are marked as sub Map fields, but no sub map key given") + } + if !isOneOf(parsedConfig.ReportFormat, []string{REPORT_FORMAT_ARRAY, REPORT_FORMAT_MAP, REPORT_FORMAT_VALUE}) { + return ReduceAggConfig{}, fmt.Errorf("invalid config ReportFormat. received: %s. options: %s, %s, %s", parsedConfig.ReportFormat, REPORT_FORMAT_ARRAY, REPORT_FORMAT_MAP, REPORT_FORMAT_VALUE) + } + + return parsedConfig, nil +} diff --git a/pkg/capabilities/consensus/ocr3/aggregators/reduce_test.go b/pkg/capabilities/consensus/ocr3/aggregators/reduce_test.go new file mode 100644 index 000000000..19d254191 --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/aggregators/reduce_test.go @@ -0,0 +1,1885 @@ +package aggregators_test + +import ( + "math/big" + "testing" + "time" + + "github.com/shopspring/decimal" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/libocr/commontypes" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/types" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/datastreams" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink-common/pkg/values/pb" +) + +var ( + feedIDA = datastreams.FeedID("0x0001013ebd4ed3f5889fb5a8a52b42675c60c1a8c42bc79eaa72dcd922ac4292") + idABytes = feedIDA.Bytes() + feedIDB = datastreams.FeedID("0x0003c317fec7fad514c67aacc6366bf2f007ce37100e3cddcacd0ccaa1f3746d") + idBBytes = feedIDB.Bytes() + now = time.Now() +) + +func TestReduceAggregator_Aggregate(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + cases := []struct { + name 
string + fields []aggregators.AggregationField + extraConfig map[string]any + observationsFactory func() map[commontypes.OracleID][]values.Value + shouldReport bool + expectedState any + expectedOutcome map[string]any + previousOutcome func(t *testing.T) *types.AggregationOutcome + }{ + { + name: "aggregate on int64 median", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + { + name: "aggregate on decimal median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": decimal.NewFromInt(32), + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + 
expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": decimal.NewFromInt(32), + }, + }, + }, + expectedState: map[string]any{ + "Price": decimal.NewFromInt(32), + }, + }, + { + name: "aggregate on float64 median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": float64(1.2), + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": float64(1.2), + }, + }, + }, + expectedState: map[string]any{ + "Price": float64(1.2), + }, + }, + { + name: "aggregate on time median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": now, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": time.Time(now).UTC(), + }, + }, + }, + expectedState: map[string]any{ + "Price": now.UTC(), + }, + }, + { + name: "aggregate on big int median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() 
map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": big.NewInt(100), + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": big.NewInt(100), + }, + }, + }, + expectedState: map[string]any{ + "Price": big.NewInt(100), + }, + }, + { + name: "aggregate with previous outcome", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + previousOutcome: func(t *testing.T) *types.AggregationOutcome { + m, err := values.NewMap(map[string]any{}) + require.NoError(t, err) + pm := values.Proto(m) + bm, err := proto.Marshal(pm) + require.NoError(t, err) + return &types.AggregationOutcome{Metadata: bm} + }, + }, + { + name: "aggregate on bytes mode", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: 
"FeedID", + Method: "mode", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue1, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + }) + require.NoError(t, err) + mockValue2, err := values.WrapMap(map[string]any{ + "FeedID": idBBytes[:], + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue2}, 4: {mockValue1}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + }, + }, + { + name: "aggregate on string mode", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "mode", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue1, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": "1", + }) + require.NoError(t, err) + mockValue2, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": "2", + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue2}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": "1", + }, + }, + }, + expectedState: map[string]any{ + "Price": "1", + }, + }, + { + name: "aggregate on bool mode", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "mode", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue1, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": true, + }) + require.NoError(t, err) + mockValue2, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": false, + }) + require.NoError(t, err) + return 
map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue2}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": true, + }, + }, + }, + expectedState: map[string]any{ + "Price": true, + }, + }, + { + name: "aggregate on non-indexable type", + fields: []aggregators.AggregationField{ + { + // Omitting "InputKey" + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(1) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": int64(1), + }, + }, + }, + expectedState: map[string]any{"Price": int64(1)}, + }, + { + name: "aggregate on list type", + fields: []aggregators.AggregationField{ + { + InputKey: "1", + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.NewList([]any{"1", "2", "3"}) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": "2", + }, + }, + }, + expectedState: map[string]any{ + "Price": "2", + }, + }, + { + name: "submap", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + SubMapField: true, + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + }, + extraConfig: map[string]any{ 
+ "SubMapKey": "Report", + }, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Report": map[string]any{ + "Price": int64(100), + }, + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + "Price": int64(100), + "Timestamp": int64(12341414929), + }, + }, + { + name: "report format value", + fields: []aggregators.AggregationField{ + { + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{ + "reportFormat": "value", + }, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(1) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": int64(1), + }, + expectedState: map[string]any{"Price": int64(1)}, + }, + { + name: "report format array", + fields: []aggregators.AggregationField{ + { + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{ + "reportFormat": "array", + }, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(1) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{map[string]any{"Price": int64(1)}}, + }, + expectedState: map[string]any{"Price": int64(1)}, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + config := 
getConfigReduceAggregator(t, tt.fields, tt.extraConfig) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + pb := &pb.Map{} + + var po *types.AggregationOutcome + if tt.previousOutcome != nil { + po = tt.previousOutcome(t) + } + + outcome, err := agg.Aggregate(logger.Nop(), po, tt.observationsFactory(), 1) + require.NoError(t, err) + require.Equal(t, tt.shouldReport, outcome.ShouldReport) + + // validate metadata + err = proto.Unmarshal(outcome.Metadata, pb) + require.NoError(t, err) + vmap, err := values.FromMapValueProto(pb) + require.NoError(t, err) + state, err := vmap.Unwrap() + require.NoError(t, err) + require.Equal(t, tt.expectedState, state) + + // validate encodable outcome + val, err := values.FromMapValueProto(outcome.EncodableOutcome) + require.NoError(t, err) + topLevelMap, err := val.Unwrap() + require.NoError(t, err) + mm, ok := topLevelMap.(map[string]any) + require.True(t, ok) + + require.NoError(t, err) + + require.Equal(t, tt.expectedOutcome, mm) + }) + } + }) + + t.Run("error path", func(t *testing.T) { + cases := []struct { + name string + previousOutcome *types.AggregationOutcome + fields []aggregators.AggregationField + extraConfig map[string]any + observationsFactory func() map[commontypes.OracleID][]values.Value + }{ + { + name: "not enough observations", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + return map[commontypes.OracleID][]values.Value{} + }, + }, + { + name: "invalid previous outcome not pb", + previousOutcome: &types.AggregationOutcome{ + Metadata: []byte{1, 2, 3}, + }, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(int64(100)) + 
require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + }, + { + name: "not enough extracted values", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + InputKey: "Price", + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{"Price": int64(100)}) + require.NoError(t, err) + mockValueEmpty := values.EmptyMap() + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValueEmpty}} + }, + }, + { + name: "reduce error median", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(true) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + }, + { + name: "reduce error mode with mode quorum of ocr", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + Method: "mode", + ModeQuorum: "ocr", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(true) + require.NoError(t, err) + mockValue2, err := values.Wrap(true) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue2}} + }, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + config := getConfigReduceAggregator(t, tt.fields, tt.extraConfig) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + _, err = agg.Aggregate(logger.Nop(), tt.previousOutcome, tt.observationsFactory(), 1) + require.Error(t, err) + }) + } + }) +} + +func 
TestInputChanges(t *testing.T) { + fields := []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + } + config := getConfigReduceAggregator(t, fields, map[string]any{}) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + // First Round + mockValue1, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, err) + pb := &pb.Map{} + outcome, err := agg.Aggregate(logger.Nop(), nil, map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue1}}, 1) + require.NoError(t, err) + shouldReport := true + require.Equal(t, shouldReport, outcome.ShouldReport) + + // validate metadata + proto.Unmarshal(outcome.Metadata, pb) + vmap, err := values.FromMapValueProto(pb) + require.NoError(t, err) + state, err := vmap.Unwrap() + require.NoError(t, err) + expectedState1 := map[string]any{ + "FeedID": idABytes[:], + "Price": int64(100), + "Timestamp": int64(12341414929), + } + require.Equal(t, expectedState1, state) + + // validate encodable outcome + val, err := values.FromMapValueProto(outcome.EncodableOutcome) + require.NoError(t, err) + topLevelMap, err := val.Unwrap() + require.NoError(t, err) + mm, ok := topLevelMap.(map[string]any) + require.True(t, ok) + + require.NoError(t, err) + expectedOutcome1 := map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + } + require.Equal(t, expectedOutcome1, mm) + + // Second Round + mockValue2, err := values.WrapMap(map[string]any{ + "FeedID": true, + "Timestamp": int64(12341414929), + 
"BenchmarkPrice": int64(100), + }) + require.NoError(t, err) + outcome, err = agg.Aggregate(logger.Nop(), nil, map[commontypes.OracleID][]values.Value{1: {mockValue2}, 2: {mockValue2}, 3: {mockValue2}}, 1) + require.NoError(t, err) + require.Equal(t, shouldReport, outcome.ShouldReport) + + // validate metadata + proto.Unmarshal(outcome.Metadata, pb) + vmap, err = values.FromMapValueProto(pb) + require.NoError(t, err) + state, err = vmap.Unwrap() + require.NoError(t, err) + expectedState2 := map[string]any{ + "FeedID": true, + "Price": int64(100), + "Timestamp": int64(12341414929), + } + require.Equal(t, expectedState2, state) + + // validate encodable outcome + val, err = values.FromMapValueProto(outcome.EncodableOutcome) + require.NoError(t, err) + topLevelMap, err = val.Unwrap() + require.NoError(t, err) + mm, ok = topLevelMap.(map[string]any) + require.True(t, ok) + + require.NoError(t, err) + expectedOutcome2 := map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": true, + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + } + + require.Equal(t, expectedOutcome2, mm) + +} + +func TestMedianAggregator_ParseConfig(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + cases := []struct { + name string + inputFactory func() map[string]any + outputFactory func() aggregators.ReduceAggConfig + }{ + { + name: "no inputkey", + inputFactory: func() map[string]any { + return map[string]any{ + "fields": []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + } + }, + outputFactory: func() aggregators.ReduceAggConfig { + return aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "", + OutputKey: "Price", + Method: "median", + DeviationString: "", + Deviation: decimal.Decimal{}, + DeviationType: "none", + }, + }, + OutputFieldName: "Reports", + ReportFormat: "map", + } + }, + }, + { + name: "reportFormat map, aggregation method mode, deviation", + inputFactory: func() 
map[string]any { + return map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "mode", + DeviationString: "1.1", + DeviationType: "absolute", + }, + }, + } + }, + outputFactory: func() aggregators.ReduceAggConfig { + return aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "mode", + ModeQuorum: "ocr", + DeviationString: "1.1", + Deviation: decimal.NewFromFloat(1.1), + DeviationType: "absolute", + }, + }, + OutputFieldName: "Reports", + ReportFormat: "map", + } + }, + }, + { + name: "reportFormat array, aggregation method median, no deviation", + inputFactory: func() map[string]any { + return map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "median", + }, + }, + "outputFieldName": "Reports", + "reportFormat": "array", + } + }, + outputFactory: func() aggregators.ReduceAggConfig { + return aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "median", + DeviationString: "", + Deviation: decimal.Decimal{}, + DeviationType: "none", + }, + }, + OutputFieldName: "Reports", + ReportFormat: "array", + } + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + vMap, err := values.NewMap(tt.inputFactory()) + require.NoError(t, err) + parsedConfig, err := aggregators.ParseConfigReduceAggregator(*vMap) + require.NoError(t, err) + require.Equal(t, tt.outputFactory(), parsedConfig) + }) + } + }) + + t.Run("unhappy path", func(t *testing.T) { + cases := []struct { + name string + configFactory func() *values.Map + }{ + { + name: "empty", + configFactory: func() *values.Map { + return values.EmptyMap() + }, + }, + { + name: "invalid report format", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": 
[]aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + }, + "reportFormat": "invalid", + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with no method", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with empty method", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with invalid method", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "invalid", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation string but no deviation type", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationString: "1", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation string but empty deviation type", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationString: "1", + DeviationType: "", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with invalid deviation type", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + 
OutputKey: "FeedID", + Method: "median", + DeviationString: "1", + DeviationType: "invalid", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation type but no deviation string", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationType: "absolute", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation type but empty deviation string", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationType: "absolute", + DeviationString: "", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with invalid deviation string", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationType: "absolute", + DeviationString: "1-1", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with sub report, but no sub report key", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + SubMapField: true, + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "sub report key, but no sub report fields", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "subMapKey": "Report", + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "clashing output keys", + configFactory: 
func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "map/array type, no output key", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + Method: "median", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "report type value with multiple fields", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "reportFormat": "value", + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + Method: "median", + OutputKey: "FeedID", + }, + { + InputKey: "Price", + Method: "median", + OutputKey: "Price", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "invalid mode quorum", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "Price", + Method: "mode", + ModeQuorum: "invalid", + OutputKey: "Price", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + _, err := aggregators.ParseConfigReduceAggregator(*tt.configFactory()) + require.Error(t, err) + }, + ) + } + }) +} + +func TestAggregateShouldReport(t *testing.T) { + extraConfig := map[string]any{ + "reportFormat": "array", + } + + cases := []struct { + name string + fields []aggregators.AggregationField + mockValueFirstRound *values.Map + shouldReportFirstRound bool + stateFirstRound map[string]interface{} + mockValueSecondRound *values.Map + shouldReportSecondRound bool + stateSecondRound map[string]interface{} + mockValueThirdRound *values.Map + shouldReportThirdRound bool + 
stateThirdRound map[string]interface{} + }{ + { + name: "OK-report_only_when_deviation_exceeded", + fields: []aggregators.AggregationField{ + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{"Time": decimal.NewFromInt(10)}, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "Timestamp": decimal.NewFromInt(30), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + // the delta between 10 and 30 is 20, which is less than the deviation of 30, so the state should remain the same + stateSecondRound: map[string]interface{}(map[string]interface{}{"Time": decimal.NewFromInt(10)}), + + mockValueThirdRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "Timestamp": decimal.NewFromInt(45), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportThirdRound: true, + // the delta between 10 and 45 is 35, which is more than the deviation of 30, thats why the state is updated + stateThirdRound: map[string]interface{}{"Time": decimal.NewFromInt(45)}, + }, + { + name: "NOK-do_not_report_if_deviation_type_any_byte_field_does_not_change", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + 
return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}(map[string]interface{}{ + "FeedID": idABytes[:], + "Time": decimal.NewFromInt(10), + }), + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": idABytes[:], + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "NOK-do_not_report_if_deviation_type_any_bool_field_does_not_change", + fields: []aggregators.AggregationField{ + { + InputKey: "BoolField", + OutputKey: "BoolField", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "BoolField": true, + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "BoolField": true, + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "BoolField": true, + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "BoolField": true, + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_byte_field_is_changed", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + 
DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": idABytes[:], + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idBBytes[:], + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: true, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": idBBytes[:], + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_bool_field_is_changed", + fields: []aggregators.AggregationField{ + { + InputKey: "BoolField", + OutputKey: "BoolField", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "BoolField": true, + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "BoolField": true, + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "BoolField": false, + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: true, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "BoolField": false, + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_string_field_is_changed", + fields: 
[]aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": "A", + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": "A", + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": "B", + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: true, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": "B", + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "NOK-do_not_report_if_deviation_type_any_string_field_does_not_change", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": "A", + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": "A", + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": "A", + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + stateSecondRound: 
map[string]interface{}(map[string]interface{}{ + "FeedID": "A", + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_map_field_is_changed", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": map[string]any{"A": "A"}, + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": map[string]any{"A": "A"}, + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": map[string]any{"A": "B"}, + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: true, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": map[string]any{"A": "B"}, + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "NOK-do_not_report_if_deviation_type_any_map_field_does_not_change", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": map[string]any{"A": "A"}, + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": map[string]any{"A": "A"}, + "Time": 
decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": map[string]any{"A": "A"}, + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": map[string]any{"A": "A"}, + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_slice_field_is_changed", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": []any{"A"}, + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": []any{"A"}, + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": []any{"B"}, + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: true, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": []any{"B"}, + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "NOK-do_not_report_if_deviation_type_any_slice_field_does_not_change", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := 
values.WrapMap(map[string]any{ + "FeedID": []any{"A"}, + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": []any{"A"}, + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": []any{"A"}, + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": []any{"A"}, + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_numeric_field_is_changed", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", + }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": int64(1), + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": int64(1), + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": int64(2), + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: true, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": int64(2), + "Time": decimal.NewFromInt(10), + }), + }, + { + name: "OK-report_if_deviation_type_any_numeric_field_is_changed", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + ModeQuorum: "any", + DeviationType: "any", 
+ }, + { + InputKey: "Timestamp", + OutputKey: "Time", + Method: "median", + DeviationString: "30", + DeviationType: "absolute", + }, + }, + mockValueFirstRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": int64(1), + "Timestamp": decimal.NewFromInt(10), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportFirstRound: true, + stateFirstRound: map[string]interface{}{ + "FeedID": int64(1), + "Time": decimal.NewFromInt(10), + }, + + mockValueSecondRound: func() *values.Map { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": int64(1), + "Timestamp": decimal.NewFromInt(20), + }) + require.NoError(t, err) + return mockValue + }(), + shouldReportSecondRound: false, + stateSecondRound: map[string]interface{}(map[string]interface{}{ + "FeedID": int64(1), + "Time": decimal.NewFromInt(10), + }), + }, + } + + for _, tc := range cases { + config := getConfigReduceAggregator(t, tc.fields, extraConfig) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + pb := &pb.Map{} + + // 1st round + firstOutcome, err := agg.Aggregate(logger.Nop(), nil, map[commontypes.OracleID][]values.Value{1: {tc.mockValueFirstRound}, 2: {tc.mockValueFirstRound}, 3: {tc.mockValueFirstRound}}, 1) + require.NoError(t, err) + require.Equal(t, tc.shouldReportFirstRound, firstOutcome.ShouldReport) + + // validate metadata + proto.Unmarshal(firstOutcome.Metadata, pb) + vmap, err := values.FromMapValueProto(pb) + require.NoError(t, err) + state, err := vmap.Unwrap() + require.NoError(t, err) + require.Equal(t, map[string]interface{}(tc.stateFirstRound), state) + + // 2nd round + secondOutcome, err := agg.Aggregate(logger.Nop(), firstOutcome, map[commontypes.OracleID][]values.Value{1: {tc.mockValueSecondRound}, 2: {tc.mockValueSecondRound}, 3: {tc.mockValueSecondRound}}, 1) + require.NoError(t, err) + require.Equal(t, tc.shouldReportSecondRound, secondOutcome.ShouldReport) + + // validate metadata + 
proto.Unmarshal(secondOutcome.Metadata, pb) + vmap, err = values.FromMapValueProto(pb) + require.NoError(t, err) + state, err = vmap.Unwrap() + require.NoError(t, err) + require.Equal(t, tc.stateSecondRound, state) + + // skip if there is no third round + if tc.mockValueThirdRound == nil { + continue + } + + // 3rd round + thirdOutcome, err := agg.Aggregate(logger.Nop(), secondOutcome, map[commontypes.OracleID][]values.Value{1: {tc.mockValueThirdRound}, 2: {tc.mockValueThirdRound}, 3: {tc.mockValueThirdRound}}, 1) + require.NoError(t, err) + require.Equal(t, true, thirdOutcome.ShouldReport) + + // validate metadata + proto.Unmarshal(thirdOutcome.Metadata, pb) + vmap, err = values.FromMapValueProto(pb) + require.NoError(t, err) + state, err = vmap.Unwrap() + require.NoError(t, err) + require.Equal(t, tc.stateThirdRound, state) + } +} + +func getConfigReduceAggregator(t *testing.T, fields []aggregators.AggregationField, override map[string]any) *values.Map { + unwrappedConfig := map[string]any{ + "fields": fields, + "outputFieldName": "Reports", + "reportFormat": "array", + } + for key, val := range override { + unwrappedConfig[key] = val + } + config, err := values.NewMap(unwrappedConfig) + require.NoError(t, err) + return config +} diff --git a/pkg/capabilities/consensus/ocr3/capability.go b/pkg/capabilities/consensus/ocr3/capability.go index 6c4c8f1a8..ed62faa43 100644 --- a/pkg/capabilities/consensus/ocr3/capability.go +++ b/pkg/capabilities/consensus/ocr3/capability.go @@ -146,7 +146,7 @@ func (o *capability) getAggregator(workflowID string) (types.Aggregator, error) func (o *capability) getEncoderByWorkflowID(workflowID string) (types.Encoder, error) { enc, ok := o.encoders[workflowID] if !ok { - return nil, fmt.Errorf("no aggregator found for workflowID %s", workflowID) + return nil, fmt.Errorf("no encoder found for workflowID %s", workflowID) } return enc, nil diff --git a/pkg/capabilities/consensus/ocr3/capability_test.go 
b/pkg/capabilities/consensus/ocr3/capability_test.go index e383a487c..720ed9cb0 100644 --- a/pkg/capabilities/consensus/ocr3/capability_test.go +++ b/pkg/capabilities/consensus/ocr3/capability_test.go @@ -68,62 +68,80 @@ func TestOCR3Capability_Schema(t *testing.T) { } func TestOCR3Capability(t *testing.T) { - n := time.Now() - fc := clockwork.NewFakeClockAt(n) - lggr := logger.Test(t) - - ctx := tests.Context(t) - - s := requests.NewStore() - - cp := newCapability(s, fc, 1*time.Second, mockAggregatorFactory, mockEncoderFactory, lggr, 10) - require.NoError(t, cp.Start(ctx)) - - config, err := values.NewMap( - map[string]any{ - "aggregation_method": "data_feeds", - "aggregation_config": map[string]any{}, - "encoder_config": map[string]any{}, - "encoder": "evm", - "report_id": "ffff", + cases := []struct { + name string + aggregationMethod string + }{ + { + name: "success - aggregation_method data_feeds", + aggregationMethod: "data_feeds", }, - ) - require.NoError(t, err) - - ethUsdValStr := "1.123456" - ethUsdValue, err := decimal.NewFromString(ethUsdValStr) - require.NoError(t, err) - observationKey := "ETH_USD" - obs := []any{map[string]any{observationKey: ethUsdValue}} - inputs, err := values.NewMap(map[string]any{"observations": obs}) - require.NoError(t, err) - - executeReq := capabilities.CapabilityRequest{ - Metadata: capabilities.RequestMetadata{ - WorkflowID: workflowTestID, - WorkflowExecutionID: workflowExecutionTestID, + { + name: "success - aggregation_method reduce", + aggregationMethod: "reduce", }, - Config: config, - Inputs: inputs, } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + n := time.Now() + fc := clockwork.NewFakeClockAt(n) + lggr := logger.Test(t) + + ctx := tests.Context(t) + + s := requests.NewStore() + + cp := newCapability(s, fc, 1*time.Second, mockAggregatorFactory, mockEncoderFactory, lggr, 10) + require.NoError(t, cp.Start(ctx)) + + config, err := values.NewMap( + map[string]any{ + "aggregation_method": 
tt.aggregationMethod, + "aggregation_config": map[string]any{}, + "encoder_config": map[string]any{}, + "encoder": "evm", + "report_id": "ffff", + "key_id": "evm", + }, + ) + require.NoError(t, err) + + ethUsdValStr := "1.123456" + ethUsdValue, err := decimal.NewFromString(ethUsdValStr) + require.NoError(t, err) + observationKey := "ETH_USD" + obs := []any{map[string]any{observationKey: ethUsdValue}} + inputs, err := values.NewMap(map[string]any{"observations": obs}) + require.NoError(t, err) + + executeReq := capabilities.CapabilityRequest{ + Metadata: capabilities.RequestMetadata{ + WorkflowID: workflowTestID, + WorkflowExecutionID: workflowExecutionTestID, + }, + Config: config, + Inputs: inputs, + } - respCh := executeAsync(ctx, executeReq, cp.Execute) + respCh := executeAsync(ctx, executeReq, cp.Execute) - obsv, err := values.NewList(obs) - require.NoError(t, err) + obsv, err := values.NewList(obs) + require.NoError(t, err) - // Mock the oracle returning a response - mresp, err := values.NewMap(map[string]any{"observations": obsv}) - cp.reqHandler.SendResponse(ctx, requests.Response{ - Value: mresp, - WorkflowExecutionID: workflowExecutionTestID, - }) - require.NoError(t, err) + // Mock the oracle returning a response + mresp, err := values.NewMap(map[string]any{"observations": obsv}) + cp.reqHandler.SendResponse(ctx, requests.Response{ + Value: mresp, + WorkflowExecutionID: workflowExecutionTestID, + }) + require.NoError(t, err) - resp := <-respCh - assert.Nil(t, resp.Err) + resp := <-respCh + assert.NoError(t, resp.Err) - assert.Equal(t, mresp, resp.Value) + assert.Equal(t, mresp, resp.Value) + }) + } } func TestOCR3Capability_Eviction(t *testing.T) { @@ -147,6 +165,7 @@ func TestOCR3Capability_Eviction(t *testing.T) { "encoder_config": map[string]any{}, "encoder": "evm", "report_id": "aaaa", + "key_id": "evm", }, ) require.NoError(t, err) @@ -214,6 +233,7 @@ func TestOCR3Capability_EvictionUsingConfig(t *testing.T) { "encoder_config": map[string]any{}, 
"encoder": "evm", "report_id": "aaaa", + "key_id": "evm", "request_timeout_ms": 10000, }, ) @@ -279,6 +299,7 @@ func TestOCR3Capability_Registration(t *testing.T) { "encoder": "", "encoder_config": map[string]any{}, "report_id": "000f", + "key_id": "evm", }) require.NoError(t, err) @@ -325,6 +346,7 @@ func TestOCR3Capability_ValidateConfig(t *testing.T) { "encoder": "", "encoder_config": map[string]any{}, "report_id": "aaaa", + "key_id": "evm", }) require.NoError(t, err) @@ -337,6 +359,7 @@ func TestOCR3Capability_ValidateConfig(t *testing.T) { config, err := values.NewMap(map[string]any{ "aggregation_method": "data_feeds", "report_id": "aaaa", + "key_id": "evm", }) require.NoError(t, err) @@ -353,6 +376,7 @@ func TestOCR3Capability_ValidateConfig(t *testing.T) { "encoder": "", "encoder_config": map[string]any{}, "report_id": "aa", + "key_id": "evm", }) require.NoError(t, err) @@ -361,6 +385,22 @@ func TestOCR3Capability_ValidateConfig(t *testing.T) { assert.Contains(t, err.Error(), "does not match pattern") // taken from the error json schema error message require.Nil(t, c) }) + + t.Run("InvalidConfig no key_id", func(t *testing.T) { + config, err := values.NewMap(map[string]any{ + "aggregation_method": "data_feeds", + "aggregation_config": map[string]any{}, + "encoder": "", + "encoder_config": map[string]any{}, + "report_id": "aaaa", + }) + require.NoError(t, err) + + c, err := o.ValidateConfig(config) + require.Error(t, err) + assert.Contains(t, err.Error(), "missing properties: 'key_id'") // taken from the error json schema error message + require.Nil(t, c) + }) } func TestOCR3Capability_RespondsToLateRequest(t *testing.T) { @@ -382,6 +422,7 @@ func TestOCR3Capability_RespondsToLateRequest(t *testing.T) { "encoder_config": map[string]any{}, "encoder": "evm", "report_id": "ffff", + "key_id": "evm", }, ) require.NoError(t, err) @@ -441,6 +482,7 @@ func TestOCR3Capability_RespondingToLateRequestDoesNotBlockOnSlowResponseConsume "encoder_config": map[string]any{}, 
"encoder": "evm", "report_id": "ffff", + "key_id": "evm", }, ) require.NoError(t, err) diff --git a/pkg/capabilities/consensus/ocr3/datafeeds/feeds_aggregator.go b/pkg/capabilities/consensus/ocr3/datafeeds/feeds_aggregator.go index b7a1f5c6e..e7cab87a4 100644 --- a/pkg/capabilities/consensus/ocr3/datafeeds/feeds_aggregator.go +++ b/pkg/capabilities/consensus/ocr3/datafeeds/feeds_aggregator.go @@ -162,7 +162,7 @@ func (a *dataFeedsAggregator) Aggregate(lggr logger.Logger, previousOutcome *typ return nil, err } - toWrap := []any{} + var toWrap []any for _, report := range reportsNeedingUpdate { feedID := datastreams.FeedID(report.FeedID).Bytes() remappedID := a.config.Feeds[datastreams.FeedID(report.FeedID)].RemappedID diff --git a/pkg/capabilities/consensus/ocr3/factory.go b/pkg/capabilities/consensus/ocr3/factory.go index d634121b7..04f062eec 100644 --- a/pkg/capabilities/consensus/ocr3/factory.go +++ b/pkg/capabilities/consensus/ocr3/factory.go @@ -22,7 +22,8 @@ type factory struct { } const ( - defaultMaxPhaseOutputBytes = 100000 + // TODO(KS-617): read this from contract config + defaultMaxPhaseOutputBytes = 1000000 // 1 MB defaultMaxReportCount = 20 ) diff --git a/pkg/capabilities/consensus/ocr3/models.go b/pkg/capabilities/consensus/ocr3/models.go index 86662dc31..f3ac76325 100644 --- a/pkg/capabilities/consensus/ocr3/models.go +++ b/pkg/capabilities/consensus/ocr3/models.go @@ -5,14 +5,14 @@ import ( ) type config struct { - AggregationMethod string `mapstructure:"aggregation_method" json:"aggregation_method" jsonschema:"enum=data_feeds"` + AggregationMethod string `mapstructure:"aggregation_method" json:"aggregation_method" jsonschema:"enum=data_feeds,enum=identical,enum=reduce"` AggregationConfig *values.Map `mapstructure:"aggregation_config" json:"aggregation_config"` Encoder string `mapstructure:"encoder" json:"encoder"` EncoderConfig *values.Map `mapstructure:"encoder_config" json:"encoder_config"` ReportID string `mapstructure:"report_id" 
json:"report_id" jsonschema:"required,pattern=^[a-f0-9]{4}$"` RequestTimeoutMS int64 `mapstructure:"request_timeout_ms" json:"request_timeout_ms"` - KeyID string `mapstructure:"key_id" json:"key_id"` + KeyID string `mapstructure:"key_id" json:"key_id,omitempty" jsonschema:"required"` } type inputs struct { diff --git a/pkg/capabilities/consensus/ocr3/ocr3.go b/pkg/capabilities/consensus/ocr3/ocr3.go index 1e38e9af5..5209d8f63 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3.go +++ b/pkg/capabilities/consensus/ocr3/ocr3.go @@ -21,7 +21,8 @@ var _ ocr3rp.ProviderServer[commontypes.PluginProvider] = (*Capability)(nil) type Capability struct { loop.Plugin reportingplugins.PluginProviderServer - config Config + config Config + capabilityRegistry core.CapabilitiesRegistry } type Config struct { @@ -101,6 +102,8 @@ func (o *Capability) NewReportingPluginFactory(ctx context.Context, cfg core.Rep return nil, err } + o.capabilityRegistry = capabilityRegistry + return factory, err } @@ -109,3 +112,17 @@ func (o *Capability) NewValidationService(ctx context.Context) (core.ValidationS o.SubService(s) return s, nil } + +func (o *Capability) Close() error { + o.Plugin.Close() + + if o.capabilityRegistry == nil { + return nil + } + + if err := o.capabilityRegistry.Remove(context.TODO(), o.config.capability.ID); err != nil { + return err + } + + return nil +} diff --git a/pkg/capabilities/consensus/ocr3/ocr3_test.go b/pkg/capabilities/consensus/ocr3/ocr3_test.go index e215c13bf..5c89f5707 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3_test.go +++ b/pkg/capabilities/consensus/ocr3/ocr3_test.go @@ -55,11 +55,18 @@ func TestOCR3_ReportingFactoryIsAService(t *testing.T) { var rs core.RelayerSet r := mocks.NewCapabilitiesRegistry(t) r.On("Add", mock.Anything, o.config.capability).Return(nil) + r.On("Remove", mock.Anything, o.config.capability.ID).Return(nil) factory, err := o.NewReportingPluginFactory(ctx, core.ReportingPluginServiceConfig{}, p, pr, tc, el, r, kv, rs) 
require.NoError(t, err) require.NoError(t, factory.Start(ctx)) + r.AssertCalled(t, "Add", mock.Anything, o.config.capability) assert.Nil(t, factory.Ready()) + + err = o.Close() + require.NoError(t, err) + + r.AssertCalled(t, "Remove", mock.Anything, o.config.capability.ID) } diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go b/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go index 1132816b6..758c1859c 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go @@ -6,12 +6,59 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) +// EncoderWrapper allows access to field from an sdk.CapDefinition[Encoder] +func EncoderWrapper(raw sdk.CapDefinition[Encoder]) EncoderCap { + wrapped, ok := raw.(EncoderCap) + if ok { + return wrapped + } + return EncoderCap(raw) +} + type EncoderCap sdk.CapDefinition[Encoder] +// EncoderConfigWrapper allows access to field from an sdk.CapDefinition[EncoderConfig] +func EncoderConfigWrapper(raw sdk.CapDefinition[EncoderConfig]) EncoderConfigCap { + wrapped, ok := raw.(EncoderConfigCap) + if ok { + return wrapped + } + return EncoderConfigCap(raw) +} + type EncoderConfigCap sdk.CapDefinition[EncoderConfig] +// KeyIdWrapper allows access to field from an sdk.CapDefinition[KeyId] +func KeyIdWrapper(raw sdk.CapDefinition[KeyId]) KeyIdCap { + wrapped, ok := raw.(KeyIdCap) + if ok { + return wrapped + } + return KeyIdCap(raw) +} + +type KeyIdCap sdk.CapDefinition[KeyId] + +// ReportIdWrapper allows access to field from an sdk.CapDefinition[ReportId] +func ReportIdWrapper(raw sdk.CapDefinition[ReportId]) ReportIdCap { + wrapped, ok := raw.(ReportIdCap) + if ok { + return wrapped + } + return ReportIdCap(raw) +} + type ReportIdCap sdk.CapDefinition[ReportId] +// SignedReportWrapper allows access to field from an sdk.CapDefinition[SignedReport] +func SignedReportWrapper(raw 
sdk.CapDefinition[SignedReport]) SignedReportCap { + wrapped, ok := raw.(SignedReportCap) + if ok { + return wrapped + } + return &signedReportCap{CapDefinition: raw} +} + type SignedReportCap interface { sdk.CapDefinition[SignedReport] Context() sdk.CapDefinition[[]uint8] @@ -21,30 +68,28 @@ type SignedReportCap interface { private() } -// SignedReportCapFromStep should only be called from generated code to assure type safety -func SignedReportCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SignedReport]) SignedReportCap { - raw := step.AddTo(w) - return &signedReport{CapDefinition: raw} -} - -type signedReport struct { +type signedReportCap struct { sdk.CapDefinition[SignedReport] } -func (*signedReport) private() {} -func (c *signedReport) Context() sdk.CapDefinition[[]uint8] { +func (*signedReportCap) private() {} +func (c *signedReportCap) Context() sdk.CapDefinition[[]uint8] { return sdk.AccessField[SignedReport, []uint8](c.CapDefinition, "Context") } -func (c *signedReport) ID() sdk.CapDefinition[[]uint8] { +func (c *signedReportCap) ID() sdk.CapDefinition[[]uint8] { return sdk.AccessField[SignedReport, []uint8](c.CapDefinition, "ID") } -func (c *signedReport) Report() sdk.CapDefinition[[]uint8] { +func (c *signedReportCap) Report() sdk.CapDefinition[[]uint8] { return sdk.AccessField[SignedReport, []uint8](c.CapDefinition, "Report") } -func (c *signedReport) Signatures() sdk.CapDefinition[[][]uint8] { +func (c *signedReportCap) Signatures() sdk.CapDefinition[[][]uint8] { return sdk.AccessField[SignedReport, [][]uint8](c.CapDefinition, "Signatures") } +func ConstantSignedReport(value SignedReport) SignedReportCap { + return &signedReportCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSignedReportFromFields( context sdk.CapDefinition[[]uint8], iD sdk.CapDefinition[[]uint8], diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go 
b/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go index 1fbcb7534..ed7bfad07 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go @@ -4,7 +4,7 @@ package ocr3cap import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" - streams "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) @@ -18,13 +18,24 @@ func (cfg DataFeedsConsensusConfig) New(w *sdk.WorkflowSpecFactory, ref string, "aggregation_method": cfg.AggregationMethod, "encoder": cfg.Encoder, "encoder_config": cfg.EncoderConfig, + "key_id": cfg.KeyId, "report_id": cfg.ReportId, }, CapabilityType: capabilities.CapabilityTypeConsensus, } step := sdk.Step[SignedReport]{Definition: def} - return SignedReportCapFromStep(w, step) + raw := step.AddTo(w) + return SignedReportWrapper(raw) +} + +// FeedValueWrapper allows access to field from an sdk.CapDefinition[FeedValue] +func FeedValueWrapper(raw sdk.CapDefinition[FeedValue]) FeedValueCap { + wrapped, ok := raw.(FeedValueCap) + if ok { + return wrapped + } + return &feedValueCap{CapDefinition: raw} } type FeedValueCap interface { @@ -35,27 +46,25 @@ type FeedValueCap interface { private() } -// FeedValueCapFromStep should only be called from generated code to assure type safety -func FeedValueCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[FeedValue]) FeedValueCap { - raw := step.AddTo(w) - return &feedValue{CapDefinition: raw} -} - -type feedValue struct { +type feedValueCap struct { sdk.CapDefinition[FeedValue] } -func (*feedValue) private() {} -func (c *feedValue) Deviation() sdk.CapDefinition[string] { +func (*feedValueCap) private() {} +func (c *feedValueCap) Deviation() sdk.CapDefinition[string] { 
return sdk.AccessField[FeedValue, string](c.CapDefinition, "deviation") } -func (c *feedValue) Heartbeat() sdk.CapDefinition[uint64] { +func (c *feedValueCap) Heartbeat() sdk.CapDefinition[uint64] { return sdk.AccessField[FeedValue, uint64](c.CapDefinition, "heartbeat") } -func (c *feedValue) RemappedID() sdk.CapDefinition[string] { +func (c *feedValueCap) RemappedID() sdk.CapDefinition[string] { return sdk.AccessField[FeedValue, string](c.CapDefinition, "remappedID") } +func ConstantFeedValue(value FeedValue) FeedValueCap { + return &feedValueCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedValueFromFields( deviation sdk.CapDefinition[string], heartbeat sdk.CapDefinition[uint64], diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go b/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go index 12913126c..9fb49935e 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go @@ -11,6 +11,7 @@ type IdenticalConsensusConfig[T any] struct { Encoder Encoder EncoderConfig EncoderConfig ReportID ReportId + KeyID KeyId } func (c IdenticalConsensusConfig[T]) New(w *sdk.WorkflowSpecFactory, ref string, input IdenticalConsensusInput[T]) SignedReportCap { @@ -23,12 +24,13 @@ func (c IdenticalConsensusConfig[T]) New(w *sdk.WorkflowSpecFactory, ref string, "encoder_config": c.EncoderConfig, "aggregation_method": "identical", "report_id": c.ReportID, + "key_id": c.KeyID, }, CapabilityType: capabilities.CapabilityTypeConsensus, } - step := sdk.Step[SignedReport]{Definition: def} - return SignedReportCapFromStep(w, step) + step := &sdk.Step[SignedReport]{Definition: def} + return SignedReportWrapper(step.AddTo(w)) } type IdenticalConsensusInput[T any] struct { diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus_test.go b/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus_test.go index 2affbeab7..0b3d02146 100644 --- 
a/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus_test.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus_test.go @@ -15,10 +15,7 @@ import ( func TestIdenticalConsensus(t *testing.T) { t.Parallel() - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{ - Owner: "0x1234", - Name: "Test", - }) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "1234", Number: 1}.New(workflow) @@ -26,6 +23,7 @@ func TestIdenticalConsensus(t *testing.T) { Encoder: ocr3.EncoderEVM, EncoderConfig: ocr3.EncoderConfig{}, ReportID: "0001", + KeyID: "evm", }.New(workflow, "consensus", ocr3.IdenticalConsensusInput[basictrigger.TriggerOutputs]{ Observation: trigger, Encoder: "evm", @@ -42,8 +40,6 @@ func TestIdenticalConsensus(t *testing.T) { require.NoError(t, err) expected := sdk.WorkflowSpec{ - Name: "Test", - Owner: "0x1234", Triggers: []sdk.StepDefinition{ { ID: "basic-test-trigger@1.0.0", @@ -71,6 +67,7 @@ func TestIdenticalConsensus(t *testing.T) { "encoder_config": map[string]any{}, "aggregation_method": "identical", "report_id": "0001", + "key_id": "evm", }, CapabilityType: capabilities.CapabilityTypeConsensus, }, diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common-schema.json b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common-schema.json index 28f2d9998..dd4b9b699 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common-schema.json +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common-schema.json @@ -52,6 +52,12 @@ "type": "object", "additionalProperties": true }, + "key_id" : { + "type": "string", + "examples": [ + "evm" + ] + }, "report_id" : { "type": "string", "pattern": "^[a-f0-9]{4}$", diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common_generated.go b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common_generated.go index ac72a71ee..6d5869296 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common_generated.go +++ 
b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_common_generated.go @@ -41,6 +41,8 @@ func (j *Encoder) UnmarshalJSON(b []byte) error { return nil } +type KeyId string + type ReportId string // UnmarshalJSON implements json.Unmarshaler. diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus-schema.json b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus-schema.json index d4809191a..c9164d112 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus-schema.json +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus-schema.json @@ -67,6 +67,9 @@ "encoder_config": { "$ref": "ocr3cap_common-schema.json#/$defs/encoder_config" }, + "key_id": { + "$ref": "ocr3cap_common-schema.json#/$defs/key_id" + }, "report_id": { "$ref": "ocr3cap_common-schema.json#/$defs/report_id" } @@ -78,6 +81,7 @@ "aggregation_config", "encoder", "encoder_config", + "key_id", "report_id" ] }, diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus_generated.go b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus_generated.go index a99eab52f..a39f57a5c 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus_generated.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/ocr3cap_data_feeds_consensus_generated.go @@ -36,6 +36,9 @@ type DataFeedsConsensusConfig struct { // EncoderConfig corresponds to the JSON schema field "encoder_config". EncoderConfig EncoderConfig `json:"encoder_config" yaml:"encoder_config" mapstructure:"encoder_config"` + // KeyId corresponds to the JSON schema field "key_id". + KeyId KeyId `json:"key_id" yaml:"key_id" mapstructure:"key_id"` + // ReportId corresponds to the JSON schema field "report_id". 
ReportId ReportId `json:"report_id" yaml:"report_id" mapstructure:"report_id"` } @@ -117,6 +120,9 @@ func (j *DataFeedsConsensusConfig) UnmarshalJSON(b []byte) error { if _, ok := raw["encoder_config"]; raw != nil && !ok { return fmt.Errorf("field encoder_config in DataFeedsConsensusConfig: required") } + if _, ok := raw["key_id"]; raw != nil && !ok { + return fmt.Errorf("field key_id in DataFeedsConsensusConfig: required") + } if _, ok := raw["report_id"]; raw != nil && !ok { return fmt.Errorf("field report_id in DataFeedsConsensusConfig: required") } diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus.go b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus.go new file mode 100644 index 000000000..18b0ad86d --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus.go @@ -0,0 +1,53 @@ +package ocr3cap + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +// Note this isn't generated because generics isn't supported in json schema + +type ReduceConsensusConfig[T any] struct { + Encoder Encoder + EncoderConfig EncoderConfig + ReportID ReportId + KeyID KeyId + AggregationConfig aggregators.ReduceAggConfig +} + +func (c ReduceConsensusConfig[T]) New(w *sdk.WorkflowSpecFactory, ref string, input ReduceConsensusInput[T]) SignedReportCap { + def := sdk.StepDefinition{ + ID: "offchain_reporting@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: map[string]any{ + "aggregation_method": "reduce", + "aggregation_config": c.AggregationConfig, + "encoder": c.Encoder, + "encoder_config": c.EncoderConfig, + "report_id": c.ReportID, + "key_id": c.KeyID, + }, + CapabilityType: capabilities.CapabilityTypeConsensus, + } + + step := sdk.Step[SignedReport]{Definition: def} + return SignedReportWrapper(step.AddTo(w)) +} + +type ReduceConsensusInput[T 
any] struct { + Observation sdk.CapDefinition[T] + Encoder Encoder + EncoderConfig EncoderConfig +} + +func (input ReduceConsensusInput[T]) ToSteps() sdk.StepInputs { + return sdk.StepInputs{ + Mapping: map[string]any{ + "observations": sdk.ListOf(input.Observation).Ref(), + "encoder": input.Encoder, + "encoderConfig": input.EncoderConfig, + }, + } +} diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus_test.go b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus_test.go new file mode 100644 index 000000000..282949eb0 --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus_test.go @@ -0,0 +1,151 @@ +package ocr3cap_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators" + ocr3 "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/targets/chainwriter" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" +) + +func TestReduceConsensus(t *testing.T) { + t.Parallel() + workflow := sdk.NewWorkflowSpecFactory() + + trigger := basictrigger.TriggerConfig{Name: "1234", Number: 1}.New(workflow) + + consensus := ocr3.ReduceConsensusConfig[basictrigger.TriggerOutputs]{ + Encoder: ocr3.EncoderEVM, + EncoderConfig: ocr3.EncoderConfig{}, + ReportID: "0001", + KeyID: "evm", + AggregationConfig: aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "3600", // 1 hour in seconds + 
DeviationType: "absolute", + }, + { + InputKey: "Price", + OutputKey: "Price", + Method: "median", + DeviationString: "0.05", // 5% + DeviationType: "percent", + SubMapField: true, + }, + }, + OutputFieldName: "Reports", + ReportFormat: "array", + SubMapKey: "Report", + }, + }.New(workflow, "consensus", ocr3.ReduceConsensusInput[basictrigger.TriggerOutputs]{ + Observation: trigger, + Encoder: "evm", + EncoderConfig: ocr3.EncoderConfig(map[string]any{ + "abi": "(bytes32 FeedID, bytes Report, uint32 Timestamp)[] Reports", + }), + }) + + chainwriter.TargetConfig{ + Address: "0x1235", + DeltaStage: "45s", + Schedule: "oneAtATime", + }.New(workflow, "chainwriter@1.0.0", chainwriter.TargetInput{SignedReport: consensus}) + + actual, err := workflow.Spec() + require.NoError(t, err) + + expected := sdk.WorkflowSpec{ + Triggers: []sdk.StepDefinition{ + { + ID: "basic-test-trigger@1.0.0", + Ref: "trigger", + Inputs: sdk.StepInputs{}, + Config: map[string]any{ + "name": "1234", + "number": 1, + }, + CapabilityType: capabilities.CapabilityTypeTrigger, + }, + }, + Actions: []sdk.StepDefinition{}, + Consensus: []sdk.StepDefinition{ + { + ID: "offchain_reporting@1.0.0", + Ref: "consensus", + Inputs: sdk.StepInputs{Mapping: map[string]any{ + "observations": []any{"$(trigger.outputs)"}, + "encoder": "evm", + "encoderConfig": map[string]any{ + "abi": "(bytes32 FeedID, bytes Report, uint32 Timestamp)[] Reports", + }, + }}, + Config: map[string]any{ + "encoder": "EVM", + "encoder_config": map[string]any{}, + "report_id": "0001", + "aggregation_method": "reduce", + "key_id": "evm", + "aggregation_config": map[string]any{ + "outputFieldName": "Reports", + "reportFormat": "array", + "subMapKey": "Report", + "Fields": []map[string]any{ + { + "inputKey": "FeedID", + "outputKey": "FeedID", + "method": "mode", + }, + { + "inputKey": "Timestamp", + "outputKey": "Timestamp", + "method": "median", + "deviation": "3600", + "deviationType": "absolute", + }, + { + "inputKey": "Price", + 
"outputKey": "Price", + "method": "median", + "deviation": "0.05", + "deviationType": "percent", + "subMapField": true, + }, + }, + }, + }, + CapabilityType: capabilities.CapabilityTypeConsensus, + }, + }, + Targets: []sdk.StepDefinition{ + { + ID: "chainwriter@1.0.0", + Inputs: sdk.StepInputs{ + Mapping: map[string]any{"signed_report": "$(consensus.outputs)"}, + }, + Config: map[string]any{ + "address": "0x1235", + "deltaStage": "45s", + "schedule": "oneAtATime", + }, + CapabilityType: capabilities.CapabilityTypeTarget, + }, + }, + } + + testutils.AssertWorkflowSpec(t, expected, actual) +} diff --git a/pkg/capabilities/consensus/ocr3/reporting_plugin.go b/pkg/capabilities/consensus/ocr3/reporting_plugin.go index 923fdb67b..e0eb72f53 100644 --- a/pkg/capabilities/consensus/ocr3/reporting_plugin.go +++ b/pkg/capabilities/consensus/ocr3/reporting_plugin.go @@ -339,7 +339,7 @@ func (r *reportingPlugin) Outcome(ctx context.Context, outctx ocr3types.OutcomeC outcome, err2 := agg.Aggregate(lggr, workflowOutcome, obs, r.config.F) if err2 != nil { lggr.Errorw("error aggregating outcome", "error", err2) - return nil, err + continue } // Only if the previous outcome exists: diff --git a/pkg/capabilities/consensus/ocr3/reporting_plugin_test.go b/pkg/capabilities/consensus/ocr3/reporting_plugin_test.go index d6f8326a8..18cc26e53 100644 --- a/pkg/capabilities/consensus/ocr3/reporting_plugin_test.go +++ b/pkg/capabilities/consensus/ocr3/reporting_plugin_test.go @@ -2,6 +2,7 @@ package ocr3 import ( "context" + "errors" "sort" "testing" "time" @@ -75,7 +76,7 @@ func TestReportingPlugin_Query(t *testing.T) { type mockCapability struct { t *testing.T - aggregator *aggregator + aggregator pbtypes.Aggregator encoder *enc registeredWorkflows map[string]bool expectedEncoderName string @@ -102,6 +103,20 @@ func (a *aggregator) Aggregate(lggr logger.Logger, pout *pbtypes.AggregationOutc return a.outcome, nil } +type erroringAggregator struct { + aggregator + count int +} + +func (a 
*erroringAggregator) Aggregate(lggr logger.Logger, pout *pbtypes.AggregationOutcome, observations map[commontypes.OracleID][]values.Value, i int) (*pbtypes.AggregationOutcome, error) { + defer func() { a.count += 1 }() + if a.count == 0 { + return nil, errors.New("failed to aggregate") + } + + return a.aggregator.Aggregate(lggr, pout, observations, i) +} + type enc struct { gotInput values.Map } @@ -258,8 +273,9 @@ func TestReportingPlugin_Observation_NoResults(t *testing.T) { func TestReportingPlugin_Outcome(t *testing.T) { lggr := logger.Test(t) s := requests.NewStore() + aggregator := &aggregator{} mcap := &mockCapability{ - aggregator: &aggregator{}, + aggregator: aggregator, encoder: &enc{}, } rp, err := newReportingPlugin(s, mcap, defaultBatchSize, ocr3types.ReportingPluginConfig{}, defaultOutcomePruningThreshold, lggr) @@ -310,8 +326,83 @@ func TestReportingPlugin_Outcome(t *testing.T) { cr := opb.CurrentReports[0] assert.EqualExportedValues(t, cr.Id, id) - assert.EqualExportedValues(t, cr.Outcome, mcap.aggregator.outcome) - assert.EqualExportedValues(t, opb.Outcomes[workflowTestID], mcap.aggregator.outcome) + assert.EqualExportedValues(t, cr.Outcome, aggregator.outcome) + assert.EqualExportedValues(t, opb.Outcomes[workflowTestID], aggregator.outcome) +} + +func TestReportingPlugin_Outcome_AggregatorErrorDoesntInterruptOtherWorkflows(t *testing.T) { + lggr := logger.Test(t) + s := requests.NewStore() + aggregator := &erroringAggregator{} + mcap := &mockCapability{ + aggregator: aggregator, + encoder: &enc{}, + } + rp, err := newReportingPlugin(s, mcap, defaultBatchSize, ocr3types.ReportingPluginConfig{}, defaultOutcomePruningThreshold, lggr) + require.NoError(t, err) + + weid := uuid.New().String() + wowner := uuid.New().String() + id := &pbtypes.Id{ + WorkflowExecutionId: weid, + WorkflowId: workflowTestID, + WorkflowOwner: wowner, + WorkflowName: workflowTestName, + ReportId: reportTestID, + } + weid2 := uuid.New().String() + id2 := &pbtypes.Id{ + 
WorkflowExecutionId: weid2, + WorkflowId: workflowTestID, + WorkflowOwner: wowner, + WorkflowName: workflowTestName, + ReportId: reportTestID, + } + q := &pbtypes.Query{ + Ids: []*pbtypes.Id{id, id2}, + } + qb, err := proto.Marshal(q) + require.NoError(t, err) + o, err := values.NewList([]any{"hello"}) + require.NoError(t, err) + + o2, err := values.NewList([]any{"world"}) + require.NoError(t, err) + obs := &pbtypes.Observations{ + Observations: []*pbtypes.Observation{ + { + Id: id, + Observations: values.Proto(o).GetListValue(), + }, + { + Id: id2, + Observations: values.Proto(o2).GetListValue(), + }, + }, + } + + rawObs, err := proto.Marshal(obs) + require.NoError(t, err) + aos := []types.AttributedObservation{ + { + Observation: rawObs, + Observer: commontypes.OracleID(1), + }, + } + + outcome, err := rp.Outcome(tests.Context(t), ocr3types.OutcomeContext{}, qb, aos) + require.NoError(t, err) + + opb := &pbtypes.Outcome{} + err = proto.Unmarshal(outcome, opb) + require.NoError(t, err) + + assert.Len(t, opb.CurrentReports, 1) + + cr := opb.CurrentReports[0] + assert.EqualExportedValues(t, cr.Id, id2) + assert.EqualExportedValues(t, cr.Outcome, aggregator.outcome) + assert.EqualExportedValues(t, opb.Outcomes[workflowTestID], aggregator.outcome) } func TestReportingPlugin_Outcome_NilDerefs(t *testing.T) { @@ -372,6 +463,73 @@ func TestReportingPlugin_Outcome_NilDerefs(t *testing.T) { require.NoError(t, err) } +func TestReportingPlugin_Outcome_AggregatorErrorDoesntInterruptOtherIDs(t *testing.T) { + ctx := tests.Context(t) + lggr := logger.Test(t) + s := requests.NewStore() + mcap := &mockCapability{ + aggregator: &aggregator{}, + encoder: &enc{}, + } + rp, err := newReportingPlugin(s, mcap, defaultBatchSize, ocr3types.ReportingPluginConfig{}, defaultOutcomePruningThreshold, lggr) + require.NoError(t, err) + + weid := uuid.New().String() + wowner := uuid.New().String() + id1 := &pbtypes.Id{ + WorkflowExecutionId: weid, + WorkflowId: workflowTestID, + WorkflowOwner: 
wowner, + WorkflowName: workflowTestName, + ReportId: reportTestID, + } + + weid2 := uuid.New().String() + id2 := &pbtypes.Id{ + WorkflowExecutionId: weid2, + WorkflowId: workflowTestID, + WorkflowOwner: wowner, + WorkflowName: workflowTestName, + ReportId: reportTestID, + } + q := &pbtypes.Query{ + Ids: []*pbtypes.Id{ + id1, + id2, + }, + } + qb, err := proto.Marshal(q) + require.NoError(t, err) + aos := []types.AttributedObservation{ + { + Observer: commontypes.OracleID(1), + }, + {}, + } + + _, err = rp.Outcome(ctx, ocr3types.OutcomeContext{}, qb, aos) + require.NoError(t, err) + + obs := &pbtypes.Observations{ + Observations: []*pbtypes.Observation{ + nil, + {}, + }, + RegisteredWorkflowIds: nil, + } + obsb, err := proto.Marshal(obs) + require.NoError(t, err) + + aos = []types.AttributedObservation{ + { + Observation: obsb, + Observer: commontypes.OracleID(1), + }, + } + _, err = rp.Outcome(ctx, ocr3types.OutcomeContext{}, qb, aos) + require.NoError(t, err) +} + func TestReportingPlugin_Reports_ShouldReportFalse(t *testing.T) { lggr := logger.Test(t) s := requests.NewStore() diff --git a/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json b/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json index ebdabb38d..264eff920 100644 --- a/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json +++ b/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json @@ -7,7 +7,9 @@ "aggregation_method": { "type": "string", "enum": [ - "data_feeds" + "data_feeds", + "identical", + "reduce" ] }, "aggregation_config": { diff --git a/pkg/capabilities/consensus/ocr3/transmitter.go b/pkg/capabilities/consensus/ocr3/transmitter.go index 2c8d1d6fc..ab50ab44c 100644 --- a/pkg/capabilities/consensus/ocr3/transmitter.go +++ b/pkg/capabilities/consensus/ocr3/transmitter.go @@ -15,6 +15,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" pbtypes 
"github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/types" + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/types/core" "github.com/smartcontractkit/chainlink-common/pkg/values" @@ -22,7 +23,7 @@ import ( "google.golang.org/protobuf/types/known/structpb" ) -var _ (ocr3types.ContractTransmitter[[]byte]) = (*ContractTransmitter)(nil) +var _ ocr3types.ContractTransmitter[[]byte] = (*ContractTransmitter)(nil) // ContractTransmitter is a custom transmitter for the OCR3 capability. // When called it will forward the report + its signatures back to the @@ -33,6 +34,7 @@ type ContractTransmitter struct { registry core.CapabilitiesRegistry capability capabilities.ExecutableCapability fromAccount string + emitter custmsg.MessageEmitter } func extractReportInfo(data []byte) (*pbtypes.ReportInfo, error) { @@ -72,7 +74,7 @@ func (c *ContractTransmitter) Transmit(ctx context.Context, configDigest types.C signedReport := &pbtypes.SignedReport{} if info.ShouldReport { - signedReport.Report = []byte(rwi.Report) + signedReport.Report = rwi.Report // report context is the config digest + the sequence number padded with zeros // (see OCR3OnchainKeyringAdapter in core) @@ -82,7 +84,7 @@ func (c *ContractTransmitter) Transmit(ctx context.Context, configDigest types.C repContext := append(append(configDigest[:], seqToEpoch[:]...), zeros...) 
signedReport.Context = repContext - sigs := [][]byte{} + var sigs [][]byte for _, s := range signatures { sigs = append(sigs, s.Signature) } @@ -116,6 +118,18 @@ func (c *ContractTransmitter) Transmit(ctx context.Context, configDigest types.C c.capability = cp.(capabilities.ExecutableCapability) } + msg := "report with id " + info.Id.ReportId + " should be reported: " + fmt.Sprint(info.ShouldReport) + err = c.emitter.With( + "workflowExecutionID", info.Id.WorkflowExecutionId, + "workflowID", info.Id.WorkflowId, + "workflowOwner", info.Id.WorkflowOwner, + "workflowName", info.Id.WorkflowName, + "reportId", info.Id.ReportId, + ).Emit(ctx, msg) + if err != nil { + c.lggr.Errorw(fmt.Sprintf("could not emit message: %s", msg), "error", err) + } + _, err = c.capability.Execute(ctx, capabilities.CapabilityRequest{ Metadata: capabilities.RequestMetadata{ WorkflowExecutionID: info.Id.WorkflowExecutionId, @@ -131,10 +145,10 @@ func (c *ContractTransmitter) Transmit(ctx context.Context, configDigest types.C return err } -func (c *ContractTransmitter) FromAccount(ctx context.Context) (types.Account, error) { +func (c *ContractTransmitter) FromAccount(_ context.Context) (types.Account, error) { return types.Account(c.fromAccount), nil } func NewContractTransmitter(lggr logger.Logger, registry core.CapabilitiesRegistry, fromAccount string) *ContractTransmitter { - return &ContractTransmitter{lggr: lggr, registry: registry, fromAccount: fromAccount} + return &ContractTransmitter{lggr: lggr, registry: registry, fromAccount: fromAccount, emitter: custmsg.NewLabeler()} } diff --git a/pkg/capabilities/consensus/ocr3/transmitter_test.go b/pkg/capabilities/consensus/ocr3/transmitter_test.go index 0f9276268..c872ff7a5 100644 --- a/pkg/capabilities/consensus/ocr3/transmitter_test.go +++ b/pkg/capabilities/consensus/ocr3/transmitter_test.go @@ -57,6 +57,7 @@ func TestTransmitter(t *testing.T) { "encoder": "", "encoder_config": map[string]any{}, "report_id": hex.EncodeToString(repID), + 
"key_id": "evm", }) require.NoError(t, err) @@ -145,6 +146,7 @@ func TestTransmitter_ShouldReportFalse(t *testing.T) { "encoder": "", "encoder_config": map[string]any{}, "report_id": "aaff", + "key_id": "evm", }) require.NoError(t, err) diff --git a/pkg/capabilities/consensus/ocr3/types/aggregator.go b/pkg/capabilities/consensus/ocr3/types/aggregator.go index af86026ed..6484a5fbc 100644 --- a/pkg/capabilities/consensus/ocr3/types/aggregator.go +++ b/pkg/capabilities/consensus/ocr3/types/aggregator.go @@ -13,22 +13,23 @@ const MetadataFieldName = "INTERNAL_METADATA" type Metadata struct { Version uint32 // 1 byte - ExecutionID string // 32 hex bytes + ExecutionID string // 32 hex bytes (string len = 64) Timestamp uint32 // 4 bytes DONID uint32 // 4 bytes DONConfigVersion uint32 // 4 bytes - WorkflowID string // 32 hex bytes - WorkflowName string // 10 hex bytes - WorkflowOwner string // 20 hex bytes - ReportID string // 2 hex bytes + WorkflowID string // 32 hex bytes (string len = 64) + WorkflowName string // 10 hex bytes (string len = 20) + WorkflowOwner string // 20 hex bytes (string len = 40) + ReportID string // 2 hex bytes (string len = 4) } // the contract requires exactly 10 bytes for the workflow name -// the json schema allows for a variable length string <= len(10) -// pad with trailing spaces to meet the contract requirements +// the resulting workflow name should be up to 10 bytes long +// so pad accordingly to meet the contract requirements func (m *Metadata) padWorkflowName() { - if len(m.WorkflowName) < 10 { - suffix := strings.Repeat(" ", 10-len(m.WorkflowName)) + // it should have 10 hex bytes, so 20 characters total + if len(m.WorkflowName) < 20 { + suffix := strings.Repeat("0", 20-len(m.WorkflowName)) m.WorkflowName += suffix } } diff --git a/pkg/capabilities/consensus/ocr3/types/aggregator_test.go b/pkg/capabilities/consensus/ocr3/types/aggregator_test.go index dce0179b3..94660d103 100644 --- 
a/pkg/capabilities/consensus/ocr3/types/aggregator_test.go +++ b/pkg/capabilities/consensus/ocr3/types/aggregator_test.go @@ -16,25 +16,32 @@ func TestMetadata_padWorkflowName(t *testing.T) { want string }{ { - name: "padWorkflowName 1", + name: "padWorkflowName hex with 9 bytes", fields: fields{ - WorkflowName: "123456789", + WorkflowName: "ABCD1234EF567890AB", }, - want: "123456789 ", + want: "ABCD1234EF567890AB00", }, { - name: "padWorkflowName 0", + name: "padWorkflowName hex with 5 bytes", fields: fields{ - WorkflowName: "1234567890", + WorkflowName: "1234ABCD56", }, - want: "1234567890", + want: "1234ABCD560000000000", }, { - name: "padWorkflowName 10", + name: "padWorkflowName empty", fields: fields{ WorkflowName: "", }, - want: " ", + want: "00000000000000000000", + }, + { + name: "padWorkflowName non-hex string", + fields: fields{ + WorkflowName: "not-hex", + }, + want: "not-hex0000000000000", }, } for _, tt := range tests { diff --git a/pkg/capabilities/events/events.go b/pkg/capabilities/events/events.go index 1443fb2ff..444f45705 100644 --- a/pkg/capabilities/events/events.go +++ b/pkg/capabilities/events/events.go @@ -9,19 +9,18 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/beholder" "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb" - "github.com/smartcontractkit/chainlink-common/pkg/values" ) +// Duplicates the attributes in beholder/message.go::Metadata const ( - // Duplicates the attributes in beholder/message.go::Metadata - labelWorkflowOwner = "workflow_owner_address" - labelWorkflowID = "workflow_id" - labelWorkflowExecutionID = "workflow_execution_id" - labelWorkflowName = "workflow_name" - labelCapabilityContractAddress = "capability_contract_address" - labelCapabilityID = "capability_id" - labelCapabilityVersion = "capability_version" - labelCapabilityName = "capability_name" + LabelWorkflowOwner = "workflow_owner_address" + LabelWorkflowID = "workflow_id" + LabelWorkflowExecutionID = "workflow_execution_id" + 
LabelWorkflowName = "workflow_name" + LabelCapabilityContractAddress = "capability_contract_address" + LabelCapabilityID = "capability_id" + LabelCapabilityVersion = "capability_version" + LabelCapabilityName = "capability_name" ) type EmitMetadata struct { @@ -93,35 +92,35 @@ func (e EmitMetadata) attrs() []any { a := []any{} if e.WorkflowOwner != "" { - a = append(a, labelWorkflowOwner, e.WorkflowOwner) + a = append(a, LabelWorkflowOwner, e.WorkflowOwner) } if e.WorkflowID != "" { - a = append(a, labelWorkflowID, e.WorkflowID) + a = append(a, LabelWorkflowID, e.WorkflowID) } if e.WorkflowExecutionID != "" { - a = append(a, labelWorkflowExecutionID, e.WorkflowExecutionID) + a = append(a, LabelWorkflowExecutionID, e.WorkflowExecutionID) } if e.WorkflowName != "" { - a = append(a, labelWorkflowName, e.WorkflowName) + a = append(a, LabelWorkflowName, e.WorkflowName) } if e.CapabilityContractAddress != "" { - a = append(a, labelCapabilityContractAddress, e.CapabilityContractAddress) + a = append(a, LabelCapabilityContractAddress, e.CapabilityContractAddress) } if e.CapabilityID != "" { - a = append(a, labelCapabilityID, e.CapabilityID) + a = append(a, LabelCapabilityID, e.CapabilityID) } if e.CapabilityVersion != "" { - a = append(a, labelCapabilityVersion, e.CapabilityVersion) + a = append(a, LabelCapabilityVersion, e.CapabilityVersion) } if e.CapabilityName != "" { - a = append(a, labelCapabilityName, e.CapabilityName) + a = append(a, LabelCapabilityName, e.CapabilityName) } return a @@ -167,16 +166,27 @@ func (e *Emitter) Emit(ctx context.Context, msg Message) error { return errors.New("must provide workflow name to emit event") } - wm, err := values.WrapMap(msg.Labels) - if err != nil { - return fmt.Errorf("could not wrap map: %w", err) - } - - pm := values.ProtoMap(wm) + // TODO un-comment after INFOPLAT-1386 + //wm, err := values.WrapMap(msg.Labels) + //if err != nil { + // return fmt.Errorf("could not wrap map: %w", err) + //} + // + //pm := values.ProtoMap(wm) 
bytes, err := proto.Marshal(&pb.BaseMessage{ - Labels: pm, - Msg: msg.Msg, + // any empty values will not be serialized (including the key) + Labels: map[string]string{ + LabelWorkflowID: nmd.WorkflowID, + LabelWorkflowName: nmd.WorkflowName, + LabelWorkflowOwner: nmd.WorkflowOwner, + LabelCapabilityContractAddress: nmd.CapabilityContractAddress, + LabelCapabilityID: nmd.CapabilityID, + LabelCapabilityVersion: nmd.CapabilityVersion, + LabelCapabilityName: nmd.CapabilityName, + LabelWorkflowExecutionID: nmd.WorkflowExecutionID, + }, + Msg: msg.Msg, }) if err != nil { return fmt.Errorf("could not marshal operational event: %w", err) diff --git a/pkg/capabilities/pb/capabilities.pb.go b/pkg/capabilities/pb/capabilities.pb.go index 4f16b4367..e0c206f95 100644 --- a/pkg/capabilities/pb/capabilities.pb.go +++ b/pkg/capabilities/pb/capabilities.pb.go @@ -168,6 +168,7 @@ type RequestMetadata struct { WorkflowDonId uint32 `protobuf:"varint,6,opt,name=workflow_don_id,json=workflowDonId,proto3" json:"workflow_don_id,omitempty"` WorkflowDonConfigVersion uint32 `protobuf:"varint,7,opt,name=workflow_don_config_version,json=workflowDonConfigVersion,proto3" json:"workflow_don_config_version,omitempty"` ReferenceId string `protobuf:"bytes,8,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"` + DecodedWorkflowName string `protobuf:"bytes,9,opt,name=decoded_workflow_name,json=decodedWorkflowName,proto3" json:"decoded_workflow_name,omitempty"` } func (x *RequestMetadata) Reset() { @@ -251,6 +252,13 @@ func (x *RequestMetadata) GetReferenceId() string { return "" } +func (x *RequestMetadata) GetDecodedWorkflowName() string { + if x != nil { + return x.DecodedWorkflowName + } + return "" +} + type CapabilityRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -636,7 +644,9 @@ type RegistrationMetadata struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - WorkflowId string 
`protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + ReferenceId string `protobuf:"bytes,2,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"` + WorkflowOwner string `protobuf:"bytes,3,opt,name=workflow_owner,json=workflowOwner,proto3" json:"workflow_owner,omitempty"` } func (x *RegistrationMetadata) Reset() { @@ -678,6 +688,20 @@ func (x *RegistrationMetadata) GetWorkflowId() string { return "" } +func (x *RegistrationMetadata) GetReferenceId() string { + if x != nil { + return x.ReferenceId + } + return "" +} + +func (x *RegistrationMetadata) GetWorkflowOwner() string { + if x != nil { + return x.WorkflowOwner + } + return "" +} + type RegisterToWorkflowRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -959,7 +983,7 @@ var file_capabilities_pb_capabilities_proto_rawDesc = []byte{ 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x69, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x22, 0xc2, 0x02, 0x0a, + 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x22, 0xf6, 0x02, 0x0a, 0x0f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, @@ -979,161 +1003,169 @@ var file_capabilities_pb_capabilities_proto_rawDesc = []byte{ 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x44, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x65, 0x72, 0x73, 0x69, 
0x6f, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, - 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x4a, 0x04, 0x08, 0x05, 0x10, - 0x06, 0x22, 0x98, 0x01, 0x0a, 0x11, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x61, 0x70, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, - 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x22, 0x9a, 0x01, 0x0a, - 0x1a, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x74, - 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x49, 0x64, 0x12, 0x39, 0x0a, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x61, - 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 
0x67, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, - 0x70, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x67, 0x0a, 0x0c, 0x54, 0x72, 0x69, - 0x67, 0x67, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x74, 0x72, 0x69, - 0x67, 0x67, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, - 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x25, 0x0a, 0x07, 0x6f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x73, 0x22, 0x59, 0x0a, 0x0f, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x69, 0x65, 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x52, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x8c, 0x01, - 0x0a, 0x16, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x2a, 0x0a, 0x03, 0x61, 0x63, 0x6b, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, - 0x03, 0x61, 0x63, 0x6b, 0x12, 0x3b, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, - 0x69, 0x74, 0x69, 
0x65, 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x4d, 0x0a, 0x12, - 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x37, 0x0a, 0x14, 0x52, - 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x49, 0x64, 0x22, 0x80, 0x01, 0x0a, 0x19, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, - 0x72, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, - 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x84, 0x01, 0x0a, 0x1d, 0x55, 0x6e, 0x72, 0x65, - 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x46, 0x72, 0x6f, 
0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x63, 0x61, - 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, - 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xc2, - 0x02, 0x0a, 0x11, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x73, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x20, 0x0a, 0x0c, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x2c, - 0x0a, 0x12, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x75, 0x6e, 0x6e, 0x65, - 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, - 0x74, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x0b, 0x74, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x49, 0x64, 0x12, - 0x25, 0x0a, 0x0e, 0x63, 0x61, 0x70, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x79, 0x5f, 0x69, - 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x61, 0x70, 0x52, 0x65, 0x67, 0x69, - 0x73, 0x74, 0x72, 0x79, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x10, 0x6b, 0x65, 0x79, 0x56, 0x61, 
0x6c, - 0x75, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, - 0x52, 0x0f, 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x49, - 0x64, 0x12, 0x24, 0x0a, 0x0e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x74, - 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, 0x6c, 0x61, 0x79, - 0x65, 0x72, 0x53, 0x65, 0x74, 0x49, 0x64, 0x12, 0x2a, 0x0a, 0x11, 0x6f, 0x72, 0x61, 0x63, 0x6c, - 0x65, 0x5f, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x0f, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, - 0x79, 0x49, 0x64, 0x22, 0x4f, 0x0a, 0x14, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x37, 0x0a, 0x05, 0x69, - 0x6e, 0x66, 0x6f, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x63, 0x61, 0x70, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x52, 0x05, 0x69, - 0x6e, 0x66, 0x6f, 0x73, 0x2a, 0xa1, 0x01, 0x0a, 0x0e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, - 0x69, 0x74, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, 0x43, 0x41, 0x50, 0x41, 0x42, - 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, - 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, - 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x49, 0x47, 0x47, 0x45, 0x52, 0x10, - 0x01, 0x12, 0x1a, 0x0a, 0x16, 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, - 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x12, 0x1d, 0x0a, - 0x19, 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, - 0x5f, 0x43, 0x4f, 0x4e, 0x53, 
0x45, 0x4e, 0x53, 0x55, 0x53, 0x10, 0x03, 0x12, 0x1a, 0x0a, 0x16, - 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, - 0x54, 0x41, 0x52, 0x47, 0x45, 0x54, 0x10, 0x04, 0x32, 0x55, 0x0a, 0x0e, 0x42, 0x61, 0x73, 0x65, - 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x43, 0x0a, 0x04, 0x49, 0x6e, - 0x66, 0x6f, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x21, 0x2e, 0x63, 0x61, 0x70, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, - 0xd3, 0x01, 0x0a, 0x11, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x65, 0x0a, 0x0f, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, - 0x72, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, 0x28, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, - 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, - 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x57, 0x0a, 0x11, - 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, - 0x72, 0x12, 0x28, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, - 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 
0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x22, 0x00, 0x32, 0x98, 0x02, 0x0a, 0x0a, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x61, 0x62, 0x6c, 0x65, 0x12, 0x57, 0x0a, 0x12, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, - 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x27, 0x2e, 0x63, 0x61, 0x70, + 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x12, 0x32, 0x0a, 0x15, 0x64, + 0x65, 0x63, 0x6f, 0x64, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, 0x65, 0x63, 0x6f, + 0x64, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x4a, + 0x04, 0x08, 0x05, 0x10, 0x06, 0x22, 0x98, 0x01, 0x0a, 0x11, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, + 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, + 0x4d, 0x61, 0x70, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x06, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, + 0x22, 0x9a, 0x01, 0x0a, 0x1a, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, + 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x1c, 0x0a, 0x09, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 
0x09, 0x52, 0x09, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x49, 0x64, 0x12, 0x39, 0x0a, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1d, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x67, 0x0a, + 0x0c, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x0a, + 0x0b, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, + 0x25, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x6f, + 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x22, 0x59, 0x0a, 0x0f, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, + 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x05, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, + 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x05, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x22, 0x8c, 0x01, 0x0a, 0x16, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x4d, 0x65, 
0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x2a, 0x0a, 0x03, + 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, + 0x79, 0x48, 0x00, 0x52, 0x03, 0x61, 0x63, 0x6b, 0x12, 0x3b, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x61, 0x70, + 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, + 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x08, 0x72, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x22, 0x4d, 0x0a, 0x12, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, + 0x61, 0x70, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x81, 0x01, 0x0a, 0x14, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x72, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0b, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x0e, + 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 
0x77, 0x4f, 0x77, + 0x6e, 0x65, 0x72, 0x22, 0x80, 0x01, 0x0a, 0x19, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, + 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x3e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x84, 0x01, 0x0a, 0x1d, 0x55, 0x6e, 0x72, 0x65, 0x67, + 0x69, 0x73, 0x74, 0x65, 0x72, 0x46, 0x72, 0x6f, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, - 0x65, 0x72, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xc2, 0x02, + 0x0a, 0x11, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 
0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x20, 0x0a, 0x0c, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x2c, 0x0a, + 0x12, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72, + 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x74, + 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x0b, 0x74, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x49, 0x64, 0x12, 0x25, + 0x0a, 0x0e, 0x63, 0x61, 0x70, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x79, 0x5f, 0x69, 0x64, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x61, 0x70, 0x52, 0x65, 0x67, 0x69, 0x73, + 0x74, 0x72, 0x79, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x10, 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, 0x52, + 0x0f, 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x49, 0x64, + 0x12, 0x24, 0x0a, 0x0e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x74, 0x5f, + 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, + 0x72, 0x53, 0x65, 0x74, 0x49, 0x64, 0x12, 0x2a, 0x0a, 0x11, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, + 0x5f, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x0f, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, + 0x49, 0x64, 0x22, 0x4f, 0x0a, 0x14, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, + 0x49, 0x6e, 0x66, 0x6f, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x37, 0x0a, 0x05, 0x69, 0x6e, + 0x66, 0x6f, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 
0x32, 0x21, 0x2e, 0x63, 0x61, 0x70, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x52, 0x05, 0x69, 0x6e, + 0x66, 0x6f, 0x73, 0x2a, 0xa1, 0x01, 0x0a, 0x0e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, + 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, + 0x4e, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, + 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x49, 0x47, 0x47, 0x45, 0x52, 0x10, 0x01, + 0x12, 0x1a, 0x0a, 0x16, 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x12, 0x1d, 0x0a, 0x19, + 0x43, 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, + 0x43, 0x4f, 0x4e, 0x53, 0x45, 0x4e, 0x53, 0x55, 0x53, 0x10, 0x03, 0x12, 0x1a, 0x0a, 0x16, 0x43, + 0x41, 0x50, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, + 0x41, 0x52, 0x47, 0x45, 0x54, 0x10, 0x04, 0x32, 0x55, 0x0a, 0x0e, 0x42, 0x61, 0x73, 0x65, 0x43, + 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x43, 0x0a, 0x04, 0x49, 0x6e, 0x66, + 0x6f, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x21, 0x2e, 0x63, 0x61, 0x70, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0xd3, + 0x01, 0x0a, 0x11, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x61, 0x62, 0x6c, 0x65, 0x12, 0x65, 0x0a, 0x0f, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 
0x72, + 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, 0x28, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, + 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x24, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, + 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x57, 0x0a, 0x11, 0x55, + 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, + 0x12, 0x28, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, + 0x74, 0x79, 0x22, 0x00, 0x32, 0x98, 0x02, 0x0a, 0x0a, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, + 0x62, 0x6c, 0x65, 0x12, 0x57, 0x0a, 0x12, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x54, + 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x27, 0x2e, 0x63, 0x61, 0x70, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, + 0x72, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x5f, 0x0a, 0x16, + 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x46, 0x72, 0x6f, 0x6d, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x2b, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x69, 0x65, 0x73, 
0x2e, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, + 0x46, 0x72, 0x6f, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x5f, 0x0a, - 0x16, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x46, 0x72, 0x6f, 0x6d, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x2b, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, - 0x72, 0x46, 0x72, 0x6f, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x50, - 0x0a, 0x07, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x1f, 0x2e, 0x63, 0x61, 0x70, 0x61, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x50, 0x0a, + 0x07, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x1f, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, + 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, - 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x63, 0x61, 0x70, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, - 0x32, 0xa6, 0x01, 0x0a, 0x14, 0x53, 0x74, 0x61, 0x6e, 0x64, 0x61, 0x72, 0x64, 0x43, 0x61, 0x70, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x47, 0x0a, 
0x0a, 0x49, 0x6e, 0x69, - 0x74, 0x69, 0x61, 0x6c, 0x69, 0x73, 0x65, 0x12, 0x1f, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x73, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, - 0x22, 0x00, 0x12, 0x45, 0x0a, 0x05, 0x49, 0x6e, 0x66, 0x6f, 0x73, 0x12, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x1a, 0x22, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, - 0x6f, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, - 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x61, - 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x32, + 0xa6, 0x01, 0x0a, 0x14, 0x53, 0x74, 0x61, 0x6e, 0x64, 0x61, 0x72, 0x64, 0x43, 0x61, 0x70, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x47, 0x0a, 0x0a, 0x49, 0x6e, 0x69, 0x74, + 0x69, 0x61, 0x6c, 0x69, 0x73, 0x65, 0x12, 0x1f, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x73, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 
0x74, 0x79, 0x22, + 0x00, 0x12, 0x45, 0x0a, 0x05, 0x49, 0x6e, 0x66, 0x6f, 0x73, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, + 0x74, 0x79, 0x1a, 0x22, 0x2e, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x2e, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, + 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, + 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x61, 0x70, + 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/pkg/capabilities/pb/capabilities.proto b/pkg/capabilities/pb/capabilities.proto index 58baa43c1..48d9d5d1b 100644 --- a/pkg/capabilities/pb/capabilities.proto +++ b/pkg/capabilities/pb/capabilities.proto @@ -36,6 +36,7 @@ message RequestMetadata { uint32 workflow_don_id = 6; uint32 workflow_don_config_version = 7; string reference_id = 8; + string decoded_workflow_name = 9; } message CapabilityRequest { @@ -80,6 +81,8 @@ message CapabilityResponse { message RegistrationMetadata { string workflow_id = 1; + string reference_id = 2; + string workflow_owner = 3; } message RegisterToWorkflowRequest { diff --git a/pkg/capabilities/pb/capabilities_helpers.go b/pkg/capabilities/pb/capabilities_helpers.go index c62620cda..b558492f0 100644 --- a/pkg/capabilities/pb/capabilities_helpers.go +++ b/pkg/capabilities/pb/capabilities_helpers.go @@ -60,6 +60,7 @@ func CapabilityRequestToProto(req capabilities.CapabilityRequest) *CapabilityReq WorkflowDonId: req.Metadata.WorkflowDonID, WorkflowDonConfigVersion: req.Metadata.WorkflowDonConfigVersion, 
ReferenceId: req.Metadata.ReferenceID, + DecodedWorkflowName: req.Metadata.DecodedWorkflowName, }, Inputs: values.ProtoMap(inputs), Config: values.ProtoMap(config), @@ -101,6 +102,7 @@ func CapabilityRequestFromProto(pr *CapabilityRequest) (capabilities.CapabilityR WorkflowDonID: md.WorkflowDonId, WorkflowDonConfigVersion: md.WorkflowDonConfigVersion, ReferenceID: md.ReferenceId, + DecodedWorkflowName: md.DecodedWorkflowName, }, Config: config, Inputs: inputs, @@ -149,6 +151,110 @@ func UnmarshalTriggerResponse(raw []byte) (capabilities.TriggerResponse, error) return TriggerResponseFromProto(&tr) } +func RegisterToWorkflowRequestToProto(req capabilities.RegisterToWorkflowRequest) *RegisterToWorkflowRequest { + config := values.EmptyMap() + if req.Config != nil { + config = req.Config + } + + return &RegisterToWorkflowRequest{ + Metadata: &RegistrationMetadata{ + WorkflowId: req.Metadata.WorkflowID, + ReferenceId: req.Metadata.ReferenceID, + WorkflowOwner: req.Metadata.WorkflowOwner, + }, + Config: values.ProtoMap(config), + } +} + +func RegisterToWorkflowRequestFromProto(req *RegisterToWorkflowRequest) (capabilities.RegisterToWorkflowRequest, error) { + if req == nil { + return capabilities.RegisterToWorkflowRequest{}, errors.New("received nil register to workflow request") + } + + if req.Metadata == nil { + return capabilities.RegisterToWorkflowRequest{}, errors.New("received nil metadata in register to workflow request") + } + + config, err := values.FromMapValueProto(req.Config) + if err != nil { + return capabilities.RegisterToWorkflowRequest{}, err + } + + return capabilities.RegisterToWorkflowRequest{ + Metadata: capabilities.RegistrationMetadata{ + WorkflowID: req.Metadata.WorkflowId, + ReferenceID: req.Metadata.ReferenceId, + WorkflowOwner: req.Metadata.WorkflowOwner, + }, + Config: config, + }, nil +} + +func UnregisterFromWorkflowRequestToProto(req capabilities.UnregisterFromWorkflowRequest) *UnregisterFromWorkflowRequest { + config := values.EmptyMap() + 
if req.Config != nil { + config = req.Config + } + + return &UnregisterFromWorkflowRequest{ + Metadata: &RegistrationMetadata{ + WorkflowId: req.Metadata.WorkflowID, + ReferenceId: req.Metadata.ReferenceID, + WorkflowOwner: req.Metadata.WorkflowOwner, + }, + Config: values.ProtoMap(config), + } +} + +func UnregisterFromWorkflowRequestFromProto(req *UnregisterFromWorkflowRequest) (capabilities.UnregisterFromWorkflowRequest, error) { + if req == nil { + return capabilities.UnregisterFromWorkflowRequest{}, errors.New("received nil unregister from workflow request") + } + + if req.Metadata == nil { + return capabilities.UnregisterFromWorkflowRequest{}, errors.New("received nil metadata in unregister from workflow request") + } + + config, err := values.FromMapValueProto(req.Config) + if err != nil { + return capabilities.UnregisterFromWorkflowRequest{}, err + } + + return capabilities.UnregisterFromWorkflowRequest{ + Metadata: capabilities.RegistrationMetadata{ + WorkflowID: req.Metadata.WorkflowId, + ReferenceID: req.Metadata.ReferenceId, + WorkflowOwner: req.Metadata.WorkflowOwner, + }, + Config: config, + }, nil +} + +func UnmarshalUnregisterFromWorkflowRequest(raw []byte) (capabilities.UnregisterFromWorkflowRequest, error) { + var r UnregisterFromWorkflowRequest + if err := proto.Unmarshal(raw, &r); err != nil { + return capabilities.UnregisterFromWorkflowRequest{}, err + } + return UnregisterFromWorkflowRequestFromProto(&r) +} + +func MarshalUnregisterFromWorkflowRequest(req capabilities.UnregisterFromWorkflowRequest) ([]byte, error) { + return proto.MarshalOptions{Deterministic: true}.Marshal(UnregisterFromWorkflowRequestToProto(req)) +} + +func UnmarshalRegisterToWorkflowRequest(raw []byte) (capabilities.RegisterToWorkflowRequest, error) { + var r RegisterToWorkflowRequest + if err := proto.Unmarshal(raw, &r); err != nil { + return capabilities.RegisterToWorkflowRequest{}, err + } + return RegisterToWorkflowRequestFromProto(&r) +} + +func 
MarshalRegisterToWorkflowRequest(req capabilities.RegisterToWorkflowRequest) ([]byte, error) { + return proto.MarshalOptions{Deterministic: true}.Marshal(RegisterToWorkflowRequestToProto(req)) +} + func TriggerRegistrationRequestToProto(req capabilities.TriggerRegistrationRequest) *TriggerRegistrationRequest { md := req.Metadata diff --git a/pkg/capabilities/pb/capabilities_helpers_test.go b/pkg/capabilities/pb/capabilities_helpers_test.go index 8dc80ecd3..f86951c8a 100644 --- a/pkg/capabilities/pb/capabilities_helpers_test.go +++ b/pkg/capabilities/pb/capabilities_helpers_test.go @@ -12,13 +12,14 @@ import ( ) const ( - testWorkflowID = "test-id-1" - testConfigKey = "test-key" - testConfigValue = "test-value" - testInputsKey = "input-key" - testInputsValue = "input-value" - testError = "test-error" - anyReferenceID = "anything" + testWorkflowID = "test-id-1" + testConfigKey = "test-key" + testConfigValue = "test-value" + testInputsKey = "input-key" + testInputsValue = "input-value" + testError = "test-error" + anyReferenceID = "anything" + testWorkflowOwner = "testowner" ) func TestCapabilityRequestFromProto(t *testing.T) { @@ -79,6 +80,7 @@ func TestMarshalUnmarshalRequest(t *testing.T) { WorkflowDonID: 1, WorkflowDonConfigVersion: 1, ReferenceID: anyReferenceID, + DecodedWorkflowName: "test-workflow-name", }, Config: &values.Map{Underlying: map[string]values.Value{ testConfigKey: &values.String{Underlying: testConfigValue}, @@ -119,3 +121,102 @@ func TestMarshalUnmarshalResponse(t *testing.T) { require.Equal(t, resp, unmarshaled) } + +func TestRegisterToWorkflowRequestToProto(t *testing.T) { + req := capabilities.RegisterToWorkflowRequest{ + Metadata: capabilities.RegistrationMetadata{ + WorkflowID: testWorkflowID, + WorkflowOwner: testWorkflowOwner, + }, + Config: &values.Map{Underlying: map[string]values.Value{ + testConfigKey: &values.String{Underlying: testConfigValue}, + }}, + } + pr := pb.RegisterToWorkflowRequestToProto(req) + assert.Equal(t, 
testWorkflowID, pr.Metadata.WorkflowId) + assert.Equal(t, testWorkflowOwner, pr.Metadata.WorkflowOwner) + + assert.Equal(t, testConfigValue, pr.Config.GetFields()[testConfigKey].GetStringValue()) +} + +func TestRegisterToWorkflowRequestFromProto(t *testing.T) { + configMap, err := values.NewMap(map[string]any{ + testConfigKey: testConfigValue, + }) + require.NoError(t, err) + + pr := &pb.RegisterToWorkflowRequest{ + Metadata: &pb.RegistrationMetadata{ + WorkflowId: testWorkflowID, + ReferenceId: anyReferenceID, + WorkflowOwner: testWorkflowOwner, + }, + Config: values.ProtoMap(configMap), + } + + req, err := pb.RegisterToWorkflowRequestFromProto(pr) + require.NoError(t, err) + + expectedMap, err := values.NewMap(map[string]any{ + testConfigKey: testConfigValue, + }) + require.NoError(t, err) + assert.Equal(t, capabilities.RegisterToWorkflowRequest{ + Metadata: capabilities.RegistrationMetadata{ + WorkflowID: testWorkflowID, + WorkflowOwner: testWorkflowOwner, + ReferenceID: anyReferenceID, + }, + Config: expectedMap, + }, req) +} + +func TestUnregisterFromWorkflowRequestToProto(t *testing.T) { + req := capabilities.UnregisterFromWorkflowRequest{ + Metadata: capabilities.RegistrationMetadata{ + WorkflowID: testWorkflowID, + ReferenceID: anyReferenceID, + WorkflowOwner: testWorkflowOwner, + }, + Config: &values.Map{Underlying: map[string]values.Value{ + testConfigKey: &values.String{Underlying: testConfigValue}, + }}, + } + pr := pb.UnregisterFromWorkflowRequestToProto(req) + assert.Equal(t, testWorkflowID, pr.Metadata.WorkflowId) + assert.Equal(t, anyReferenceID, pr.Metadata.ReferenceId) + assert.Equal(t, testWorkflowOwner, pr.Metadata.WorkflowOwner) + assert.Equal(t, testConfigValue, pr.Config.GetFields()[testConfigKey].GetStringValue()) +} + +func TestUnregisterFromWorkflowRequestFromProto(t *testing.T) { + configMap, err := values.NewMap(map[string]any{ + testConfigKey: testConfigValue, + }) + require.NoError(t, err) + + pr := &pb.UnregisterFromWorkflowRequest{ + 
Metadata: &pb.RegistrationMetadata{ + WorkflowId: testWorkflowID, + WorkflowOwner: testWorkflowOwner, + ReferenceId: anyReferenceID, + }, + Config: values.ProtoMap(configMap), + } + + req, err := pb.UnregisterFromWorkflowRequestFromProto(pr) + require.NoError(t, err) + + expectedMap, err := values.NewMap(map[string]any{ + testConfigKey: testConfigValue, + }) + require.NoError(t, err) + assert.Equal(t, capabilities.UnregisterFromWorkflowRequest{ + Metadata: capabilities.RegistrationMetadata{ + WorkflowID: testWorkflowID, + ReferenceID: anyReferenceID, + WorkflowOwner: testWorkflowOwner, + }, + Config: expectedMap, + }, req) +} diff --git a/pkg/capabilities/pb/registry.pb.go b/pkg/capabilities/pb/registry.pb.go index a3127491d..d6a2f6617 100644 --- a/pkg/capabilities/pb/registry.pb.go +++ b/pkg/capabilities/pb/registry.pb.go @@ -159,6 +159,72 @@ func (x *RemoteTargetConfig) GetRequestHashExcludedAttributes() []string { return nil } +type RemoteExecutableConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A collection of dot seperated paths to attributes that should be excluded from the request sent to the remote executable capability + // when calculating the hash of the request. This is useful for excluding attributes that are not deterministic to ensure + // that the hash of logically identical requests is consistent. 
+ RequestHashExcludedAttributes []string `protobuf:"bytes,1,rep,name=requestHashExcludedAttributes,proto3" json:"requestHashExcludedAttributes,omitempty"` + RegistrationRefresh *durationpb.Duration `protobuf:"bytes,2,opt,name=registrationRefresh,proto3" json:"registrationRefresh,omitempty"` + RegistrationExpiry *durationpb.Duration `protobuf:"bytes,3,opt,name=registrationExpiry,proto3" json:"registrationExpiry,omitempty"` +} + +func (x *RemoteExecutableConfig) Reset() { + *x = RemoteExecutableConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_capabilities_pb_registry_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoteExecutableConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoteExecutableConfig) ProtoMessage() {} + +func (x *RemoteExecutableConfig) ProtoReflect() protoreflect.Message { + mi := &file_capabilities_pb_registry_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoteExecutableConfig.ProtoReflect.Descriptor instead. 
+func (*RemoteExecutableConfig) Descriptor() ([]byte, []int) { + return file_capabilities_pb_registry_proto_rawDescGZIP(), []int{2} +} + +func (x *RemoteExecutableConfig) GetRequestHashExcludedAttributes() []string { + if x != nil { + return x.RequestHashExcludedAttributes + } + return nil +} + +func (x *RemoteExecutableConfig) GetRegistrationRefresh() *durationpb.Duration { + if x != nil { + return x.RegistrationRefresh + } + return nil +} + +func (x *RemoteExecutableConfig) GetRegistrationExpiry() *durationpb.Duration { + if x != nil { + return x.RegistrationExpiry + } + return nil +} + type CapabilityConfig struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -169,13 +235,14 @@ type CapabilityConfig struct { // // *CapabilityConfig_RemoteTriggerConfig // *CapabilityConfig_RemoteTargetConfig + // *CapabilityConfig_RemoteExecutableConfig RemoteConfig isCapabilityConfig_RemoteConfig `protobuf_oneof:"remote_config"` } func (x *CapabilityConfig) Reset() { *x = CapabilityConfig{} if protoimpl.UnsafeEnabled { - mi := &file_capabilities_pb_registry_proto_msgTypes[2] + mi := &file_capabilities_pb_registry_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -188,7 +255,7 @@ func (x *CapabilityConfig) String() string { func (*CapabilityConfig) ProtoMessage() {} func (x *CapabilityConfig) ProtoReflect() protoreflect.Message { - mi := &file_capabilities_pb_registry_proto_msgTypes[2] + mi := &file_capabilities_pb_registry_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -201,7 +268,7 @@ func (x *CapabilityConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use CapabilityConfig.ProtoReflect.Descriptor instead. 
func (*CapabilityConfig) Descriptor() ([]byte, []int) { - return file_capabilities_pb_registry_proto_rawDescGZIP(), []int{2} + return file_capabilities_pb_registry_proto_rawDescGZIP(), []int{3} } func (x *CapabilityConfig) GetDefaultConfig() *pb.Map { @@ -232,6 +299,13 @@ func (x *CapabilityConfig) GetRemoteTargetConfig() *RemoteTargetConfig { return nil } +func (x *CapabilityConfig) GetRemoteExecutableConfig() *RemoteExecutableConfig { + if x, ok := x.GetRemoteConfig().(*CapabilityConfig_RemoteExecutableConfig); ok { + return x.RemoteExecutableConfig + } + return nil +} + type isCapabilityConfig_RemoteConfig interface { isCapabilityConfig_RemoteConfig() } @@ -244,10 +318,16 @@ type CapabilityConfig_RemoteTargetConfig struct { RemoteTargetConfig *RemoteTargetConfig `protobuf:"bytes,3,opt,name=remote_target_config,json=remoteTargetConfig,proto3,oneof"` } +type CapabilityConfig_RemoteExecutableConfig struct { + RemoteExecutableConfig *RemoteExecutableConfig `protobuf:"bytes,4,opt,name=remote_executable_config,json=remoteExecutableConfig,proto3,oneof"` +} + func (*CapabilityConfig_RemoteTriggerConfig) isCapabilityConfig_RemoteConfig() {} func (*CapabilityConfig_RemoteTargetConfig) isCapabilityConfig_RemoteConfig() {} +func (*CapabilityConfig_RemoteExecutableConfig) isCapabilityConfig_RemoteConfig() {} + var File_capabilities_pb_registry_proto protoreflect.FileDescriptor var file_capabilities_pb_registry_proto_rawDesc = []byte{ @@ -288,27 +368,49 @@ var file_capabilities_pb_registry_proto_rawDesc = []byte{ 0x61, 0x73, 0x68, 0x45, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x1d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x61, 0x73, 0x68, 0x45, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x64, - 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x22, 0xf6, 0x01, 0x0a, 0x10, 0x43, - 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 
0x67, 0x12, - 0x32, 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x4f, 0x0a, 0x15, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x74, 0x72, - 0x69, 0x67, 0x67, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, - 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, - 0x13, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4c, 0x0a, 0x14, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x74, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, - 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x12, - 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x42, 0x0f, 0x0a, 0x0d, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, - 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x22, 0xf6, 0x01, 0x0a, 0x16, 0x52, + 0x65, 0x6d, 0x6f, 0x74, 0x65, 
0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x44, 0x0a, 0x1d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x48, 0x61, 0x73, 0x68, 0x45, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x64, 0x41, 0x74, 0x74, 0x72, + 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x1d, 0x72, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x61, 0x73, 0x68, 0x45, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, + 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x4b, 0x0a, 0x13, 0x72, + 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x72, 0x65, + 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x12, 0x49, 0x0a, 0x12, 0x72, 0x65, 0x67, 0x69, + 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x70, 0x69, 0x72, 0x79, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x12, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x70, + 0x69, 0x72, 0x79, 0x22, 0xd0, 0x02, 0x0a, 0x10, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x32, 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x0d, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4f, 0x0a, 0x15, + 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x74, 0x72, 0x69, 0x67, 
0x67, 0x65, 0x72, 0x5f, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x13, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, + 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4c, 0x0a, + 0x14, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x12, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x54, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x58, 0x0a, 0x18, 0x72, + 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62, 0x6c, 0x65, + 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x16, 0x72, + 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x0f, 0x0a, 0x0d, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x5f, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x61, 0x70, 0x61, 0x62, + 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 
0x33, } var ( @@ -323,27 +425,31 @@ func file_capabilities_pb_registry_proto_rawDescGZIP() []byte { return file_capabilities_pb_registry_proto_rawDescData } -var file_capabilities_pb_registry_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_capabilities_pb_registry_proto_msgTypes = make([]protoimpl.MessageInfo, 4) var file_capabilities_pb_registry_proto_goTypes = []interface{}{ - (*RemoteTriggerConfig)(nil), // 0: loop.RemoteTriggerConfig - (*RemoteTargetConfig)(nil), // 1: loop.RemoteTargetConfig - (*CapabilityConfig)(nil), // 2: loop.CapabilityConfig - (*durationpb.Duration)(nil), // 3: google.protobuf.Duration - (*pb.Map)(nil), // 4: values.Map + (*RemoteTriggerConfig)(nil), // 0: loop.RemoteTriggerConfig + (*RemoteTargetConfig)(nil), // 1: loop.RemoteTargetConfig + (*RemoteExecutableConfig)(nil), // 2: loop.RemoteExecutableConfig + (*CapabilityConfig)(nil), // 3: loop.CapabilityConfig + (*durationpb.Duration)(nil), // 4: google.protobuf.Duration + (*pb.Map)(nil), // 5: values.Map } var file_capabilities_pb_registry_proto_depIdxs = []int32{ - 3, // 0: loop.RemoteTriggerConfig.registrationRefresh:type_name -> google.protobuf.Duration - 3, // 1: loop.RemoteTriggerConfig.registrationExpiry:type_name -> google.protobuf.Duration - 3, // 2: loop.RemoteTriggerConfig.messageExpiry:type_name -> google.protobuf.Duration - 3, // 3: loop.RemoteTriggerConfig.batchCollectionPeriod:type_name -> google.protobuf.Duration - 4, // 4: loop.CapabilityConfig.default_config:type_name -> values.Map - 0, // 5: loop.CapabilityConfig.remote_trigger_config:type_name -> loop.RemoteTriggerConfig - 1, // 6: loop.CapabilityConfig.remote_target_config:type_name -> loop.RemoteTargetConfig - 7, // [7:7] is the sub-list for method output_type - 7, // [7:7] is the sub-list for method input_type - 7, // [7:7] is the sub-list for extension type_name - 7, // [7:7] is the sub-list for extension extendee - 0, // [0:7] is the sub-list for field type_name + 4, // 0: 
loop.RemoteTriggerConfig.registrationRefresh:type_name -> google.protobuf.Duration + 4, // 1: loop.RemoteTriggerConfig.registrationExpiry:type_name -> google.protobuf.Duration + 4, // 2: loop.RemoteTriggerConfig.messageExpiry:type_name -> google.protobuf.Duration + 4, // 3: loop.RemoteTriggerConfig.batchCollectionPeriod:type_name -> google.protobuf.Duration + 4, // 4: loop.RemoteExecutableConfig.registrationRefresh:type_name -> google.protobuf.Duration + 4, // 5: loop.RemoteExecutableConfig.registrationExpiry:type_name -> google.protobuf.Duration + 5, // 6: loop.CapabilityConfig.default_config:type_name -> values.Map + 0, // 7: loop.CapabilityConfig.remote_trigger_config:type_name -> loop.RemoteTriggerConfig + 1, // 8: loop.CapabilityConfig.remote_target_config:type_name -> loop.RemoteTargetConfig + 2, // 9: loop.CapabilityConfig.remote_executable_config:type_name -> loop.RemoteExecutableConfig + 10, // [10:10] is the sub-list for method output_type + 10, // [10:10] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name + 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name } func init() { file_capabilities_pb_registry_proto_init() } @@ -377,6 +483,18 @@ func file_capabilities_pb_registry_proto_init() { } } file_capabilities_pb_registry_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RemoteExecutableConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_capabilities_pb_registry_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CapabilityConfig); i { case 0: return &v.state @@ -389,9 +507,10 @@ func file_capabilities_pb_registry_proto_init() { } } } - file_capabilities_pb_registry_proto_msgTypes[2].OneofWrappers = []interface{}{ + file_capabilities_pb_registry_proto_msgTypes[3].OneofWrappers = []interface{}{ 
(*CapabilityConfig_RemoteTriggerConfig)(nil), (*CapabilityConfig_RemoteTargetConfig)(nil), + (*CapabilityConfig_RemoteExecutableConfig)(nil), } type x struct{} out := protoimpl.TypeBuilder{ @@ -399,7 +518,7 @@ func file_capabilities_pb_registry_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_capabilities_pb_registry_proto_rawDesc, NumEnums: 0, - NumMessages: 3, + NumMessages: 4, NumExtensions: 0, NumServices: 0, }, diff --git a/pkg/capabilities/pb/registry.proto b/pkg/capabilities/pb/registry.proto index 601770af6..601027399 100644 --- a/pkg/capabilities/pb/registry.proto +++ b/pkg/capabilities/pb/registry.proto @@ -23,12 +23,22 @@ message RemoteTargetConfig { repeated string requestHashExcludedAttributes = 1; } +message RemoteExecutableConfig { + // A collection of dot seperated paths to attributes that should be excluded from the request sent to the remote executable capability + // when calculating the hash of the request. This is useful for excluding attributes that are not deterministic to ensure + // that the hash of logically identical requests is consistent. 
+ repeated string requestHashExcludedAttributes = 1; + google.protobuf.Duration registrationRefresh = 2; + google.protobuf.Duration registrationExpiry = 3; +} + message CapabilityConfig { values.Map default_config = 1; oneof remote_config { RemoteTriggerConfig remote_trigger_config = 2; RemoteTargetConfig remote_target_config = 3; + RemoteExecutableConfig remote_executable_config = 4; } } diff --git a/pkg/capabilities/targets/chainwriter/target_builders_generated.go b/pkg/capabilities/targets/chainwriter/target_builders_generated.go index 71f7db1dd..d2c6602c7 100644 --- a/pkg/capabilities/targets/chainwriter/target_builders_generated.go +++ b/pkg/capabilities/targets/chainwriter/target_builders_generated.go @@ -4,7 +4,7 @@ package chainwriter import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" - ocr3cap "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) diff --git a/pkg/capabilities/triggers/cron/cron_trigger-schema.json b/pkg/capabilities/triggers/cron/cron_trigger-schema.json new file mode 100644 index 000000000..5710378a4 --- /dev/null +++ b/pkg/capabilities/triggers/cron/cron_trigger-schema.json @@ -0,0 +1,43 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/cron/cron-trigger@1.0.0", + "$defs": { + "Payload": { + "type": "object", + "properties": { + "ActualExecutionTime": { + "type": "string", + "description": "Time that cron trigger's task execution occurred (RFC3339Nano formatted)" + }, + "ScheduledExecutionTime": { + "type": "string", + "description": "Time that cron trigger's task execution had been scheduled to occur (RFC3339Nano formatted)" + } + }, + "required": ["ActualExecutionTime", "ScheduledExecutionTime"], + "additionalProperties": 
false + }, + "Config": { + "type": "object", + "properties": { + "schedule": { + "type": "string" + } + }, + "required": ["schedule"], + "additionalProperties": false + } + }, + "type": "object", + "properties": { + "config": { + "$ref": "#/$defs/Config" + }, + "outputs": { + "$ref": "#/$defs/Payload" + } + }, + "required": ["config", "outputs"], + "additionalProperties": false, + "description": "A trigger that uses a cron schedule to run periodically at fixed times, dates, or intervals." +} \ No newline at end of file diff --git a/pkg/capabilities/triggers/cron/cron_trigger_generated.go b/pkg/capabilities/triggers/cron/cron_trigger_generated.go new file mode 100644 index 000000000..5c2e2a5c0 --- /dev/null +++ b/pkg/capabilities/triggers/cron/cron_trigger_generated.go @@ -0,0 +1,92 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +package cron + +import ( + "encoding/json" + "fmt" +) + +type Config struct { + // Schedule corresponds to the JSON schema field "schedule". + Schedule string `json:"schedule" yaml:"schedule" mapstructure:"schedule"` +} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *Config) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["schedule"]; raw != nil && !ok { + return fmt.Errorf("field schedule in Config: required") + } + type Plain Config + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Config(plain) + return nil +} + +type Payload struct { + // Time that cron trigger's task execution occurred (RFC3339Nano formatted) + ActualExecutionTime string `json:"ActualExecutionTime" yaml:"ActualExecutionTime" mapstructure:"ActualExecutionTime"` + + // Time that cron trigger's task execution had been scheduled to occur + // (RFC3339Nano formatted) + ScheduledExecutionTime string `json:"ScheduledExecutionTime" yaml:"ScheduledExecutionTime" mapstructure:"ScheduledExecutionTime"` +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Payload) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["ActualExecutionTime"]; raw != nil && !ok { + return fmt.Errorf("field ActualExecutionTime in Payload: required") + } + if _, ok := raw["ScheduledExecutionTime"]; raw != nil && !ok { + return fmt.Errorf("field ScheduledExecutionTime in Payload: required") + } + type Plain Payload + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Payload(plain) + return nil +} + +// A trigger that uses a cron schedule to run periodically at fixed times, dates, +// or intervals. +type Trigger struct { + // Config corresponds to the JSON schema field "config". + Config Config `json:"config" yaml:"config" mapstructure:"config"` + + // Outputs corresponds to the JSON schema field "outputs". + Outputs Payload `json:"outputs" yaml:"outputs" mapstructure:"outputs"` +} + +// UnmarshalJSON implements json.Unmarshaler. 
+func (j *Trigger) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + if _, ok := raw["config"]; raw != nil && !ok { + return fmt.Errorf("field config in Trigger: required") + } + if _, ok := raw["outputs"]; raw != nil && !ok { + return fmt.Errorf("field outputs in Trigger: required") + } + type Plain Trigger + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err + } + *j = Trigger(plain) + return nil +} diff --git a/pkg/capabilities/triggers/cron/crontest/trigger_mock_generated.go b/pkg/capabilities/triggers/cron/crontest/trigger_mock_generated.go new file mode 100644 index 000000000..ad683b1d5 --- /dev/null +++ b/pkg/capabilities/triggers/cron/crontest/trigger_mock_generated.go @@ -0,0 +1,17 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +package crontest + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/cron" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" +) + +// Trigger registers a new capability mock with the runner +func Trigger(runner *testutils.Runner, fn func() (cron.Payload, error)) *testutils.TriggerMock[cron.Payload] { + mock := testutils.MockTrigger[cron.Payload]("cron-trigger@1.0.0", fn) + runner.MockCapability("cron-trigger@1.0.0", nil, mock) + return mock +} diff --git a/pkg/capabilities/triggers/cron/trigger_builders_generated.go b/pkg/capabilities/triggers/cron/trigger_builders_generated.go new file mode 100644 index 000000000..e84dac176 --- /dev/null +++ b/pkg/capabilities/triggers/cron/trigger_builders_generated.go @@ -0,0 +1,84 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. 
+ +package cron + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func (cfg Config) New(w *sdk.WorkflowSpecFactory) PayloadCap { + ref := "trigger" + def := sdk.StepDefinition{ + ID: "cron-trigger@1.0.0", Ref: ref, + Inputs: sdk.StepInputs{}, + Config: map[string]any{ + "schedule": cfg.Schedule, + }, + CapabilityType: capabilities.CapabilityTypeTrigger, + } + + step := sdk.Step[Payload]{Definition: def} + raw := step.AddTo(w) + return PayloadWrapper(raw) +} + +// PayloadWrapper allows access to field from an sdk.CapDefinition[Payload] +func PayloadWrapper(raw sdk.CapDefinition[Payload]) PayloadCap { + wrapped, ok := raw.(PayloadCap) + if ok { + return wrapped + } + return &payloadCap{CapDefinition: raw} +} + +type PayloadCap interface { + sdk.CapDefinition[Payload] + ActualExecutionTime() sdk.CapDefinition[string] + ScheduledExecutionTime() sdk.CapDefinition[string] + private() +} + +type payloadCap struct { + sdk.CapDefinition[Payload] +} + +func (*payloadCap) private() {} +func (c *payloadCap) ActualExecutionTime() sdk.CapDefinition[string] { + return sdk.AccessField[Payload, string](c.CapDefinition, "ActualExecutionTime") +} +func (c *payloadCap) ScheduledExecutionTime() sdk.CapDefinition[string] { + return sdk.AccessField[Payload, string](c.CapDefinition, "ScheduledExecutionTime") +} + +func ConstantPayload(value Payload) PayloadCap { + return &payloadCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewPayloadFromFields( + actualExecutionTime sdk.CapDefinition[string], + scheduledExecutionTime sdk.CapDefinition[string]) PayloadCap { + return &simplePayload{ + CapDefinition: sdk.ComponentCapDefinition[Payload]{ + "ActualExecutionTime": actualExecutionTime.Ref(), + "ScheduledExecutionTime": scheduledExecutionTime.Ref(), + }, + actualExecutionTime: actualExecutionTime, + scheduledExecutionTime: scheduledExecutionTime, + } +} + +type simplePayload struct 
{ + sdk.CapDefinition[Payload] + actualExecutionTime sdk.CapDefinition[string] + scheduledExecutionTime sdk.CapDefinition[string] +} + +func (c *simplePayload) ActualExecutionTime() sdk.CapDefinition[string] { + return c.actualExecutionTime +} +func (c *simplePayload) ScheduledExecutionTime() sdk.CapDefinition[string] { + return c.scheduledExecutionTime +} + +func (c *simplePayload) private() {} diff --git a/pkg/capabilities/triggers/mercury_remote_aggregator.go b/pkg/capabilities/triggers/mercury_remote_aggregator.go index 479c09bdc..5cc3f75dd 100644 --- a/pkg/capabilities/triggers/mercury_remote_aggregator.go +++ b/pkg/capabilities/triggers/mercury_remote_aggregator.go @@ -15,13 +15,14 @@ type mercuryRemoteAggregator struct { allowedSigners [][]byte minRequiredSignatures int previousLatestReports map[datastreams.FeedID]datastreams.FeedReport + capID string lggr logger.Logger } // This aggregator is used by TriggerSubscriber to aggregate trigger events from multiple remote nodes. // NOTE: Once Mercury supports parallel composition (and thus guarantee identical sets of reports), // this will be replaced by the default MODE aggregator. 
-func NewMercuryRemoteAggregator(codec datastreams.ReportCodec, allowedSigners [][]byte, minRequiredSignatures int, lggr logger.Logger) *mercuryRemoteAggregator { +func NewMercuryRemoteAggregator(codec datastreams.ReportCodec, allowedSigners [][]byte, minRequiredSignatures int, capID string, lggr logger.Logger) *mercuryRemoteAggregator { if allowedSigners == nil { allowedSigners = [][]byte{} } @@ -30,6 +31,7 @@ func NewMercuryRemoteAggregator(codec datastreams.ReportCodec, allowedSigners [] allowedSigners: allowedSigners, minRequiredSignatures: minRequiredSignatures, previousLatestReports: make(map[datastreams.FeedID]datastreams.FeedReport), + capID: capID, lggr: lggr, } } @@ -91,5 +93,5 @@ func (a *mercuryRemoteAggregator) Aggregate(triggerEventID string, responses [][ Signers: a.allowedSigners, MinRequiredSignatures: a.minRequiredSignatures, } - return wrapReports(reportList, triggerEventID, latestGlobalTs, meta) + return wrapReports(reportList, triggerEventID, latestGlobalTs, meta, a.capID) } diff --git a/pkg/capabilities/triggers/mercury_remote_aggregator_test.go b/pkg/capabilities/triggers/mercury_remote_aggregator_test.go index a659393f5..914a5ba38 100644 --- a/pkg/capabilities/triggers/mercury_remote_aggregator_test.go +++ b/pkg/capabilities/triggers/mercury_remote_aggregator_test.go @@ -16,6 +16,7 @@ const ( eventID = "ev_id_1" rawReport1 = "abcd" rawReport2 = "efgh" + capID = "streams-trigger@3.2.1" ) type testMercuryCodec struct { @@ -36,7 +37,7 @@ func (c testMercuryCodec) Wrap(reports []datastreams.FeedReport) (values.Value, } func TestMercuryRemoteAggregator(t *testing.T) { - agg := NewMercuryRemoteAggregator(testMercuryCodec{}, nil, 0, logger.Nop()) + agg := NewMercuryRemoteAggregator(testMercuryCodec{}, nil, 0, capID, logger.Nop()) signatures := [][]byte{{1, 2, 3}} feed1Old := datastreams.FeedReport{ @@ -99,7 +100,7 @@ func TestMercuryRemoteAggregator(t *testing.T) { } func getRawResponse(t *testing.T, reports []datastreams.FeedReport, timestamp 
int64) []byte { - resp, err := wrapReports(reports, eventID, timestamp, datastreams.Metadata{}) + resp, err := wrapReports(reports, eventID, timestamp, datastreams.Metadata{}, capID) require.NoError(t, err) rawResp, err := pb.MarshalTriggerResponse(resp) require.NoError(t, err) diff --git a/pkg/capabilities/triggers/mercury_trigger.go b/pkg/capabilities/triggers/mercury_trigger.go index cc456d863..a7ec78e38 100644 --- a/pkg/capabilities/triggers/mercury_trigger.go +++ b/pkg/capabilities/triggers/mercury_trigger.go @@ -16,19 +16,14 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/values" ) -const triggerID = "streams-trigger@1.0.0" - -var capInfo = capabilities.MustNewCapabilityInfo( - triggerID, - capabilities.CapabilityTypeTrigger, - "Streams Trigger", +const ( + defaultCapabilityName = "streams-trigger" + defaultCapabilityVersion = "1.1.0" + defaultTickerResolutionMs = 1000 + // TODO pending capabilities configuration implementation - this should be configurable with a sensible default + defaultSendChannelBufferSize = 1000 ) -const defaultTickerResolutionMs = 1000 - -// TODO pending capabilities configuration implementation - this should be configurable with a sensible default -const defaultSendChannelBufferSize = 1000 - // This Trigger Service allows for the registration and deregistration of triggers. You can also send reports to the service. type MercuryTriggerService struct { capabilities.CapabilityInfo @@ -54,17 +49,31 @@ type subscriber struct { // Mercury Trigger will send events to each subscriber every MaxFrequencyMs (configurable per subscriber). // Event generation happens whenever local unix time is a multiple of tickerResolutionMs. Therefore, // all subscribers' MaxFrequencyMs values need to be a multiple of tickerResolutionMs. 
-func NewMercuryTriggerService(tickerResolutionMs int64, lggr logger.Logger) *MercuryTriggerService { +func NewMercuryTriggerService(tickerResolutionMs int64, capName string, capVersion string, lggr logger.Logger) (*MercuryTriggerService, error) { if tickerResolutionMs == 0 { tickerResolutionMs = defaultTickerResolutionMs } + if capName == "" { + capName = defaultCapabilityName + } + if capVersion == "" { + capVersion = defaultCapabilityVersion + } + capInfo, err := capabilities.NewCapabilityInfo( + capName+"@"+capVersion, + capabilities.CapabilityTypeTrigger, + "Streams Trigger", + ) + if err != nil { + return nil, err + } return &MercuryTriggerService{ CapabilityInfo: capInfo, tickerResolutionMs: tickerResolutionMs, subscribers: make(map[string]*subscriber), latestReports: make(map[datastreams.FeedID]datastreams.FeedReport), stopCh: make(services.StopChan), - lggr: logger.Named(lggr, "MercuryTriggerService")} + lggr: logger.Named(lggr, "MercuryTriggerService")}, nil } func (o *MercuryTriggerService) SetMetaOverride(meta datastreams.Metadata) { @@ -95,7 +104,7 @@ func (o *MercuryTriggerService) RegisterTrigger(ctx context.Context, req capabil // If triggerId is already registered, return an error if _, ok := o.subscribers[req.TriggerID]; ok { - return nil, fmt.Errorf("triggerId %s already registered", triggerID) + return nil, fmt.Errorf("triggerId %s already registered", o.ID) } if int64(config.MaxFrequencyMs)%o.tickerResolutionMs != 0 { @@ -133,7 +142,7 @@ func (o *MercuryTriggerService) UnregisterTrigger(ctx context.Context, req capab subscriber, ok := o.subscribers[req.TriggerID] if !ok { - return fmt.Errorf("triggerId %s not registered", triggerID) + return fmt.Errorf("triggerId %s not registered", o.ID) } close(subscriber.ch) delete(o.subscribers, req.TriggerID) @@ -186,7 +195,7 @@ func (o *MercuryTriggerService) process(timestamp int64) { // use 32-byte-padded timestamp as EventID (human-readable) eventID := fmt.Sprintf("streams_%024s", 
strconv.FormatInt(timestamp, 10)) - capabilityResponse, err := wrapReports(reportList, eventID, timestamp, o.metaOverride) + capabilityResponse, err := wrapReports(reportList, eventID, timestamp, o.metaOverride, o.ID) if err != nil { o.lggr.Errorw("error wrapping reports", "err", err) continue @@ -202,7 +211,7 @@ func (o *MercuryTriggerService) process(timestamp int64) { } } -func wrapReports(reportList []datastreams.FeedReport, eventID string, timestamp int64, meta datastreams.Metadata) (capabilities.TriggerResponse, error) { +func wrapReports(reportList []datastreams.FeedReport, eventID string, timestamp int64, meta datastreams.Metadata, capID string) (capabilities.TriggerResponse, error) { out := datastreams.StreamsTriggerEvent{ Payload: reportList, Metadata: meta, @@ -216,7 +225,7 @@ func wrapReports(reportList []datastreams.FeedReport, eventID string, timestamp // Create a new TriggerRegistrationResponse with the MercuryTriggerEvent return capabilities.TriggerResponse{ Event: capabilities.TriggerEvent{ - TriggerType: triggerID, + TriggerType: capID, ID: eventID, Outputs: outputsv, }, @@ -246,5 +255,5 @@ func (o *MercuryTriggerService) HealthReport() map[string]error { } func (o *MercuryTriggerService) Name() string { - return "MercuryTriggerService" + return o.lggr.Name() } diff --git a/pkg/capabilities/triggers/mercury_trigger_test.go b/pkg/capabilities/triggers/mercury_trigger_test.go index a3c404728..80ea04940 100644 --- a/pkg/capabilities/triggers/mercury_trigger_test.go +++ b/pkg/capabilities/triggers/mercury_trigger_test.go @@ -51,7 +51,8 @@ func registerTrigger( return triggerEventsCh, registerRequest } -var ( +const ( + triggerID = "streams-trigger@4.5.6" feedOne = "0x1111111111111111111100000000000000000000000000000000000000000000" feedTwo = "0x2222222222222222222200000000000000000000000000000000000000000000" feedThree = "0x3333333333333333333300000000000000000000000000000000000000000000" @@ -60,9 +61,10 @@ var ( ) func TestMercuryTrigger(t 
*testing.T) { - ts := NewMercuryTriggerService(100, logger.Nop()) + ts, err := NewMercuryTriggerService(100, "", "4.5.6", logger.Nop()) + require.NoError(t, err) ctx := tests.Context(t) - err := ts.Start(ctx) + err = ts.Start(ctx) require.NoError(t, err) // use registerTriggerHelper to register a trigger callback, registerUnregisterRequest := registerTrigger( @@ -100,9 +102,10 @@ func TestMercuryTrigger(t *testing.T) { } func TestMultipleMercuryTriggers(t *testing.T) { - ts := NewMercuryTriggerService(100, logger.Nop()) + ts, err := NewMercuryTriggerService(100, "", "4.5.6", logger.Nop()) + require.NoError(t, err) ctx := tests.Context(t) - err := ts.Start(ctx) + err = ts.Start(ctx) require.NoError(t, err) callback1, cr1 := registerTrigger( ctx, @@ -214,7 +217,8 @@ func TestMultipleMercuryTriggers(t *testing.T) { } func TestMercuryTrigger_RegisterTriggerErrors(t *testing.T) { - ts := NewMercuryTriggerService(100, logger.Nop()) + ts, err := NewMercuryTriggerService(100, "", "4.5.6", logger.Nop()) + require.NoError(t, err) ctx := tests.Context(t) require.NoError(t, ts.Start(ctx)) @@ -293,7 +297,8 @@ func TestMercuryTrigger_ConfigValidation(t *testing.T) { return newConfig(t, []string{feedID}, 1000) } - ts := NewMercuryTriggerService(1000, logger.Nop()) + ts, err := NewMercuryTriggerService(1000, "", "4.5.6", logger.Nop()) + require.NoError(t, err) rawConf := newConfigSingleFeed(t, "012345678901234567890123456789012345678901234567890123456789000000") conf, err := ts.ValidateConfig(rawConf) require.Error(t, err) @@ -355,7 +360,7 @@ func TestMercuryTrigger_WrapReports(t *testing.T) { ObservationTimestamp: 876543, }) } - wrapped, err := wrapReports(reportList, "event_id", 1234, meta) + wrapped, err := wrapReports(reportList, "event_id", 1234, meta, triggerID) require.NoError(t, err) require.NotNil(t, wrapped.Event) require.Len(t, wrapped.Event.Outputs.Underlying["Payload"].(*values.List).Underlying, P) diff --git 
a/pkg/capabilities/triggers/streams/trigger_builders_generated.go b/pkg/capabilities/triggers/streams/trigger_builders_generated.go index 677c2a6f6..2a8692f9f 100644 --- a/pkg/capabilities/triggers/streams/trigger_builders_generated.go +++ b/pkg/capabilities/triggers/streams/trigger_builders_generated.go @@ -20,7 +20,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) FeedCap { } step := sdk.Step[Feed]{Definition: def} - return FeedCapFromStep(w, step) + raw := step.AddTo(w) + return FeedWrapper(raw) +} + +// FeedWrapper allows access to field from an sdk.CapDefinition[Feed] +func FeedWrapper(raw sdk.CapDefinition[Feed]) FeedCap { + wrapped, ok := raw.(FeedCap) + if ok { + return wrapped + } + return &feedCap{CapDefinition: raw} } type FeedCap interface { @@ -31,27 +41,25 @@ type FeedCap interface { private() } -// FeedCapFromStep should only be called from generated code to assure type safety -func FeedCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[Feed]) FeedCap { - raw := step.AddTo(w) - return &feed{CapDefinition: raw} -} - -type feed struct { +type feedCap struct { sdk.CapDefinition[Feed] } -func (*feed) private() {} -func (c *feed) Metadata() SignersMetadataCap { - return &signersMetadata{CapDefinition: sdk.AccessField[Feed, SignersMetadata](c.CapDefinition, "Metadata")} +func (*feedCap) private() {} +func (c *feedCap) Metadata() SignersMetadataCap { + return SignersMetadataWrapper(sdk.AccessField[Feed, SignersMetadata](c.CapDefinition, "Metadata")) } -func (c *feed) Payload() sdk.CapDefinition[[]FeedReport] { +func (c *feedCap) Payload() sdk.CapDefinition[[]FeedReport] { return sdk.AccessField[Feed, []FeedReport](c.CapDefinition, "Payload") } -func (c *feed) Timestamp() sdk.CapDefinition[int64] { +func (c *feedCap) Timestamp() sdk.CapDefinition[int64] { return sdk.AccessField[Feed, int64](c.CapDefinition, "Timestamp") } +func ConstantFeed(value Feed) FeedCap { + return &feedCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func 
NewFeedFromFields( metadata SignersMetadataCap, payload sdk.CapDefinition[[]FeedReport], @@ -87,8 +95,26 @@ func (c *simpleFeed) Timestamp() sdk.CapDefinition[int64] { func (c *simpleFeed) private() {} +// FeedIdWrapper allows access to field from an sdk.CapDefinition[FeedId] +func FeedIdWrapper(raw sdk.CapDefinition[FeedId]) FeedIdCap { + wrapped, ok := raw.(FeedIdCap) + if ok { + return wrapped + } + return FeedIdCap(raw) +} + type FeedIdCap sdk.CapDefinition[FeedId] +// FeedReportWrapper allows access to field from an sdk.CapDefinition[FeedReport] +func FeedReportWrapper(raw sdk.CapDefinition[FeedReport]) FeedReportCap { + wrapped, ok := raw.(FeedReportCap) + if ok { + return wrapped + } + return &feedReportCap{CapDefinition: raw} +} + type FeedReportCap interface { sdk.CapDefinition[FeedReport] BenchmarkPrice() sdk.CapDefinition[[]uint8] @@ -100,36 +126,34 @@ type FeedReportCap interface { private() } -// FeedReportCapFromStep should only be called from generated code to assure type safety -func FeedReportCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[FeedReport]) FeedReportCap { - raw := step.AddTo(w) - return &feedReport{CapDefinition: raw} -} - -type feedReport struct { +type feedReportCap struct { sdk.CapDefinition[FeedReport] } -func (*feedReport) private() {} -func (c *feedReport) BenchmarkPrice() sdk.CapDefinition[[]uint8] { +func (*feedReportCap) private() {} +func (c *feedReportCap) BenchmarkPrice() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "BenchmarkPrice") } -func (c *feedReport) FeedID() FeedIdCap { - return FeedIdCap(sdk.AccessField[FeedReport, FeedId](c.CapDefinition, "FeedID")) +func (c *feedReportCap) FeedID() FeedIdCap { + return FeedIdWrapper(sdk.AccessField[FeedReport, FeedId](c.CapDefinition, "FeedID")) } -func (c *feedReport) FullReport() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) FullReport() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, 
[]uint8](c.CapDefinition, "FullReport") } -func (c *feedReport) ObservationTimestamp() sdk.CapDefinition[int64] { +func (c *feedReportCap) ObservationTimestamp() sdk.CapDefinition[int64] { return sdk.AccessField[FeedReport, int64](c.CapDefinition, "ObservationTimestamp") } -func (c *feedReport) ReportContext() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) ReportContext() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "ReportContext") } -func (c *feedReport) Signatures() sdk.CapDefinition[[][]uint8] { +func (c *feedReportCap) Signatures() sdk.CapDefinition[[][]uint8] { return sdk.AccessField[FeedReport, [][]uint8](c.CapDefinition, "Signatures") } +func ConstantFeedReport(value FeedReport) FeedReportCap { + return &feedReportCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedReportFromFields( benchmarkPrice sdk.CapDefinition[[]uint8], feedID FeedIdCap, @@ -186,6 +210,15 @@ func (c *simpleFeedReport) Signatures() sdk.CapDefinition[[][]uint8] { func (c *simpleFeedReport) private() {} +// SignersMetadataWrapper allows access to field from an sdk.CapDefinition[SignersMetadata] +func SignersMetadataWrapper(raw sdk.CapDefinition[SignersMetadata]) SignersMetadataCap { + wrapped, ok := raw.(SignersMetadataCap) + if ok { + return wrapped + } + return &signersMetadataCap{CapDefinition: raw} +} + type SignersMetadataCap interface { sdk.CapDefinition[SignersMetadata] MinRequiredSignatures() sdk.CapDefinition[int64] @@ -193,24 +226,22 @@ type SignersMetadataCap interface { private() } -// SignersMetadataCapFromStep should only be called from generated code to assure type safety -func SignersMetadataCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SignersMetadata]) SignersMetadataCap { - raw := step.AddTo(w) - return &signersMetadata{CapDefinition: raw} -} - -type signersMetadata struct { +type signersMetadataCap struct { sdk.CapDefinition[SignersMetadata] } -func (*signersMetadata) private() {} -func (c 
*signersMetadata) MinRequiredSignatures() sdk.CapDefinition[int64] { +func (*signersMetadataCap) private() {} +func (c *signersMetadataCap) MinRequiredSignatures() sdk.CapDefinition[int64] { return sdk.AccessField[SignersMetadata, int64](c.CapDefinition, "MinRequiredSignatures") } -func (c *signersMetadata) Signers() sdk.CapDefinition[[]string] { +func (c *signersMetadataCap) Signers() sdk.CapDefinition[[]string] { return sdk.AccessField[SignersMetadata, []string](c.CapDefinition, "Signers") } +func ConstantSignersMetadata(value SignersMetadata) SignersMetadataCap { + return &signersMetadataCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSignersMetadataFromFields( minRequiredSignatures sdk.CapDefinition[int64], signers sdk.CapDefinition[[]string]) SignersMetadataCap { diff --git a/pkg/codec/byte_string_modifier.go b/pkg/codec/byte_string_modifier.go new file mode 100644 index 000000000..153cc6e20 --- /dev/null +++ b/pkg/codec/byte_string_modifier.go @@ -0,0 +1,256 @@ +package codec + +import ( + "fmt" + "reflect" + + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +// AddressModifier defines the interface for encoding, decoding, and handling addresses. +// This interface allows for chain-specific logic to be injected into the modifier without +// modifying the common repository. +type AddressModifier interface { + // EncodeAddress converts byte array representing an address into its string form using chain-specific logic. + EncodeAddress([]byte) (string, error) + // DecodeAddress converts a string representation of an address back into its byte array form using chain-specific logic. + DecodeAddress(string) ([]byte, error) + // Length returns the expected byte length of the address for the specific chain. + Length() int +} + +// NewAddressBytesToStringModifier creates and returns a new modifier that transforms address byte +// arrays to their corresponding string representation (or vice versa) based on the provided +// AddressModifier. 
+// +// The fields parameter specifies which fields within a struct should be modified. The AddressModifier +// is injected into the modifier to handle chain-specific logic during the contractReader relayer configuration. +func NewAddressBytesToStringModifier(fields []string, modifier AddressModifier) Modifier { + // bool is a placeholder value + fieldMap := map[string]bool{} + for _, field := range fields { + fieldMap[field] = true + } + + m := &bytesToStringModifier{ + modifier: modifier, + modifierBase: modifierBase[bool]{ + fields: fieldMap, + onToOffChainType: map[reflect.Type]reflect.Type{}, + offToOnChainType: map[reflect.Type]reflect.Type{}, + }, + } + + // Modify field for input using the modifier to convert the byte array to string + m.modifyFieldForInput = func(_ string, field *reflect.StructField, _ string, _ bool) error { + t, err := createStringTypeForBytes(field.Type, field.Name, modifier.Length()) + if err != nil { + return err + } + field.Type = t + return nil + } + + return m +} + +type bytesToStringModifier struct { + // Injected modifier that contains chain-specific logic + modifier AddressModifier + modifierBase[bool] +} + +func (t *bytesToStringModifier) RetypeToOffChain(onChainType reflect.Type, _ string) (tpe reflect.Type, err error) { + defer func() { + // StructOf can panic if the fields are not valid + if r := recover(); r != nil { + tpe = nil + err = fmt.Errorf("%w: %v", types.ErrInvalidType, r) + } + }() + + // Attempt to retype using the shared functionality in modifierBase + offChainType, err := t.modifierBase.RetypeToOffChain(onChainType, "") + if err != nil { + // Handle additional cases specific to bytesToStringModifier + if onChainType.Kind() == reflect.Array { + addrType := reflect.ArrayOf(t.modifier.Length(), reflect.TypeOf(byte(0))) + // Check for nested byte arrays (e.g., [n][20]byte) + if onChainType.Elem() == addrType.Elem() { + return reflect.ArrayOf(onChainType.Len(), reflect.TypeOf("")), nil + } + } + } + + return 
offChainType, err +} + +// TransformToOnChain uses the AddressModifier for string-to-address conversion. +func (t *bytesToStringModifier) TransformToOnChain(offChainValue any, _ string) (any, error) { + return transformWithMaps(offChainValue, t.offToOnChainType, t.fields, noop, stringToAddressHookForOnChain(t.modifier)) +} + +// TransformToOffChain uses the AddressModifier for address-to-string conversion. +func (t *bytesToStringModifier) TransformToOffChain(onChainValue any, _ string) (any, error) { + return transformWithMaps(onChainValue, t.onToOffChainType, t.fields, + addressTransformationAction(t.modifier.Length()), + addressToStringHookForOffChain(t.modifier), + ) +} + +// addressTransformationAction performs conversions over the fields we want to modify. +// It handles byte arrays, ensuring they are convertible to the expected length. +// It then replaces the field in the map with the transformed value. +func addressTransformationAction(length int) func(extractMap map[string]any, key string, _ bool) error { + return func(em map[string]any, fieldName string, _ bool) error { + if val, ok := em[fieldName]; ok { + rVal := reflect.ValueOf(val) + + if !rVal.IsValid() { + return fmt.Errorf("invalid value for field %s", fieldName) + } + + if rVal.Kind() == reflect.Ptr && !rVal.IsNil() { + rVal = reflect.Indirect(rVal) + } + + expectedType := reflect.ArrayOf(length, reflect.TypeOf(byte(0))) + if rVal.Type().ConvertibleTo(expectedType) { + if !rVal.CanConvert(expectedType) { + return fmt.Errorf("cannot convert type %v to expected type %v for field %s", rVal.Type(), expectedType, fieldName) + } + rVal = rVal.Convert(expectedType) + } + + switch rVal.Kind() { + case reflect.Array: + // Handle outer arrays (e.g., [n][length]byte) + if rVal.Type().Elem().Kind() == reflect.Array && rVal.Type().Elem().Len() == length { + addressArray := reflect.New(reflect.ArrayOf(rVal.Len(), expectedType)).Elem() + for i := 0; i < rVal.Len(); i++ { + elem := rVal.Index(i) + if elem.Len() 
!= length { + return fmt.Errorf("expected [%d]byte but got length %d for element %d in field %s", length, elem.Len(), i, fieldName) + } + reflect.Copy(addressArray.Index(i), elem) + } + em[fieldName] = addressArray.Interface() + } else if rVal.Type() == expectedType { + // Handle a single array (e.g., [length]byte) + addressVal := reflect.New(expectedType).Elem() + reflect.Copy(addressVal, rVal) + em[fieldName] = addressVal.Interface() + } else { + return fmt.Errorf("expected [%d]byte but got %v for field %s", length, rVal.Type(), fieldName) + } + case reflect.Slice: + // Handle slices of byte arrays (e.g., [][length]byte) + if rVal.Len() > 0 && rVal.Index(0).Type() == expectedType { + addressSlice := reflect.MakeSlice(reflect.SliceOf(expectedType), rVal.Len(), rVal.Len()) + for i := 0; i < rVal.Len(); i++ { + elem := rVal.Index(i) + if elem.Len() != length { + return fmt.Errorf("expected element of [%d]byte but got length %d at index %d for field %s", length, elem.Len(), i, fieldName) + } + reflect.Copy(addressSlice.Index(i), elem) + } + em[fieldName] = addressSlice.Interface() + } else { + return fmt.Errorf("expected slice of [%d]byte but got %v for field %s", length, rVal.Type(), fieldName) + } + default: + return fmt.Errorf("unexpected type %v for field %s", rVal.Kind(), fieldName) + } + } + return nil + } +} + +// createStringTypeForBytes converts a byte array, pointer, or slice type to a string type for a given field. +// This function inspects the kind of the input type (array, pointer, slice) and performs the conversion +// if the element type matches the specified byte array length. Returns an error if the conversion is not possible. 
+func createStringTypeForBytes(t reflect.Type, field string, length int) (reflect.Type, error) { + switch t.Kind() { + case reflect.Pointer: + return createStringTypeForBytes(t.Elem(), field, length) + + case reflect.Array: + // Handle arrays, convert array of bytes to array of strings + if t.Elem().Kind() == reflect.Uint8 && t.Len() == length { + return reflect.TypeOf(""), nil + } else if t.Elem().Kind() == reflect.Array && t.Elem().Len() == length { + // Handle nested arrays (e.g., [2][20]byte to [2]string) + return reflect.ArrayOf(t.Len(), reflect.TypeOf("")), nil + } + return nil, fmt.Errorf("%w: cannot convert bytes for field %s", types.ErrInvalidType, field) + + case reflect.Slice: + // Handle slices of byte arrays, convert to slice of strings + if t.Elem().Kind() == reflect.Array && t.Elem().Len() == length { + return reflect.SliceOf(reflect.TypeOf("")), nil + } + return nil, fmt.Errorf("%w: cannot convert bytes for field %s", types.ErrInvalidType, field) + + default: + return nil, fmt.Errorf("%w: cannot convert bytes for field %s", types.ErrInvalidType, field) + } +} + +// stringToAddressHookForOnChain converts a string representation of an address back into a byte array for on-chain use. 
+func stringToAddressHookForOnChain(modifier AddressModifier) func(from reflect.Type, to reflect.Type, data any) (any, error) { + return func(from reflect.Type, to reflect.Type, data any) (any, error) { + byteArrTyp := reflect.ArrayOf(modifier.Length(), reflect.TypeOf(byte(0))) + strTyp := reflect.TypeOf("") + + // Convert from string to byte array (e.g., string -> [20]byte) + if from == strTyp && (to == byteArrTyp || to.ConvertibleTo(byteArrTyp)) { + addr, ok := data.(string) + if !ok { + return nil, fmt.Errorf("invalid type: expected string but got %T", data) + } + + bts, err := modifier.DecodeAddress(addr) + if err != nil { + return nil, err + } + + if len(bts) != modifier.Length() { + return nil, fmt.Errorf("length mismatch: expected %d bytes, got %d", modifier.Length(), len(bts)) + } + + val := reflect.New(byteArrTyp).Elem() + reflect.Copy(val, reflect.ValueOf(bts)) + return val.Interface(), nil + } + return data, nil + } +} + +// addressToStringHookForOffChain converts byte arrays to their string representation for off-chain use. 
+func addressToStringHookForOffChain(modifier AddressModifier) func(from reflect.Type, to reflect.Type, data any) (any, error) { + return func(from reflect.Type, to reflect.Type, data any) (any, error) { + byteArrTyp := reflect.ArrayOf(modifier.Length(), reflect.TypeOf(byte(0))) + strTyp := reflect.TypeOf("") + rVal := reflect.ValueOf(data) + + if !reflect.ValueOf(data).IsValid() { + return nil, fmt.Errorf("invalid value for conversion: got %T", data) + } + + // Convert from byte array to string (e.g., [20]byte -> string) + if from.ConvertibleTo(byteArrTyp) && to == strTyp { + bts := make([]byte, rVal.Len()) + for i := 0; i < rVal.Len(); i++ { + bts[i] = byte(rVal.Index(i).Uint()) + } + + encoded, err := modifier.EncodeAddress(bts) + if err != nil { + return nil, fmt.Errorf("failed to encode address: %w", err) + } + + return encoded, nil + } + return data, nil + } +} diff --git a/pkg/codec/byte_string_modifier_test.go b/pkg/codec/byte_string_modifier_test.go new file mode 100644 index 000000000..f6fdafed0 --- /dev/null +++ b/pkg/codec/byte_string_modifier_test.go @@ -0,0 +1,341 @@ +package codec_test + +import ( + "encoding/hex" + "errors" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/codec" +) + +// MockAddressModifier is a mock implementation of the AddressModifier interface. 
+type MockAddressModifier struct { + length int +} + +func (m MockAddressModifier) EncodeAddress(bytes []byte) (string, error) { + return "0x" + hex.EncodeToString(bytes), nil +} + +func (m MockAddressModifier) DecodeAddress(str string) ([]byte, error) { + if len(str) == 0 { + return nil, errors.New("empty address") + } + return hex.DecodeString(str[2:]) // Skip the "0x" prefix for hex encoding +} + +func (m MockAddressModifier) Length() int { + return m.length +} + +func TestAddressBytesToString(t *testing.T) { + // Mocking AddressModifier for 20-byte addresses + mockModifier := MockAddressModifier{length: 20} + + type concreteStruct struct { + A string + T [20]byte + } + + type concreteStructWithLargeAddress struct { + A string + T [20]byte + } + + type pointerStruct struct { + A string + T *[20]byte + } + + type arrayStruct struct { + A string + T [2][20]byte + } + + type sliceStruct struct { + A string + T [][20]byte + } + + concretest := reflect.TypeOf(&concreteStruct{}) + concreteLargest := reflect.TypeOf(&concreteStructWithLargeAddress{}) + pointertst := reflect.TypeOf(&pointerStruct{}) + arrayst := reflect.TypeOf(&arrayStruct{}) + slicest := reflect.TypeOf(&sliceStruct{}) + + type Bytes20AddressType [20]byte + + type otherIntegerType struct { + A string + T Bytes20AddressType + } + + type pointerOtherIntegerType struct { + A string + T *Bytes20AddressType + } + oit := reflect.TypeOf(&otherIntegerType{}) + oitpt := reflect.TypeOf(&pointerOtherIntegerType{}) + + testAddrBytes := [20]byte{} + testAddrStr := "0x" + hex.EncodeToString(testAddrBytes[:]) + anyString := "test" + + t.Run("RetypeToOffChain converts fixed length bytes to string", func(t *testing.T) { + for _, test := range []struct { + name string + tp reflect.Type + }{ + {"[20]byte", concretest}, + {"typed address", oit}, + {"[20]byte pointer", pointertst}, + {"*typed address", oitpt}, + } { + t.Run(test.name, func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, 
mockModifier) + convertedType, err := converter.RetypeToOffChain(test.tp, "") + + require.NoError(t, err) + assert.Equal(t, reflect.Pointer, convertedType.Kind()) + convertedType = convertedType.Elem() + + require.Equal(t, 2, convertedType.NumField()) + assert.Equal(t, test.tp.Elem().Field(0), convertedType.Field(0)) + assert.Equal(t, test.tp.Elem().Field(1).Name, convertedType.Field(1).Name) + assert.Equal(t, reflect.TypeOf(""), convertedType.Field(1).Type) + }) + } + }) + + t.Run("RetypeToOffChain converts arrays of fixed length bytes to array of string", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(arrayst, "") + require.NoError(t, err) + assert.Equal(t, reflect.Pointer, convertedType.Kind()) + convertedType = convertedType.Elem() + + require.Equal(t, 2, convertedType.NumField()) + assert.Equal(t, arrayst.Elem().Field(0), convertedType.Field(0)) + assert.Equal(t, reflect.TypeOf([2]string{}), convertedType.Field(1).Type) + }) + + t.Run("RetypeToOffChain converts slices of fixed length bytes to slices of string", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(slicest, "") + require.NoError(t, err) + assert.Equal(t, reflect.Pointer, convertedType.Kind()) + convertedType = convertedType.Elem() + + require.Equal(t, 2, convertedType.NumField()) + assert.Equal(t, slicest.Elem().Field(0), convertedType.Field(0)) + assert.Equal(t, reflect.TypeOf([]string{}), convertedType.Field(1).Type) + }) + + t.Run("TransformToOnChain converts string to bytes", func(t *testing.T) { + for _, test := range []struct { + name string + t reflect.Type + expected any + }{ + {"[20]byte", concretest, &concreteStruct{A: anyString, T: [20]byte{}}}, + {"*[20]byte", pointertst, &pointerStruct{A: anyString, T: &[20]byte{}}}, + {"typed address", oit, &otherIntegerType{A: anyString, 
T: Bytes20AddressType{}}}, + {"*typed address", oitpt, &pointerOtherIntegerType{A: anyString, T: &Bytes20AddressType{}}}, + } { + t.Run(test.name, func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + convertedType, err := converter.RetypeToOffChain(test.t, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + iOffChain.FieldByName("A").SetString(anyString) + iOffChain.FieldByName("T").Set(reflect.ValueOf(testAddrStr)) + + actual, err := converter.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + + assert.Equal(t, test.expected, actual) + }) + } + }) + + t.Run("TransformToOnChain converts string array to array of fixed length bytes", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(arrayst, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + + arrayValue := [2]string{testAddrStr, testAddrStr} + + iOffChain.FieldByName("T").Set(reflect.ValueOf(arrayValue)) + + actual, err := converter.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + + expected := &arrayStruct{A: "", T: [2][20]byte{}} + assert.Equal(t, expected, actual) + }) + + t.Run("TransformToOnChain converts string slice to slice of [length]byte", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(slicest, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + + iOffChain.FieldByName("T").Set(reflect.ValueOf([]string{testAddrStr, testAddrStr})) + + actual, err := converter.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + + expected := &sliceStruct{ + A: "", + T: 
[][20]byte{ + testAddrBytes, + testAddrBytes, + }, + } + + assert.Equal(t, expected, actual) + }) + + t.Run("TransformToOnChain returns error on invalid inputs", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + tests := []struct { + name string + addrStr string + structType reflect.Type + }{ + { + name: "Invalid length input", + addrStr: "0x" + hex.EncodeToString([]byte{1, 2, 3}), + structType: concretest, + }, + { + name: "Larger than expected input", + addrStr: "0x" + hex.EncodeToString(make([]byte, 40)), + structType: concreteLargest, + }, + { + name: "Empty string input", + addrStr: "", + structType: concretest, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + convertedType, err := converter.RetypeToOffChain(tt.structType, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + iOffChain.FieldByName("A").SetString(anyString) + iOffChain.FieldByName("T").Set(reflect.ValueOf(tt.addrStr)) + + _, err = converter.TransformToOnChain(rOffchain.Interface(), "") + require.Error(t, err) + }) + } + }) + + t.Run("TransformToOffChain converts bytes to string", func(t *testing.T) { + for _, test := range []struct { + name string + t reflect.Type + offChain any + }{ + {"[20]byte", concretest, &concreteStruct{A: anyString, T: [20]byte{}}}, + {"*[20]byte", pointertst, &pointerStruct{A: anyString, T: &[20]byte{}}}, + {"typed address", oit, &otherIntegerType{A: anyString, T: Bytes20AddressType{}}}, + {"*typed address", oitpt, &pointerOtherIntegerType{A: anyString, T: &Bytes20AddressType{}}}, + } { + t.Run(test.name, func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + convertedType, err := converter.RetypeToOffChain(test.t, "") + require.NoError(t, err) + + actual, err := converter.TransformToOffChain(test.offChain, "") + require.NoError(t, err) + + expected := 
reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(expected) + iOffChain.FieldByName("A").SetString(anyString) + iOffChain.FieldByName("T").Set(reflect.ValueOf(testAddrStr)) + assert.Equal(t, expected.Interface(), actual) + }) + } + }) + + t.Run("TransformToOffChain converts array of bytes to string array", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(arrayst, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + expectedAddrs := [2]string{testAddrStr, testAddrStr} + iOffChain.FieldByName("T").Set(reflect.ValueOf(expectedAddrs)) + + actual, err := converter.TransformToOffChain(&arrayStruct{A: anyString, T: [2][20]byte{}}, "") + require.NoError(t, err) + + expected := reflect.New(convertedType.Elem()) + iExpected := reflect.Indirect(expected) + iExpected.FieldByName("A").SetString(anyString) + iExpected.FieldByName("T").Set(reflect.ValueOf(expectedAddrs)) + assert.Equal(t, expected.Interface(), actual) + }) + + t.Run("TransformToOffChain converts slice bytes to string slice", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(slicest, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + expectedAddrs := []string{testAddrStr, testAddrStr} + iOffChain.FieldByName("T").Set(reflect.ValueOf(expectedAddrs)) + + actual, err := converter.TransformToOffChain(&sliceStruct{ + A: anyString, + T: [][20]byte{testAddrBytes, testAddrBytes}, + }, "") + require.NoError(t, err) + + expected := reflect.New(convertedType.Elem()) + iExpected := reflect.Indirect(expected) + iExpected.FieldByName("A").SetString(anyString) + iExpected.FieldByName("T").Set(reflect.ValueOf(expectedAddrs)) + assert.Equal(t, 
expected.Interface(), actual) + }) + + t.Run("Unsupported field type returns error", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + unsupportedStruct := struct { + A string + T int // Unsupported type + }{} + + // We expect RetypeToOffChain to return an error because 'T' is not a supported type. + _, err := converter.RetypeToOffChain(reflect.TypeOf(&unsupportedStruct), "") + require.Error(t, err) + assert.Contains(t, err.Error(), "cannot convert bytes for field T") + }) +} diff --git a/pkg/codec/config.go b/pkg/codec/config.go index d7581fcb7..21b6cca04 100644 --- a/pkg/codec/config.go +++ b/pkg/codec/config.go @@ -22,6 +22,9 @@ import ( // - hard code -> [HardCodeModifierConfig] // - extract element -> [ElementExtractorModifierConfig] // - epoch to time -> [EpochToTimeModifierConfig] +// - address to string -> [AddressBytesToStringModifierConfig] +// - field wrapper -> [WrapperModifierConfig] +// - precodec -> [PrecodecModifierConfig] type ModifiersConfig []ModifierConfig func (m *ModifiersConfig) UnmarshalJSON(data []byte) error { @@ -52,6 +55,12 @@ func (m *ModifiersConfig) UnmarshalJSON(data []byte) error { (*m)[i] = &EpochToTimeModifierConfig{} case ModifierExtractProperty: (*m)[i] = &PropertyExtractorConfig{} + case ModifierAddressToString: + (*m)[i] = &AddressBytesToStringModifierConfig{} + case ModifierWrapper: + (*m)[i] = &WrapperModifierConfig{} + case ModifierPreCodec: + (*m)[i] = &PreCodecModifierConfig{} default: return fmt.Errorf("%w: unknown modifier type: %s", types.ErrInvalidConfig, mType) } @@ -78,12 +87,15 @@ func (m *ModifiersConfig) ToModifier(onChainHooks ...mapstructure.DecodeHookFunc type ModifierType string const ( + ModifierPreCodec ModifierType = "precodec" ModifierRename ModifierType = "rename" ModifierDrop ModifierType = "drop" ModifierHardCode ModifierType = "hard code" ModifierExtractElement ModifierType = "extract element" ModifierEpochToTime ModifierType = "epoch to time" 
ModifierExtractProperty ModifierType = "extract property" + ModifierAddressToString ModifierType = "address to string" + ModifierWrapper ModifierType = "wrapper" ) type ModifierConfig interface { @@ -191,6 +203,69 @@ func (h *HardCodeModifierConfig) MarshalJSON() ([]byte, error) { }) } +// PreCodec creates a modifier that will transform data using a preliminary encoding/decoding step. +// 'Off-chain' values will be overwritten with the encoded data as a byte array. +// 'On-chain' values will be typed using the optimistic types from the codec. +// This is useful when wanting to move the data as generic bytes. +// +// Example: +// +// Based on this input struct: +// type example struct { +// A []B +// } +// +// type B struct { +// C string +// D string +// } +// +// And the fields config defined as: +// {"A": "string C, string D"} +// +// The codec config gives a map of strings (the values from fields config map) to implementation for encoding/decoding +// +// RemoteCodec { +// func (types.TypeProvider) CreateType(itemType string, forEncoding bool) (any, error) +// func (types.Decoder) Decode(ctx context.Context, raw []byte, into any, itemType string) error +// func (types.Encoder) Encode(ctx context.Context, item any, itemType string) ([]byte, error) +// func (types.Decoder) GetMaxDecodingSize(ctx context.Context, n int, itemType string) (int, error) +// func (types.Encoder) GetMaxEncodingSize(ctx context.Context, n int, itemType string) (int, error) +// } +// +// {"string C, string D": RemoteCodec} +// +// Result: +// type example struct { +// A [][]bytes +// } +// +// Where []bytes are the encoded input struct B +type PreCodecModifierConfig struct { + // A map of a path of properties to encoding scheme. + // If the path leads to an array, encoding will occur on every entry. + // + // Example: "a.b" -> "uint256 Value" + Fields map[string]string + // Codecs is skipped in JSON serialization, it will be injected later. 
+ // The map should be keyed using the value from "Fields" to a corresponding Codec that can encode/decode for it + // This allows encoding and decoding implementations to be handled outside of the modifier. + // + // Example: "uint256 Value" -> a chain specific encoder for "uint256 Value" + Codecs map[string]types.RemoteCodec `json:"-"` +} + +func (c *PreCodecModifierConfig) ToModifier(_ ...mapstructure.DecodeHookFunc) (Modifier, error) { + return NewPreCodec(c.Fields, c.Codecs) +} + +func (c *PreCodecModifierConfig) MarshalJSON() ([]byte, error) { + return json.Marshal(&modifierMarshaller[PreCodecModifierConfig]{ + Type: ModifierPreCodec, + T: c, + }) +} + // EpochToTimeModifierConfig is used to convert epoch seconds as uint64 fields on-chain to time.Time type EpochToTimeModifierConfig struct { Fields []string @@ -225,6 +300,99 @@ func (c *PropertyExtractorConfig) MarshalJSON() ([]byte, error) { }) } +// AddressBytesToStringModifierConfig is used to transform address byte fields into string fields. +// It holds the list of fields that should be modified and the chain-specific logic to do the modifications. +type AddressBytesToStringModifierConfig struct { + Fields []string + // Modifier is skipped in JSON serialization, will be injected later. + Modifier AddressModifier `json:"-"` +} + +func (c *AddressBytesToStringModifierConfig) ToModifier(_ ...mapstructure.DecodeHookFunc) (Modifier, error) { + return NewAddressBytesToStringModifier(c.Fields, c.Modifier), nil +} + +func (c *AddressBytesToStringModifierConfig) MarshalJSON() ([]byte, error) { + return json.Marshal(&modifierMarshaller[AddressBytesToStringModifierConfig]{ + Type: ModifierAddressToString, + T: c, + }) +} + +// WrapperModifierConfig replaces each field based on cfg map keys with a struct containing one field with the value of the original field which is named based on map values. +// Wrapper modifier does not maintain the original pointers.
+// Wrapper modifier config shouldn't edit fields that affect each other since the results are not deterministic. +// +// Example #1: +// +// Based on this input struct: +// type example struct { +// A string +// } +// +// And the wrapper config defined as: +// {"D": "W"} +// +// Result: +// type example struct { +// D +// } +// +// where D is a struct that contains the original value of D under the name W: +// type D struct { +// W string +// } +// +// +// Example #2: +// Wrapper modifier works on any type of field, including nested fields or nested fields in slices etc.! +// +// Based on this input struct: +// type example struct { +// A []B +// } +// +// type B struct { +// C string +// D string +// } +// +// And the wrapper config defined as: +// {"A.C": "E", "A.D": "F"} +// +// Result: +// type example struct { +// A []B +// } +// +// type B struct { +// C type struct { E string } +// D type struct { F string } +// } +// +// Where each element of slice A under fields C.E and D.F retains the values of their respective input slice elements A.C and A.D . +type WrapperModifierConfig struct { + // Fields key defines the fields to be wrapped and the name of the wrapper struct. + // The field becomes a subfield of the wrapper struct where the name of the subfield is map value. 
+ Fields map[string]string +} + +func (r *WrapperModifierConfig) ToModifier(_ ...mapstructure.DecodeHookFunc) (Modifier, error) { + fields := map[string]string{} + for i, f := range r.Fields { + // using a private variable will make the field not serialize, essentially dropping the field + fields[upperFirstCharacter(f)] = fmt.Sprintf("dropFieldPrivateName-%s", i) + } + return NewWrapperModifier(r.Fields), nil +} + +func (r *WrapperModifierConfig) MarshalJSON() ([]byte, error) { + return json.Marshal(&modifierMarshaller[WrapperModifierConfig]{ + Type: ModifierWrapper, + T: r, + }) +} + type typer struct { Type string } diff --git a/pkg/codec/config_test.go b/pkg/codec/config_test.go index 185680bc0..245a37c28 100644 --- a/pkg/codec/config_test.go +++ b/pkg/codec/config_test.go @@ -90,6 +90,7 @@ func TestModifiersConfig(t *testing.T) { type testStruct struct { A int C int + D int T int64 } @@ -119,7 +120,13 @@ func TestModifiersConfig(t *testing.T) { { "Type": "Epoch To time", "Fields": ["T"] - } + }, + { + "Type": "Wrapper", + "Fields": { + "D": "F" + } + } ]` lowerJSONConfig := `[ @@ -148,7 +155,13 @@ func TestModifiersConfig(t *testing.T) { { "type": "epoch to time", "fields": ["t"] - } + }, + { + "Type": "Wrapper", + "Fields": { + "D": "F" + } + } ]` for _, test := range []struct{ name, json string }{ @@ -170,6 +183,7 @@ func TestModifiersConfig(t *testing.T) { A: 1, C: 100, T: 631515600, + D: 123, } offChain, err := modifier.TransformToOffChain(onChain, "") @@ -191,6 +205,7 @@ func TestModifiersConfig(t *testing.T) { "B": float64(2), // drop the quotes around the string "T": string(j)[1 : len(j)-1], + "D": map[string]any{"F": float64(123)}, } assert.Equal(t, expectedMap, actualMap) @@ -216,6 +231,12 @@ func TestModifiersConfig(t *testing.T) { &codec.EpochToTimeModifierConfig{ Fields: []string{"T"}, }, + &codec.AddressBytesToStringModifierConfig{ + Fields: []string{"D"}, + }, + &codec.WrapperModifierConfig{ + Fields: map[string]string{"A": "Z"}, + }, } b, err := 
json.Marshal(&configs) diff --git a/pkg/codec/encodings/type_codec_test.go b/pkg/codec/encodings/type_codec_test.go index d7762fe61..874819ff2 100644 --- a/pkg/codec/encodings/type_codec_test.go +++ b/pkg/codec/encodings/type_codec_test.go @@ -133,8 +133,13 @@ func (*interfaceTesterBase) GetAccountBytes(i int) []byte { return []byte{ib, ib + 1, ib + 2, ib + 3, ib + 4, ib + 5, ib + 6, ib + 7} } +func (t *interfaceTesterBase) GetAccountString(i int) string { + return string(t.GetAccountBytes(i)) +} + type bigEndianInterfaceTester struct { interfaceTesterBase + TestSelectionSupport lenient bool } @@ -168,8 +173,10 @@ func (b *bigEndianInterfaceTester) encode(t *testing.T, bytes []byte, ts TestStr for _, oid := range ts.OracleIDs { bytes = append(bytes, byte(oid)) } - bytes = append(bytes, byte(len(ts.Account))) - bytes = append(bytes, ts.Account...) + bytes = append(bytes, byte(len(ts.AccountStruct.Account))) + bytes = append(bytes, ts.AccountStruct.Account...) + bytes = rawbin.BigEndian.AppendUint32(bytes, uint32(len(ts.AccountStruct.AccountStr))) + bytes = append(bytes, []byte(ts.AccountStruct.AccountStr)...) 
bytes = append(bytes, byte(len(ts.Accounts))) for _, account := range ts.Accounts { bytes = append(bytes, byte(len(account))) @@ -234,6 +241,12 @@ func newTestStructCodec(t *testing.T, builder encodings.Builder) encodings.TypeC }) require.NoError(t, err) + accountStructCodec, err := encodings.NewStructCodec([]encodings.NamedTypeCodec{ + {Name: "Account", Codec: acc}, + {Name: "AccountStr", Codec: sCodec}, + }) + require.NoError(t, err) + oIDs, err := encodings.NewArray(32, builder.OracleID()) require.NoError(t, err) @@ -248,7 +261,7 @@ func newTestStructCodec(t *testing.T, builder encodings.Builder) encodings.TypeC {Name: "DifferentField", Codec: sCodec}, {Name: "OracleID", Codec: builder.OracleID()}, {Name: "OracleIDs", Codec: oIDs}, - {Name: "Account", Codec: acc}, + {Name: "AccountStruct", Codec: accountStructCodec}, {Name: "Accounts", Codec: accs}, {Name: "BigField", Codec: bi}, {Name: "NestedDynamicStruct", Codec: midDynamicCodec}, @@ -288,8 +301,8 @@ func (b *bigEndianInterfaceTester) GetCodec(t *testing.T) types.Codec { } mod, err := codec.NewHardCoder(map[string]any{ - "BigField": ts.BigField.String(), - "Account": ts.Account, + "BigField": ts.BigField.String(), + "AccountStruct.Account": ts.AccountStruct.Account, }, map[string]any{"ExtraField": AnyExtraValue}, codec.BigIntHook) require.NoError(t, err) diff --git a/pkg/codec/example_test.go b/pkg/codec/example_test.go index 54f4f93e1..96a6fa032 100644 --- a/pkg/codec/example_test.go +++ b/pkg/codec/example_test.go @@ -36,7 +36,7 @@ func (ExampleStructJSONCodec) GetMaxEncodingSize(_ context.Context, n int, _ str func (ExampleStructJSONCodec) Decode(_ context.Context, raw []byte, into any, _ string) error { err := json.Unmarshal(raw, into) if err != nil { - return fmt.Errorf("%w: %s", types.ErrInvalidType, err) + return fmt.Errorf("%w: %w", types.ErrInvalidType, err) } return nil } diff --git a/pkg/codec/modifier_base.go b/pkg/codec/modifier_base.go index 6c7285e9b..8a092fe9b 100644 --- 
a/pkg/codec/modifier_base.go +++ b/pkg/codec/modifier_base.go @@ -37,6 +37,7 @@ func (m *modifierBase[T]) RetypeToOffChain(onChainType reflect.Type, itemType st return cached, nil } + var offChainType reflect.Type switch onChainType.Kind() { case reflect.Pointer: elm, err := m.RetypeToOffChain(onChainType.Elem(), "") @@ -44,35 +45,30 @@ func (m *modifierBase[T]) RetypeToOffChain(onChainType reflect.Type, itemType st return nil, err } - ptr := reflect.PointerTo(elm) - m.onToOffChainType[onChainType] = ptr - m.offToOnChainType[ptr] = onChainType - return ptr, nil + offChainType = reflect.PointerTo(elm) case reflect.Slice: elm, err := m.RetypeToOffChain(onChainType.Elem(), "") if err != nil { return nil, err } - sliceType := reflect.SliceOf(elm) - m.onToOffChainType[onChainType] = sliceType - m.offToOnChainType[sliceType] = onChainType - return sliceType, nil + offChainType = reflect.SliceOf(elm) case reflect.Array: elm, err := m.RetypeToOffChain(onChainType.Elem(), "") if err != nil { return nil, err } - arrayType := reflect.ArrayOf(onChainType.Len(), elm) - m.onToOffChainType[onChainType] = arrayType - m.offToOnChainType[arrayType] = onChainType - return arrayType, nil + offChainType = reflect.ArrayOf(onChainType.Len(), elm) case reflect.Struct: return m.getStructType(onChainType) default: return nil, fmt.Errorf("%w: cannot retype the kind %v", types.ErrInvalidType, onChainType.Kind()) } + + m.onToOffChainType[onChainType] = offChainType + m.offToOnChainType[offChainType] = onChainType + return offChainType, nil } func (m *modifierBase[T]) getStructType(outputType reflect.Type) (reflect.Type, error) { diff --git a/pkg/codec/precodec.go b/pkg/codec/precodec.go new file mode 100644 index 000000000..de5dec055 --- /dev/null +++ b/pkg/codec/precodec.go @@ -0,0 +1,113 @@ +package codec + +import ( + "context" + "fmt" + "reflect" + + "github.com/go-viper/mapstructure/v2" + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +// PreCodec creates a modifier that 
will run a preliminary encoding/decoding step. +// This is useful when wanting to move nested data as generic bytes. +func NewPreCodec(fields map[string]string, codecs map[string]types.RemoteCodec) (Modifier, error) { + m := &preCodec{ + modifierBase: modifierBase[string]{ + fields: fields, + onToOffChainType: map[reflect.Type]reflect.Type{}, + offToOnChainType: map[reflect.Type]reflect.Type{}, + }, + codecs: codecs, + } + + // validate that there is a codec for each unique type definition + for _, typeDef := range fields { + if _, ok := m.codecs[typeDef]; ok { + continue + } + return nil, fmt.Errorf("codec not supplied for: %s", typeDef) + } + + m.modifyFieldForInput = func(_ string, field *reflect.StructField, _ string, typeDef string) error { + if field.Type != reflect.SliceOf(reflect.TypeFor[uint8]()) && field.Type != reflect.PointerTo(reflect.SliceOf(reflect.TypeFor[uint8]())) { + return fmt.Errorf("can only decode []byte from on-chain: %s", field.Type) + } + + codec, ok := m.codecs[typeDef] + if !ok || codec == nil { + return fmt.Errorf("codec not found for type definition: '%s'", typeDef) + } + + newType, err := codec.CreateType("", false) + if err != nil { + return err + } + field.Type = reflect.TypeOf(newType) + + return nil + } + + return m, nil +} + +type preCodec struct { + modifierBase[string] + codecs map[string]types.RemoteCodec +} + +func (pc *preCodec) TransformToOffChain(onChainValue any, _ string) (any, error) { + allHooks := make([]mapstructure.DecodeHookFunc, 1) + allHooks[0] = hardCodeManyHook + + return transformWithMaps(onChainValue, pc.onToOffChainType, pc.fields, pc.decodeFieldMapAction, allHooks...) 
+} + +func (pc *preCodec) decodeFieldMapAction(extractMap map[string]any, key string, typeDef string) error { + _, exists := extractMap[key] + if !exists { + return fmt.Errorf("field %s does not exist", key) + } + + codec, ok := pc.codecs[typeDef] + if !ok || codec == nil { + return fmt.Errorf("codec not found for type definition: '%s'", typeDef) + } + + to, err := codec.CreateType("", false) + if err != nil { + return err + } + err = codec.Decode(context.Background(), extractMap[key].([]byte), &to, "") + if err != nil { + return err + } + extractMap[key] = to + return nil +} + +func (pc *preCodec) TransformToOnChain(offChainValue any, _ string) (any, error) { + allHooks := make([]mapstructure.DecodeHookFunc, 1) + allHooks[0] = hardCodeManyHook + + return transformWithMaps(offChainValue, pc.offToOnChainType, pc.fields, pc.encodeFieldMapAction, allHooks...) +} + +func (pc *preCodec) encodeFieldMapAction(extractMap map[string]any, key string, typeDef string) error { + _, exists := extractMap[key] + if !exists { + return fmt.Errorf("field %s does not exist", key) + } + + codec, ok := pc.codecs[typeDef] + if !ok || codec == nil { + return fmt.Errorf("codec not found for type definition: '%s'", typeDef) + } + + encoded, err := codec.Encode(context.Background(), extractMap[key], "") + if err != nil { + return err + } + extractMap[key] = encoded + return nil +} diff --git a/pkg/codec/precodec_test.go b/pkg/codec/precodec_test.go new file mode 100644 index 000000000..e7835bd16 --- /dev/null +++ b/pkg/codec/precodec_test.go @@ -0,0 +1,418 @@ +package codec_test + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "math" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/codec" + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +var _ types.RemoteCodec = &ExampleCodec{} + +type ExampleCodec struct { + offChainType any +} + +func (ec ExampleCodec) 
Encode(_ context.Context, item any, _ string) ([]byte, error) { + return json.Marshal(item) +} + +func (ec ExampleCodec) GetMaxEncodingSize(_ context.Context, n int, _ string) (int, error) { + // not used in the example + return math.MaxInt32, nil +} + +func (ec ExampleCodec) Decode(_ context.Context, raw []byte, into any, _ string) error { + err := json.Unmarshal(raw, into) + if err != nil { + return fmt.Errorf("%w: %w", types.ErrInvalidType, err) + } + return nil +} + +func (ec ExampleCodec) GetMaxDecodingSize(ctx context.Context, n int, _ string) (int, error) { + // not used in the example + return math.MaxInt32, nil +} + +func (ec ExampleCodec) CreateType(_ string, _ bool) (any, error) { + // parameters here are unused in the example, but can be used to determine what type to expect. + // this allows remote execution to know how to decode the incoming message + // and for [codec.NewModifierCodec] to know what type to expect for intermediate phases. + return ec.offChainType, nil +} + +type testStructOff struct { + Ask int + Bid int +} + +type testStructOn struct { + Ask []byte + Bid int +} + +type testStructOnPointer struct { + Ask *[]byte + Bid int +} + +type nestedTestStructOn struct { + Report []byte + FeedID [32]byte + Timestamp int64 +} + +type deepNestedTestStructOn struct { + Reports []nestedTestStructOn +} + +const ( + TestStructOffDef = "uint256 Ask, uint256 Bid" +) + +func TestPreCodec(t *testing.T) { + t.Parallel() + + preCodec, err := codec.NewPreCodec( + map[string]string{"Ask": "uint256"}, + map[string]types.RemoteCodec{"uint256": ExampleCodec{offChainType: int(0)}}, + ) + require.NoError(t, err) + + pointerPreCodec, err := codec.NewPreCodec( + map[string]string{"Ask": "uint256"}, + map[string]types.RemoteCodec{"uint256": ExampleCodec{offChainType: int(0)}}, + ) + require.NoError(t, err) + + nestedPreCodec, err := codec.NewPreCodec( + map[string]string{"Report": TestStructOffDef}, + map[string]types.RemoteCodec{TestStructOffDef: 
ExampleCodec{offChainType: testStructOff{}}}, + ) + require.NoError(t, err) + + deepNestedPreCodec, err := codec.NewPreCodec( + map[string]string{"Reports.Report": TestStructOffDef}, + map[string]types.RemoteCodec{TestStructOffDef: ExampleCodec{offChainType: testStructOff{}}}, + ) + require.NoError(t, err) + + invalidPreCodec, err := codec.NewPreCodec( + map[string]string{"Unknown": TestStructOffDef}, + map[string]types.RemoteCodec{TestStructOffDef: ExampleCodec{offChainType: testStructOff{}}}, + ) + require.NoError(t, err) + + t.Run("NOK codec not supplied", func(t *testing.T) { + _, err := codec.NewPreCodec( + map[string]string{"Unknown": TestStructOffDef}, + map[string]types.RemoteCodec{"invalid def": ExampleCodec{offChainType: testStructOff{}}}, + ) + require.Error(t, err) + }) + + t.Run("RetypeToOffChain converts type to codec.CreateType type", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.TypeOf(testStructOn{}), "") + require.NoError(t, err) + require.Equal(t, 2, offChainType.NumField()) + field0 := offChainType.Field(0) + assert.Equal(t, "Ask", field0.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field0.Type) + field1 := offChainType.Field(1) + assert.Equal(t, "Bid", field1.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field1.Type) + }) + + t.Run("RetypeToOffChain works on pointers to type", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.PointerTo(reflect.TypeOf(testStructOn{})), "") + require.NoError(t, err) + assert.Equal(t, reflect.Ptr, offChainType.Kind()) + elem := offChainType.Elem() + require.Equal(t, 2, elem.NumField()) + field0 := elem.Field(0) + assert.Equal(t, "Ask", field0.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field0.Type) + field1 := elem.Field(1) + assert.Equal(t, "Bid", field1.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field1.Type) + }) + + t.Run("RetypeToOffChain works on pointers", func(t *testing.T) { + offChainType, err := 
pointerPreCodec.RetypeToOffChain(reflect.PointerTo(reflect.TypeOf(testStructOnPointer{})), "") + require.NoError(t, err) + assert.Equal(t, reflect.Ptr, offChainType.Kind()) + elem := offChainType.Elem() + require.Equal(t, 2, elem.NumField()) + field0 := elem.Field(0) + assert.Equal(t, "Ask", field0.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field0.Type) + field1 := elem.Field(1) + assert.Equal(t, "Bid", field1.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field1.Type) + }) + + t.Run("RetypeToOffChain works on slices", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.SliceOf(reflect.TypeOf(testStructOn{})), "") + require.NoError(t, err) + assert.Equal(t, reflect.Slice, offChainType.Kind()) + elem := offChainType.Elem() + require.Equal(t, 2, elem.NumField()) + field0 := elem.Field(0) + assert.Equal(t, "Ask", field0.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field0.Type) + field1 := elem.Field(1) + assert.Equal(t, "Bid", field1.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field1.Type) + }) + + t.Run("RetypeToOffChain works on arrays", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.ArrayOf(1, reflect.TypeOf(testStructOn{})), "") + require.NoError(t, err) + assert.Equal(t, reflect.Array, offChainType.Kind()) + elem := offChainType.Elem() + require.Equal(t, 2, elem.NumField()) + field0 := elem.Field(0) + assert.Equal(t, "Ask", field0.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field0.Type) + field1 := elem.Field(1) + assert.Equal(t, "Bid", field1.Name) + assert.Equal(t, reflect.TypeOf(int(0)), field1.Type) + }) + + t.Run("RetypeToOffChain converts nested type to codec.CreateType type", func(t *testing.T) { + offChainType, err := nestedPreCodec.RetypeToOffChain(reflect.TypeOf(nestedTestStructOn{}), "") + + require.NoError(t, err) + + require.Equal(t, 3, offChainType.NumField()) + field0 := offChainType.Field(0) + assert.Equal(t, "Report", field0.Name) + assert.Equal(t, 
reflect.TypeOf(testStructOff{}), field0.Type) + field1 := offChainType.Field(1) + assert.Equal(t, "FeedID", field1.Name) + assert.Equal(t, reflect.TypeOf([32]byte{}), field1.Type) + field2 := offChainType.Field(2) + assert.Equal(t, "Timestamp", field2.Name) + assert.Equal(t, reflect.TypeOf(int64(0)), field2.Type) + }) + + t.Run("RetypeToOffChain converts deep nested type to codec.CreateType type", func(t *testing.T) { + offChainType, err := deepNestedPreCodec.RetypeToOffChain(reflect.TypeOf(deepNestedTestStructOn{}), "") + + require.NoError(t, err) + + reports, exists := offChainType.FieldByName("Reports") + assert.True(t, exists) + report := reports.Type.Elem() + require.Equal(t, 3, report.NumField()) + field0 := report.Field(0) + assert.Equal(t, "Report", field0.Name) + assert.Equal(t, reflect.TypeOf(testStructOff{}), field0.Type) + field1 := report.Field(1) + assert.Equal(t, "FeedID", field1.Name) + assert.Equal(t, reflect.TypeOf([32]byte{}), field1.Type) + field2 := report.Field(2) + assert.Equal(t, "Timestamp", field2.Name) + assert.Equal(t, reflect.TypeOf(int64(0)), field2.Type) + }) + + t.Run("RetypeToOffChain only works on byte arrays", func(t *testing.T) { + _, err := preCodec.RetypeToOffChain(reflect.TypeOf(testStructOff{}), "") + require.Error(t, err) + assert.Equal(t, err.Error(), "can only decode []byte from on-chain: int") + }) + + t.Run("RetypeToOffChain only works with a valid path", func(t *testing.T) { + _, err := invalidPreCodec.RetypeToOffChain(reflect.TypeOf(testStructOn{}), "") + require.Error(t, err) + assert.Equal(t, err.Error(), "invalid type: cannot find Unknown") + }) + + t.Run("TransformToOnChain and TransformToOffChain returns error if input type was not from TransformToOnChain", func(t *testing.T) { + incorrectVal := struct{}{} + _, err := preCodec.TransformToOnChain(incorrectVal, "") + assert.True(t, errors.Is(err, types.ErrInvalidType)) + _, err = preCodec.TransformToOffChain(incorrectVal, "") + assert.True(t, errors.Is(err, 
types.ErrInvalidType)) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on structs", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.TypeOf(testStructOn{}), "") + require.NoError(t, err) + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iOffchain.FieldByName("Ask").SetInt(20) + iOffchain.FieldByName("Bid").SetInt(10) + + output, err := preCodec.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + jsonEncoded, err := json.Marshal(20) + require.NoError(t, err) + expected := testStructOn{ + Ask: jsonEncoded, + Bid: 10, + } + assert.Equal(t, expected, output) + newInput, err := preCodec.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on pointers", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.PointerTo(reflect.TypeOf(testStructOn{})), "") + require.NoError(t, err) + + rOffchain := reflect.New(offChainType.Elem()) + iOffchain := reflect.Indirect(rOffchain) + iOffchain.FieldByName("Ask").SetInt(20) + iOffchain.FieldByName("Bid").SetInt(10) + + output, err := preCodec.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + jsonEncoded, err := json.Marshal(20) + require.NoError(t, err) + expected := testStructOn{ + Ask: jsonEncoded, + Bid: 10, + } + assert.Equal(t, &expected, output) + newInput, err := preCodec.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on slices", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.SliceOf(reflect.TypeOf(testStructOn{})), "") + require.NoError(t, err) + + iOffchain := reflect.MakeSlice(offChainType, 2, 2) + iElm := iOffchain.Index(0) + iElm.FieldByName("Ask").SetInt(20) + iElm.FieldByName("Bid").SetInt(10) + iElm2 := 
iOffchain.Index(1) + iElm2.FieldByName("Ask").SetInt(20) + iElm2.FieldByName("Bid").SetInt(30) + + output, err := preCodec.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + jsonEncoded, err := json.Marshal(20) + require.NoError(t, err) + expected := []testStructOn{ + { + Ask: jsonEncoded, + Bid: 10, + }, + { + Ask: jsonEncoded, + Bid: 30, + }, + } + assert.Equal(t, expected, output) + newInput, err := preCodec.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on arrays", func(t *testing.T) { + offChainType, err := preCodec.RetypeToOffChain(reflect.ArrayOf(2, reflect.TypeOf(testStructOn{})), "") + require.NoError(t, err) + + iOffchain := reflect.New(offChainType).Elem() + iElm := iOffchain.Index(0) + iElm.FieldByName("Ask").SetInt(20) + iElm.FieldByName("Bid").SetInt(10) + iElm2 := iOffchain.Index(1) + iElm2.FieldByName("Ask").SetInt(20) + iElm2.FieldByName("Bid").SetInt(30) + + output, err := preCodec.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + jsonEncoded, err := json.Marshal(20) + require.NoError(t, err) + expected := [2]testStructOn{ + { + Ask: jsonEncoded, + Bid: 10, + }, + { + Ask: jsonEncoded, + Bid: 30, + }, + } + assert.Equal(t, expected, output) + newInput, err := preCodec.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on nested fields", func(t *testing.T) { + offChainType, err := nestedPreCodec.RetypeToOffChain(reflect.TypeOf(nestedTestStructOn{}), "") + require.NoError(t, err) + + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iReport := iOffchain.FieldByName("Report") + iReport.FieldByName("Ask").SetInt(20) + iReport.FieldByName("Bid").SetInt(10) + + output, err := nestedPreCodec.TransformToOnChain(iOffchain.Interface(), "") + 
require.NoError(t, err) + + report := testStructOff{ + Ask: 20, + Bid: 10, + } + jsonEncoded, err := json.Marshal(report) + require.NoError(t, err) + expected := nestedTestStructOn{ + Report: jsonEncoded, + } + assert.Equal(t, expected, output) + newInput, err := nestedPreCodec.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on deeply nested fields", func(t *testing.T) { + offChainType, err := deepNestedPreCodec.RetypeToOffChain(reflect.TypeOf(deepNestedTestStructOn{}), "") + require.NoError(t, err) + + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iReports := iOffchain.FieldByName("Reports") + iReports.Set(reflect.MakeSlice(iReports.Type(), 1, 1)) + iElm := iReports.Index(0) + iReport := iElm.FieldByName("Report") + iReport.FieldByName("Ask").SetInt(20) + iReport.FieldByName("Bid").SetInt(10) + + output, err := deepNestedPreCodec.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + report := testStructOff{ + Ask: 20, + Bid: 10, + } + jsonEncoded, err := json.Marshal(report) + require.NoError(t, err) + expected := deepNestedTestStructOn{ + Reports: []nestedTestStructOn{ + {Report: jsonEncoded}, + }, + } + assert.Equal(t, expected, output) + newInput, err := deepNestedPreCodec.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) +} diff --git a/pkg/codec/utils.go b/pkg/codec/utils.go index 9c4e83a1b..650ef7976 100644 --- a/pkg/codec/utils.go +++ b/pkg/codec/utils.go @@ -321,6 +321,10 @@ func getMapsFromPath(valueMap map[string]any, path []string) ([]map[string]any, } iItem := reflect.ValueOf(item) + if iItem.Kind() == reflect.Ptr { + iItem = iItem.Elem() + } + switch iItem.Kind() { case reflect.Array, reflect.Slice: length := iItem.Len() @@ -340,7 +344,8 @@ func getMapsFromPath(valueMap map[string]any, path []string) ([]map[string]any, // 
cleanup empty values for non path keys for k, v := range m { - if k != p && reflect.ValueOf(v).IsZero() { + valueOfV := reflect.ValueOf(v) + if k != p && valueOfV.IsValid() && valueOfV.IsZero() { delete(m, k) } } diff --git a/pkg/codec/utils_test.go b/pkg/codec/utils_test.go index 335bb7a0d..dcf6bbc2c 100644 --- a/pkg/codec/utils_test.go +++ b/pkg/codec/utils_test.go @@ -25,21 +25,36 @@ func TestGetMapsFromPath(t *testing.T) { TestASlice []testA } + type testC struct { + TestAPtrSlice *[]testA + } + type testStruct struct { A testA B testB - C, D int + C testC + D, F int } - testMap := map[string]any{"A": map[string]any{"B": []testStruct{{B: testB{TestASlice: []testA{{IntSlice: []int{3, 2, 0}}, {IntSlice: []int{0, 1, 2}}}}, C: 10, D: 100}, {C: 20, D: 200}}}} + ptrSlice := &[]testA{{IntSlice: []int{4, 3, 2}}, {IntSlice: []int{1, 2, 3}}} + testMap := map[string]any{"A": map[string]any{"B": []testStruct{{B: testB{TestASlice: []testA{{IntSlice: []int{3, 2, 0}}, {IntSlice: []int{0, 1, 2}}}}, C: testC{TestAPtrSlice: ptrSlice}, D: 10, F: 100}, {D: 20, F: 200}}}} t.Parallel() actual, err := getMapsFromPath(testMap, []string{"A"}) require.NoError(t, err) - assert.Equal(t, []map[string]any{{"B": []testStruct{{B: testB{TestASlice: []testA{{IntSlice: []int{3, 2, 0}}, {IntSlice: []int{0, 1, 2}}}}, C: 10, D: 100}, {C: 20, D: 200}}}}, actual) + assert.Equal(t, []map[string]any{{"B": []testStruct{{B: testB{TestASlice: []testA{{IntSlice: []int{3, 2, 0}}, {IntSlice: []int{0, 1, 2}}}}, C: testC{TestAPtrSlice: ptrSlice}, D: 10, F: 100}, {D: 20, F: 200}}}}, actual) actual, err = getMapsFromPath(testMap, []string{"A", "B"}) require.NoError(t, err) - assert.Equal(t, []map[string]any{{"A": map[string]any{"IntSlice": []int(nil)}, "B": map[string]any{"TestASlice": []testA{{IntSlice: []int{3, 2, 0}}, {IntSlice: []int{0, 1, 2}}}}, "C": 10, "D": 100}, {"A": map[string]any{"IntSlice": []int(nil)}, "B": map[string]any{"TestASlice": []testA(nil)}, "C": 20, "D": 200}}, actual) + assert.Equal(t, 
[]map[string]interface{}{ + { + "A": map[string]interface{}{"IntSlice": []int(nil)}, + "B": map[string]interface{}{"TestASlice": []testA{{IntSlice: []int{3, 2, 0}}, {IntSlice: []int{0, 1, 2}}}}, + "C": map[string]interface{}{"TestAPtrSlice": ptrSlice}, "D": 10, "F": 100}, + { + "A": map[string]interface{}{"IntSlice": []int(nil)}, + "B": map[string]interface{}{"TestASlice": []testA(nil)}, "C": map[string]interface{}{"TestAPtrSlice": (*[]testA)(nil)}, "D": 20, "F": 200, + }, + }, actual) actual, err = getMapsFromPath(testMap, []string{"A", "B", "B"}) require.NoError(t, err) @@ -48,6 +63,10 @@ func TestGetMapsFromPath(t *testing.T) { actual, err = getMapsFromPath(testMap, []string{"A", "B", "B", "TestASlice"}) require.NoError(t, err) assert.Equal(t, []map[string]any{{"IntSlice": []int{3, 2, 0}}, {"IntSlice": []int{0, 1, 2}}}, actual) + + actual, err = getMapsFromPath(testMap, []string{"A", "B", "C", "TestAPtrSlice"}) + require.NoError(t, err) + assert.Equal(t, []map[string]any{{"IntSlice": []int{4, 3, 2}}, {"IntSlice": []int{1, 2, 3}}}, actual) } func TestFitsInNBitsSigned(t *testing.T) { diff --git a/pkg/codec/wrapper.go b/pkg/codec/wrapper.go new file mode 100644 index 000000000..dd1061244 --- /dev/null +++ b/pkg/codec/wrapper.go @@ -0,0 +1,62 @@ +package codec + +import ( + "fmt" + "reflect" +) + +func NewWrapperModifier(fields map[string]string) Modifier { + m := &wrapperModifier{ + modifierBase: modifierBase[string]{ + fields: fields, + onToOffChainType: map[reflect.Type]reflect.Type{}, + offToOnChainType: map[reflect.Type]reflect.Type{}, + }, + } + + m.modifyFieldForInput = func(_ string, field *reflect.StructField, _ string, fieldName string) error { + field.Type = reflect.StructOf([]reflect.StructField{{ + Name: fieldName, + Type: field.Type, + }}) + return nil + } + + return m +} + +type wrapperModifier struct { + modifierBase[string] +} + +func (t *wrapperModifier) TransformToOnChain(offChainValue any, _ string) (any, error) { + return 
transformWithMaps(offChainValue, t.offToOnChainType, t.fields, unwrapFieldMapAction) +} + +func (t *wrapperModifier) TransformToOffChain(onChainValue any, _ string) (any, error) { + return transformWithMaps(onChainValue, t.onToOffChainType, t.fields, wrapFieldMapAction) +} + +func wrapFieldMapAction(typesMap map[string]any, fieldName string, wrappedFieldName string) error { + field, exists := typesMap[fieldName] + if !exists { + return fmt.Errorf("field %s does not exist", fieldName) + } + + typesMap[fieldName] = map[string]any{wrappedFieldName: field} + return nil +} + +func unwrapFieldMapAction(typesMap map[string]any, fieldName string, wrappedFieldName string) error { + _, exists := typesMap[fieldName] + if !exists { + return fmt.Errorf("field %s does not exist", fieldName) + } + val, isOk := typesMap[fieldName].(map[string]any)[wrappedFieldName] + if !isOk { + return fmt.Errorf("field %s.%s does not exist", fieldName, wrappedFieldName) + } + + typesMap[fieldName] = val + return nil +} diff --git a/pkg/codec/wrapper_test.go b/pkg/codec/wrapper_test.go new file mode 100644 index 000000000..11bf148b6 --- /dev/null +++ b/pkg/codec/wrapper_test.go @@ -0,0 +1,390 @@ +package codec_test + +import ( + "errors" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/codec" + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +func TestWrapper(t *testing.T) { + t.Parallel() + + type testStruct struct { + A string + B int64 + C int64 + } + + type nestedTestStruct struct { + A string + B testStruct + C []testStruct + D string + } + + wrapper := codec.NewWrapperModifier(map[string]string{"A": "X", "C": "Z"}) + invalidWrapper := codec.NewWrapperModifier(map[string]string{"W": "X", "C": "Z"}) + nestedWrapper := codec.NewWrapperModifier(map[string]string{"A": "X", "B.A": "X", "B.C": "Z", "C.A": "X", "C.C": "Z"}) + t.Run("RetypeToOffChain works on slices", func(t 
*testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([]testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Slice, offChainType.Kind()) + assertBasicWrapperTransform(t, offChainType.Elem()) + }) + + t.Run("RetypeToOffChain works on pointers", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Pointer, offChainType.Kind()) + assertBasicWrapperTransform(t, offChainType.Elem()) + }) + + t.Run("RetypeToOffChain works on pointers to non structs", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&[]testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Pointer, offChainType.Kind()) + assert.Equal(t, reflect.Slice, offChainType.Elem().Kind()) + assertBasicWrapperTransform(t, offChainType.Elem().Elem()) + }) + + t.Run("RetypeToOffChain works on arrays", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([2]testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Array, offChainType.Kind()) + assert.Equal(t, 2, offChainType.Len()) + assertBasicWrapperTransform(t, offChainType.Elem()) + }) + + t.Run("RetypeToOffChain returns exception if a field is not on the type", func(t *testing.T) { + _, err := invalidWrapper.RetypeToOffChain(reflect.TypeOf(testStruct{}), "") + assert.True(t, errors.Is(err, types.ErrInvalidType)) + }) + + t.Run("RetypeToOffChain works on nested fields", func(t *testing.T) { + offChainType, err := nestedWrapper.RetypeToOffChain(reflect.TypeOf(nestedTestStruct{}), "") + require.NoError(t, err) + assert.Equal(t, 4, offChainType.NumField()) + + f0 := offChainType.Field(0) + f0PreRetype := reflect.TypeOf(nestedTestStruct{}).Field(0) + assert.Equal(t, wrapType("X", f0PreRetype.Type).String(), f0.Type.String()) + assert.Equal(t, "struct { A struct { X string }; B int64; C struct { Z int64 } }", 
offChainType.Field(1).Type.String()) + + f2 := offChainType.Field(2) + assert.Equal(t, reflect.Slice, f2.Type.Kind()) + assertBasicWrapperTransform(t, f2.Type.Elem()) + f3 := offChainType.Field(3) + assert.Equal(t, reflect.TypeOf(""), f3.Type) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on structs", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(testStruct{}), "") + require.NoError(t, err) + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + + output, err := wrapper.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + expected := testStruct{ + A: "foo", + B: 10, + C: 20, + } + assert.Equal(t, expected, output) + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain returns error if input type was not from TransformToOnChain", func(t *testing.T) { + _, err := invalidWrapper.TransformToOnChain(testStruct{}, "") + assert.True(t, errors.Is(err, types.ErrInvalidType)) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on pointers, but doesn't maintain same addresses", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&testStruct{}), "") + require.NoError(t, err) + rInput := reflect.New(offChainType.Elem()) + iOffchain := reflect.Indirect(rInput) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + require.NoError(t, err) + + expected := &testStruct{ + A: "foo", + B: 10, + C: 20, + } + assert.Equal(t, expected, output) + + newInput, err := 
wrapper.TransformToOffChain(output, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + + }) + + t.Run("TransformToOnChain and TransformToOffChain works on slices", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.MakeSlice(offChainType, 2, 2) + iOffchain := rInput.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + iOffchain = rInput.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("baz") + iOffchain.FieldByName("B").SetInt(15) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(25) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := []testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on nested slices", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([][]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.MakeSlice(offChainType, 2, 2) + rOuter := rInput.Index(0) + rOuter.Set(reflect.MakeSlice(rOuter.Type(), 2, 2)) + iOffchain := rOuter.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + iOffchain = rOuter.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("baz") + iOffchain.FieldByName("B").SetInt(15) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(25) + rOuter = rInput.Index(1) + rOuter.Set(reflect.MakeSlice(rOuter.Type(), 2, 2)) + iOffchain = rOuter.Index(0) + 
iOffchain.FieldByName("A").FieldByName("X").SetString("fooz") + iOffchain.FieldByName("B").SetInt(100) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(200) + iOffchain = rOuter.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("bazz") + iOffchain.FieldByName("B").SetInt(150) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(250) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := [][]testStruct{ + { + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + }, + { + { + A: "fooz", + B: 100, + C: 200, + }, + { + A: "bazz", + B: 150, + C: 250, + }, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on pointers to non structs", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&[]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.New(offChainType.Elem()) + rElm := reflect.MakeSlice(offChainType.Elem(), 2, 2) + iElm := rElm.Index(0) + iElm.FieldByName("A").FieldByName("X").SetString("foo") + iElm.FieldByName("B").SetInt(10) + iElm.FieldByName("C").FieldByName("Z").SetInt(20) + iElm = rElm.Index(1) + iElm.FieldByName("A").FieldByName("X").SetString("baz") + iElm.FieldByName("B").SetInt(15) + iElm.FieldByName("C").FieldByName("Z").SetInt(25) + reflect.Indirect(rInput).Set(rElm) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := &[]testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on 
arrays", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([2]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.New(offChainType).Elem() + iOffchain := rInput.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + iOffchain = rInput.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("baz") + iOffchain.FieldByName("B").SetInt(15) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(25) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := [2]testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on nested fields", func(t *testing.T) { + offChainType, err := nestedWrapper.RetypeToOffChain(reflect.TypeOf(nestedTestStruct{}), "") + require.NoError(t, err) + + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + rB := iOffchain.FieldByName("B") + assert.Equal(t, "struct { A struct { X string }; B int64; C struct { Z int64 } }", offChainType.Field(1).Type.String()) + + rB.FieldByName("A").FieldByName("X").SetString("foo") + rB.FieldByName("B").SetInt(10) + rB.FieldByName("C").FieldByName("Z").SetInt(20) + + rC := iOffchain.FieldByName("C") + rC.Set(reflect.MakeSlice(rC.Type(), 2, 2)) + iElm := rC.Index(0) + iElm.FieldByName("A").FieldByName("X").SetString("foo") + iElm.FieldByName("B").SetInt(10) + iElm.FieldByName("C").FieldByName("Z").SetInt(20) + iElm = rC.Index(1) + iElm.FieldByName("A").FieldByName("X").SetString("baz") + iElm.FieldByName("B").SetInt(15) + 
iElm.FieldByName("C").FieldByName("Z").SetInt(25) + + iOffchain.FieldByName("D").SetString("bar") + + output, err := nestedWrapper.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + expected := nestedTestStruct{ + A: "foo", + B: testStruct{ + A: "foo", + B: 10, + C: 20, + }, + C: []testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + }, + D: "bar", + } + assert.Equal(t, expected, output) + newInput, err := nestedWrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) +} + +func assertBasicWrapperTransform(t *testing.T, offChainType reflect.Type) { + require.Equal(t, 3, offChainType.NumField()) + + f0 := offChainType.Field(0).Type.Field(0) + assert.Equal(t, wrapType(f0.Name, f0.Type).String(), offChainType.Field(0).Type.String()) + + f1 := offChainType.Field(1) + assert.Equal(t, reflect.TypeOf(int64(0)), f1.Type) + + f2 := offChainType.Field(2).Type.Field(0) + assert.Equal(t, wrapType(f2.Name, f2.Type).String(), offChainType.Field(2).Type.String()) +} + +func wrapType(name string, typ reflect.Type) reflect.Type { + wrapped := reflect.StructOf([]reflect.StructField{{ + Name: name, + Type: typ, + }}) + return wrapped +} diff --git a/pkg/config/validate.go b/pkg/config/validate.go index 5ba476a15..6fd9b09f9 100644 --- a/pkg/config/validate.go +++ b/pkg/config/validate.go @@ -78,7 +78,7 @@ func validate(v reflect.Value, checkInterface bool) (err error) { for iter.Next() { mk := iter.Key() mv := iter.Value() - if !v.CanInterface() { + if !mv.CanInterface() { continue } if mv.Kind() == reflect.Ptr && mv.IsNil() { @@ -92,7 +92,7 @@ func validate(v reflect.Value, checkInterface bool) (err error) { case reflect.Slice, reflect.Array: for i := 0; i < v.Len(); i++ { iv := v.Index(i) - if !v.CanInterface() { + if !iv.CanInterface() { continue } if iv.Kind() == reflect.Ptr && iv.IsNil() { diff --git a/pkg/custmsg/custom_message.go 
b/pkg/custmsg/custom_message.go new file mode 100644 index 000000000..da2595555 --- /dev/null +++ b/pkg/custmsg/custom_message.go @@ -0,0 +1,128 @@ +package custmsg + +import ( + "context" + "fmt" + + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/chainlink-common/pkg/beholder" + "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb" +) + +type MessageEmitter interface { + // Emit sends a message to the labeler's destination. + Emit(context.Context, string) error + + // WithMapLabels sets the labels for the message to be emitted. Labels are cumulative. + WithMapLabels(map[string]string) MessageEmitter + + // With adds multiple key-value pairs to the emission. + With(keyValues ...string) MessageEmitter + + // Labels returns a view of the current labels. + Labels() map[string]string +} + +type Labeler struct { + labels map[string]string +} + +func NewLabeler() Labeler { + return Labeler{labels: make(map[string]string)} +} + +// WithMapLabels adds multiple key-value pairs to the CustomMessageLabeler for transmission +// With SendLogAsCustomMessage +func (l Labeler) WithMapLabels(labels map[string]string) MessageEmitter { + newCustomMessageLabeler := NewLabeler() + + // Copy existing labels from the current agent + for k, v := range l.labels { + newCustomMessageLabeler.labels[k] = v + } + + // Add new key-value pairs + for k, v := range labels { + newCustomMessageLabeler.labels[k] = v + } + + return newCustomMessageLabeler +} + +// With adds multiple key-value pairs to the CustomMessageLabeler for transmission With SendLogAsCustomMessage +func (l Labeler) With(keyValues ...string) MessageEmitter { + newCustomMessageLabeler := NewLabeler() + + if len(keyValues)%2 != 0 { + // If an odd number of key-value arguments is passed, return the original CustomMessageLabeler unchanged + return l + } + + // Copy existing labels from the current agent + for k, v := range l.labels { + newCustomMessageLabeler.labels[k] = v + } + + // Add new key-value 
pairs + for i := 0; i < len(keyValues); i += 2 { + key := keyValues[i] + value := keyValues[i+1] + newCustomMessageLabeler.labels[key] = value + } + + return newCustomMessageLabeler +} + +func (l Labeler) Emit(ctx context.Context, msg string) error { + return sendLogAsCustomMessageW(ctx, msg, l.labels) +} + +func (l Labeler) Labels() map[string]string { + copied := make(map[string]string, len(l.labels)) + for k, v := range l.labels { + copied[k] = v + } + return copied +} + +// SendLogAsCustomMessage emits a BaseMessage With msg and labels as data. +// any key in labels that is not part of orderedLabelKeys will not be transmitted +func (l Labeler) SendLogAsCustomMessage(ctx context.Context, msg string) error { + return sendLogAsCustomMessageW(ctx, msg, l.labels) +} + +func sendLogAsCustomMessageW(ctx context.Context, msg string, labels map[string]string) error { + // TODO un-comment after INFOPLAT-1386 + // cast to map[string]any + //newLabels := map[string]any{} + //for k, v := range labels { + // newLabels[k] = v + //} + + //m, err := values.NewMap(newLabels) + //if err != nil { + // return fmt.Errorf("could not wrap labels to map: %w", err) + //} + + // Define a custom protobuf payload to emit + payload := &pb.BaseMessage{ + Msg: msg, + Labels: labels, + } + payloadBytes, err := proto.Marshal(payload) + if err != nil { + return fmt.Errorf("sending custom message failed to marshal protobuf: %w", err) + } + + err = beholder.GetEmitter().Emit(ctx, payloadBytes, + "beholder_data_schema", "/beholder-base-message/versions/1", // required + "beholder_domain", "platform", // required + "beholder_entity", "BaseMessage", // required + ) + if err != nil { + return fmt.Errorf("sending custom message failed on emit: %w", err) + } + + return nil +} diff --git a/pkg/custmsg/custom_message_test.go b/pkg/custmsg/custom_message_test.go new file mode 100644 index 000000000..d6881f1c1 --- /dev/null +++ b/pkg/custmsg/custom_message_test.go @@ -0,0 +1,26 @@ +package custmsg + +import 
( + "testing" + + "github.com/stretchr/testify/assert" +) + +// tests CustomMessageAgent does not share state across new instances created by `With` +func Test_CustomMessageAgent(t *testing.T) { + cma := NewLabeler() + cma1 := cma.With("key1", "value1") + cma2 := cma1.With("key2", "value2") + + assert.NotEqual(t, cma1.Labels(), cma2.Labels()) +} + +func Test_CustomMessageAgent_With(t *testing.T) { + cma := NewLabeler().With("key1", "value1") + assert.Equal(t, cma.Labels(), map[string]string{"key1": "value1"}) +} + +func Test_CustomMessageAgent_WithMapLabels(t *testing.T) { + cma := NewLabeler().WithMapLabels(map[string]string{"key1": "value1"}) + assert.Equal(t, cma.Labels(), map[string]string{"key1": "value1"}) +} diff --git a/pkg/loop/config.go b/pkg/loop/config.go index 7250c94b4..ea68cfa4a 100644 --- a/pkg/loop/config.go +++ b/pkg/loop/config.go @@ -6,6 +6,7 @@ import ( "os" "strconv" "strings" + "time" "github.com/hashicorp/go-plugin" ) @@ -20,12 +21,19 @@ const ( envTracingAttribute = "CL_TRACING_ATTRIBUTE_" envTracingTLSCertPath = "CL_TRACING_TLS_CERT_PATH" - envTelemetryEnabled = "CL_TELEMETRY_ENABLED" - envTelemetryEndpoint = "CL_TELEMETRY_ENDPOINT" - envTelemetryInsecureConn = "CL_TELEMETRY_INSECURE_CONNECTION" - envTelemetryCACertFile = "CL_TELEMETRY_CA_CERT_FILE" - envTelemetryAttribute = "CL_TELEMETRY_ATTRIBUTE_" - envTelemetryTraceSampleRatio = "CL_TELEMETRY_TRACE_SAMPLE_RATIO" + envTelemetryEnabled = "CL_TELEMETRY_ENABLED" + envTelemetryEndpoint = "CL_TELEMETRY_ENDPOINT" + envTelemetryInsecureConn = "CL_TELEMETRY_INSECURE_CONNECTION" + envTelemetryCACertFile = "CL_TELEMETRY_CA_CERT_FILE" + envTelemetryAttribute = "CL_TELEMETRY_ATTRIBUTE_" + envTelemetryTraceSampleRatio = "CL_TELEMETRY_TRACE_SAMPLE_RATIO" + envTelemetryAuthHeader = "CL_TELEMETRY_AUTH_HEADER" + envTelemetryAuthPubKeyHex = "CL_TELEMETRY_AUTH_PUB_KEY_HEX" + envTelemetryEmitterBatchProcessor = "CL_TELEMETRY_EMITTER_BATCH_PROCESSOR" + envTelemetryEmitterExportTimeout = 
"CL_TELEMETRY_EMITTER_EXPORT_TIMEOUT" + envTelemetryEmitterExportInterval = "CL_TELEMETRY_EMITTER_EXPORT_INTERVAL" + envTelemetryEmitterExportMaxBatchSize = "CL_TELEMETRY_EMITTER_EXPORT_MAX_BATCH_SIZE" + envTelemetryEmitterMaxQueueSize = "CL_TELEMETRY_EMITTER_MAX_QUEUE_SIZE" ) // EnvConfig is the configuration between the application and the LOOP executable. The values @@ -41,12 +49,19 @@ type EnvConfig struct { TracingTLSCertPath string TracingAttributes map[string]string - TelemetryEnabled bool - TelemetryEndpoint string - TelemetryInsecureConnection bool - TelemetryCACertFile string - TelemetryAttributes OtelAttributes - TelemetryTraceSampleRatio float64 + TelemetryEnabled bool + TelemetryEndpoint string + TelemetryInsecureConnection bool + TelemetryCACertFile string + TelemetryAttributes OtelAttributes + TelemetryTraceSampleRatio float64 + TelemetryAuthHeaders map[string]string + TelemetryAuthPubKeyHex string + TelemetryEmitterBatchProcessor bool + TelemetryEmitterExportTimeout time.Duration + TelemetryEmitterExportInterval time.Duration + TelemetryEmitterExportMaxBatchSize int + TelemetryEmitterMaxQueueSize int } // AsCmdEnv returns a slice of environment variable key/value pairs for an exec.Cmd. 
@@ -78,6 +93,15 @@ func (e *EnvConfig) AsCmdEnv() (env []string) { add(envTelemetryAttribute+k, v) } + for k, v := range e.TelemetryAuthHeaders { + add(envTelemetryAuthHeader+k, v) + } + add(envTelemetryAuthPubKeyHex, e.TelemetryAuthPubKeyHex) + add(envTelemetryEmitterBatchProcessor, strconv.FormatBool(e.TelemetryEmitterBatchProcessor)) + add(envTelemetryEmitterExportTimeout, e.TelemetryEmitterExportTimeout.String()) + add(envTelemetryEmitterExportInterval, e.TelemetryEmitterExportInterval.String()) + add(envTelemetryEmitterExportMaxBatchSize, strconv.Itoa(e.TelemetryEmitterExportMaxBatchSize)) + add(envTelemetryEmitterMaxQueueSize, strconv.Itoa(e.TelemetryEmitterMaxQueueSize)) return } @@ -87,7 +111,7 @@ func (e *EnvConfig) parse() error { var err error e.DatabaseURL, err = getDatabaseURL() if err != nil { - return fmt.Errorf("failed to parse %s: %q", envDatabaseURL, err) + return fmt.Errorf("failed to parse %s: %w", envDatabaseURL, err) } e.PrometheusPort, err = strconv.Atoi(promPortStr) @@ -105,7 +129,7 @@ func (e *EnvConfig) parse() error { if err != nil { return err } - e.TracingAttributes = getAttributes(envTracingAttribute) + e.TracingAttributes = getMap(envTracingAttribute) e.TracingSamplingRatio = getFloat64OrZero(envTracingSamplingRatio) e.TracingTLSCertPath = os.Getenv(envTracingTLSCertPath) } @@ -122,8 +146,30 @@ func (e *EnvConfig) parse() error { return fmt.Errorf("failed to parse %s: %w", envTelemetryEndpoint, err) } e.TelemetryCACertFile = os.Getenv(envTelemetryCACertFile) - e.TelemetryAttributes = getAttributes(envTelemetryAttribute) + e.TelemetryAttributes = getMap(envTelemetryAttribute) e.TelemetryTraceSampleRatio = getFloat64OrZero(envTelemetryTraceSampleRatio) + e.TelemetryAuthHeaders = getMap(envTelemetryAuthHeader) + e.TelemetryAuthPubKeyHex = os.Getenv(envTelemetryAuthPubKeyHex) + e.TelemetryEmitterBatchProcessor, err = getBool(envTelemetryEmitterBatchProcessor) + if err != nil { + return fmt.Errorf("failed to parse %s: %w", 
envTelemetryEmitterBatchProcessor, err) + } + e.TelemetryEmitterExportTimeout, err = time.ParseDuration(os.Getenv(envTelemetryEmitterExportTimeout)) + if err != nil { + return fmt.Errorf("failed to parse %s: %w", envTelemetryEmitterExportTimeout, err) + } + e.TelemetryEmitterExportInterval, err = time.ParseDuration(os.Getenv(envTelemetryEmitterExportInterval)) + if err != nil { + return fmt.Errorf("failed to parse %s: %w", envTelemetryEmitterExportInterval, err) + } + e.TelemetryEmitterExportMaxBatchSize, err = strconv.Atoi(os.Getenv(envTelemetryEmitterExportMaxBatchSize)) + if err != nil { + return fmt.Errorf("failed to parse %s: %w", envTelemetryEmitterExportMaxBatchSize, err) + } + e.TelemetryEmitterMaxQueueSize, err = strconv.Atoi(os.Getenv(envTelemetryEmitterMaxQueueSize)) + if err != nil { + return fmt.Errorf("failed to parse %s: %w", envTelemetryEmitterMaxQueueSize, err) + } } return nil } @@ -158,14 +204,18 @@ func getValidCollectorTarget() (string, error) { return tracingCollectorTarget, nil } -func getAttributes(envKeyPrefix string) map[string]string { - tracingAttributes := make(map[string]string) +func getMap(envKeyPrefix string) map[string]string { + m := make(map[string]string) for _, env := range os.Environ() { if strings.HasPrefix(env, envKeyPrefix) { - tracingAttributes[strings.TrimPrefix(env, envKeyPrefix)] = os.Getenv(env) + key, value, found := strings.Cut(env, "=") + if found { + key = strings.TrimPrefix(key, envKeyPrefix) + m[key] = value + } } } - return tracingAttributes + return m } // Any errors in parsing result in a sampling ratio of 0.0. 
diff --git a/pkg/loop/config_test.go b/pkg/loop/config_test.go index e0bcc1d5e..f57eff666 100644 --- a/pkg/loop/config_test.go +++ b/pkg/loop/config_test.go @@ -1,10 +1,13 @@ package loop import ( + "maps" "net/url" + "os" "strconv" "strings" "testing" + "time" "github.com/hashicorp/go-plugin" "github.com/stretchr/testify/assert" @@ -16,34 +19,74 @@ import ( func TestEnvConfig_parse(t *testing.T) { cases := []struct { - name string - envVars map[string]string - expectError bool - expectedDatabaseURL string - expectedPrometheusPort int - expectedTracingEnabled bool - expectedTracingCollectorTarget string - expectedTracingSamplingRatio float64 - expectedTracingTLSCertPath string + name string + envVars map[string]string + expectError bool + expectedDatabaseURL string + expectedPrometheusPort int + expectedTracingEnabled bool + expectedTracingCollectorTarget string + expectedTracingSamplingRatio float64 + expectedTracingTLSCertPath string + expectedTelemetryEnabled bool + expectedTelemetryEndpoint string + expectedTelemetryInsecureConn bool + expectedTelemetryCACertFile string + expectedTelemetryAttributes OtelAttributes + expectedTelemetryTraceSampleRatio float64 + expectedTelemetryAuthHeaders map[string]string + expectedTelemetryAuthPubKeyHex string + expectedTelemetryEmitterBatchProcessor bool + expectedTelemetryEmitterExportTimeout time.Duration + expectedTelemetryEmitterExportInterval time.Duration + expectedTelemetryEmitterExportMaxBatchSize int + expectedTelemetryEmitterMaxQueueSize int }{ { name: "All variables set correctly", envVars: map[string]string{ - envDatabaseURL: "postgres://user:password@localhost:5432/db", - envPromPort: "8080", - envTracingEnabled: "true", - envTracingCollectorTarget: "some:target", - envTracingSamplingRatio: "1.0", - envTracingTLSCertPath: "internal/test/fixtures/client.pem", - envTracingAttribute + "XYZ": "value", + envDatabaseURL: "postgres://user:password@localhost:5432/db", + envPromPort: "8080", + envTracingEnabled: "true", + 
envTracingCollectorTarget: "some:target", + envTracingSamplingRatio: "1.0", + envTracingTLSCertPath: "internal/test/fixtures/client.pem", + envTracingAttribute + "XYZ": "value", + envTelemetryEnabled: "true", + envTelemetryEndpoint: "example.com/beholder", + envTelemetryInsecureConn: "true", + envTelemetryCACertFile: "foo/bar", + envTelemetryAttribute + "foo": "bar", + envTelemetryAttribute + "baz": "42", + envTelemetryTraceSampleRatio: "0.42", + envTelemetryAuthHeader + "header-key": "header-value", + envTelemetryAuthPubKeyHex: "pub-key-hex", + envTelemetryEmitterBatchProcessor: "true", + envTelemetryEmitterExportTimeout: "1s", + envTelemetryEmitterExportInterval: "2s", + envTelemetryEmitterExportMaxBatchSize: "100", + envTelemetryEmitterMaxQueueSize: "1000", }, - expectError: false, - expectedDatabaseURL: "postgres://user:password@localhost:5432/db", - expectedPrometheusPort: 8080, - expectedTracingEnabled: true, - expectedTracingCollectorTarget: "some:target", - expectedTracingSamplingRatio: 1.0, - expectedTracingTLSCertPath: "internal/test/fixtures/client.pem", + expectError: false, + expectedDatabaseURL: "postgres://user:password@localhost:5432/db", + expectedPrometheusPort: 8080, + expectedTracingEnabled: true, + expectedTracingCollectorTarget: "some:target", + expectedTracingSamplingRatio: 1.0, + expectedTracingTLSCertPath: "internal/test/fixtures/client.pem", + expectedTelemetryEnabled: true, + expectedTelemetryEndpoint: "example.com/beholder", + expectedTelemetryInsecureConn: true, + expectedTelemetryCACertFile: "foo/bar", + expectedTelemetryAttributes: OtelAttributes{"foo": "bar", "baz": "42"}, + expectedTelemetryTraceSampleRatio: 0.42, + expectedTelemetryAuthHeaders: map[string]string{"header-key": "header-value"}, + expectedTelemetryAuthPubKeyHex: "pub-key-hex", + expectedTelemetryEmitterBatchProcessor: true, + expectedTelemetryEmitterExportTimeout: 1 * time.Second, + expectedTelemetryEmitterExportInterval: 2 * time.Second, + 
expectedTelemetryEmitterExportMaxBatchSize: 100, + expectedTelemetryEmitterMaxQueueSize: 1000, }, { name: "CL_DATABASE_URL parse error", @@ -104,12 +147,75 @@ func TestEnvConfig_parse(t *testing.T) { if config.TracingTLSCertPath != tc.expectedTracingTLSCertPath { t.Errorf("Expected tracingTLSCertPath %s, got %s", tc.expectedTracingTLSCertPath, config.TracingTLSCertPath) } + if config.TelemetryEnabled != tc.expectedTelemetryEnabled { + t.Errorf("Expected telemetryEnabled %v, got %v", tc.expectedTelemetryEnabled, config.TelemetryEnabled) + } + if config.TelemetryEndpoint != tc.expectedTelemetryEndpoint { + t.Errorf("Expected telemetryEndpoint %s, got %s", tc.expectedTelemetryEndpoint, config.TelemetryEndpoint) + } + if config.TelemetryInsecureConnection != tc.expectedTelemetryInsecureConn { + t.Errorf("Expected telemetryInsecureConn %v, got %v", tc.expectedTelemetryInsecureConn, config.TelemetryInsecureConnection) + } + if config.TelemetryCACertFile != tc.expectedTelemetryCACertFile { + t.Errorf("Expected telemetryCACertFile %s, got %s", tc.expectedTelemetryCACertFile, config.TelemetryCACertFile) + } + if !maps.Equal(config.TelemetryAttributes, tc.expectedTelemetryAttributes) { + t.Errorf("Expected telemetryAttributes %v, got %v", tc.expectedTelemetryAttributes, config.TelemetryAttributes) + } + if config.TelemetryTraceSampleRatio != tc.expectedTelemetryTraceSampleRatio { + t.Errorf("Expected telemetryTraceSampleRatio %f, got %f", tc.expectedTelemetryTraceSampleRatio, config.TelemetryTraceSampleRatio) + } + if !maps.Equal(config.TelemetryAuthHeaders, tc.expectedTelemetryAuthHeaders) { + t.Errorf("Expected telemetryAuthHeaders %v, got %v", tc.expectedTelemetryAuthHeaders, config.TelemetryAuthHeaders) + } + if config.TelemetryAuthPubKeyHex != tc.expectedTelemetryAuthPubKeyHex { + t.Errorf("Expected telemetryAuthPubKeyHex %s, got %s", tc.expectedTelemetryAuthPubKeyHex, config.TelemetryAuthPubKeyHex) + } + if config.TelemetryEmitterBatchProcessor != 
tc.expectedTelemetryEmitterBatchProcessor { + t.Errorf("Expected telemetryEmitterBatchProcessor %v, got %v", tc.expectedTelemetryEmitterBatchProcessor, config.TelemetryEmitterBatchProcessor) + } + if config.TelemetryEmitterExportTimeout != tc.expectedTelemetryEmitterExportTimeout { + t.Errorf("Expected telemetryEmitterExportTimeout %v, got %v", tc.expectedTelemetryEmitterExportTimeout, config.TelemetryEmitterExportTimeout) + } + if config.TelemetryEmitterExportInterval != tc.expectedTelemetryEmitterExportInterval { + t.Errorf("Expected telemetryEmitterExportInterval %v, got %v", tc.expectedTelemetryEmitterExportInterval, config.TelemetryEmitterExportInterval) + } + if config.TelemetryEmitterExportMaxBatchSize != tc.expectedTelemetryEmitterExportMaxBatchSize { + t.Errorf("Expected telemetryEmitterExportMaxBatchSize %d, got %d", tc.expectedTelemetryEmitterExportMaxBatchSize, config.TelemetryEmitterExportMaxBatchSize) + } + if config.TelemetryEmitterMaxQueueSize != tc.expectedTelemetryEmitterMaxQueueSize { + t.Errorf("Expected telemetryEmitterMaxQueueSize %d, got %d", tc.expectedTelemetryEmitterMaxQueueSize, config.TelemetryEmitterMaxQueueSize) + } } } }) } } +func equalOtelAttributes(a, b OtelAttributes) bool { + if len(a) != len(b) { + return false + } + for k, v := range a { + if b[k] != v { + return false + } + } + return true +} + +func equalStringMaps(a, b map[string]string) bool { + if len(a) != len(b) { + return false + } + for k, v := range a { + if b[k] != v { + return false + } + } + return true +} + func TestEnvConfig_AsCmdEnv(t *testing.T) { envCfg := EnvConfig{ DatabaseURL: &url.URL{Scheme: "postgres", Host: "localhost:5432", User: url.UserPassword("user", "password"), Path: "/db"}, @@ -121,12 +227,19 @@ func TestEnvConfig_AsCmdEnv(t *testing.T) { TracingTLSCertPath: "some/path", TracingAttributes: map[string]string{"key": "value"}, - TelemetryEnabled: true, - TelemetryEndpoint: "example.com/beholder", - TelemetryInsecureConnection: true, - 
TelemetryCACertFile: "foo/bar", - TelemetryAttributes: OtelAttributes{"foo": "bar", "baz": "42"}, - TelemetryTraceSampleRatio: 0.42, + TelemetryEnabled: true, + TelemetryEndpoint: "example.com/beholder", + TelemetryInsecureConnection: true, + TelemetryCACertFile: "foo/bar", + TelemetryAttributes: OtelAttributes{"foo": "bar", "baz": "42"}, + TelemetryTraceSampleRatio: 0.42, + TelemetryAuthHeaders: map[string]string{"header-key": "header-value"}, + TelemetryAuthPubKeyHex: "pub-key-hex", + TelemetryEmitterBatchProcessor: true, + TelemetryEmitterExportTimeout: 1 * time.Second, + TelemetryEmitterExportInterval: 2 * time.Second, + TelemetryEmitterExportMaxBatchSize: 100, + TelemetryEmitterMaxQueueSize: 1000, } got := map[string]string{} for _, kv := range envCfg.AsCmdEnv() { @@ -151,6 +264,42 @@ func TestEnvConfig_AsCmdEnv(t *testing.T) { assert.Equal(t, "0.42", got[envTelemetryTraceSampleRatio]) assert.Equal(t, "bar", got[envTelemetryAttribute+"foo"]) assert.Equal(t, "42", got[envTelemetryAttribute+"baz"]) + assert.Equal(t, "header-value", got[envTelemetryAuthHeader+"header-key"]) + assert.Equal(t, "pub-key-hex", got[envTelemetryAuthPubKeyHex]) + assert.Equal(t, "true", got[envTelemetryEmitterBatchProcessor]) + assert.Equal(t, "1s", got[envTelemetryEmitterExportTimeout]) + assert.Equal(t, "2s", got[envTelemetryEmitterExportInterval]) + assert.Equal(t, "100", got[envTelemetryEmitterExportMaxBatchSize]) + assert.Equal(t, "1000", got[envTelemetryEmitterMaxQueueSize]) +} + +func TestGetMap(t *testing.T) { + os.Setenv("TEST_PREFIX_KEY1", "value1") + os.Setenv("TEST_PREFIX_KEY2", "value2") + os.Setenv("OTHER_KEY", "othervalue") + + defer func() { + os.Unsetenv("TEST_PREFIX_KEY1") + os.Unsetenv("TEST_PREFIX_KEY2") + os.Unsetenv("OTHER_KEY") + }() + + result := getMap("TEST_PREFIX_") + + expected := map[string]string{ + "KEY1": "value1", + "KEY2": "value2", + } + + if len(result) != len(expected) { + t.Errorf("Expected map length %d, got %d", len(expected), len(result)) + } + + 
for k, v := range expected { + if result[k] != v { + t.Errorf("Expected key %s to have value %s, but got %s", k, v, result[k]) + } + } } func TestManagedGRPCClientConfig(t *testing.T) { diff --git a/pkg/loop/internal/core/services/capability/capabilities.go b/pkg/loop/internal/core/services/capability/capabilities.go index 095429a8e..f1e55c576 100644 --- a/pkg/loop/internal/core/services/capability/capabilities.go +++ b/pkg/loop/internal/core/services/capability/capabilities.go @@ -309,7 +309,8 @@ func (c *executableServer) RegisterToWorkflow(ctx context.Context, req *capabili err = c.impl.RegisterToWorkflow(ctx, capabilities.RegisterToWorkflowRequest{ Metadata: capabilities.RegistrationMetadata{ - WorkflowID: req.Metadata.WorkflowId, + WorkflowID: req.Metadata.WorkflowId, + ReferenceID: req.Metadata.ReferenceId, }, Config: config, }) @@ -324,7 +325,8 @@ func (c *executableServer) UnregisterFromWorkflow(ctx context.Context, req *capa err = c.impl.UnregisterFromWorkflow(ctx, capabilities.UnregisterFromWorkflowRequest{ Metadata: capabilities.RegistrationMetadata{ - WorkflowID: req.Metadata.WorkflowId, + WorkflowID: req.Metadata.WorkflowId, + ReferenceID: req.Metadata.ReferenceId, }, Config: config, }) @@ -398,7 +400,8 @@ func (c *executableClient) UnregisterFromWorkflow(ctx context.Context, req capab r := &capabilitiespb.UnregisterFromWorkflowRequest{ Config: values.ProtoMap(config), Metadata: &capabilitiespb.RegistrationMetadata{ - WorkflowId: req.Metadata.WorkflowID, + WorkflowId: req.Metadata.WorkflowID, + ReferenceId: req.Metadata.ReferenceID, }, } @@ -415,7 +418,8 @@ func (c *executableClient) RegisterToWorkflow(ctx context.Context, req capabilit r := &capabilitiespb.RegisterToWorkflowRequest{ Config: values.ProtoMap(config), Metadata: &capabilitiespb.RegistrationMetadata{ - WorkflowId: req.Metadata.WorkflowID, + WorkflowId: req.Metadata.WorkflowID, + ReferenceId: req.Metadata.ReferenceID, }, } diff --git 
a/pkg/loop/internal/core/services/capability/capabilities_registry.go b/pkg/loop/internal/core/services/capability/capabilities_registry.go index b6436bc42..88e81809b 100644 --- a/pkg/loop/internal/core/services/capability/capabilities_registry.go +++ b/pkg/loop/internal/core/services/capability/capabilities_registry.go @@ -94,6 +94,7 @@ func (cr *capabilitiesRegistryClient) ConfigForCapability(ctx context.Context, c var remoteTriggerConfig *capabilities.RemoteTriggerConfig var remoteTargetConfig *capabilities.RemoteTargetConfig + var remoteExecutableConfig *capabilities.RemoteExecutableConfig switch res.CapabilityConfig.RemoteConfig.(type) { case *capabilitiespb.CapabilityConfig_RemoteTriggerConfig: @@ -110,12 +111,19 @@ func (cr *capabilitiesRegistryClient) ConfigForCapability(ctx context.Context, c prtc := res.CapabilityConfig.GetRemoteTargetConfig() remoteTargetConfig = &capabilities.RemoteTargetConfig{} remoteTargetConfig.RequestHashExcludedAttributes = prtc.RequestHashExcludedAttributes + case *capabilitiespb.CapabilityConfig_RemoteExecutableConfig: + prtc := res.CapabilityConfig.GetRemoteExecutableConfig() + remoteExecutableConfig = &capabilities.RemoteExecutableConfig{} + remoteExecutableConfig.RequestHashExcludedAttributes = prtc.RequestHashExcludedAttributes + remoteExecutableConfig.RegistrationRefresh = prtc.RegistrationRefresh.AsDuration() + remoteExecutableConfig.RegistrationExpiry = prtc.RegistrationExpiry.AsDuration() } return capabilities.CapabilityConfiguration{ - DefaultConfig: mc, - RemoteTriggerConfig: remoteTriggerConfig, - RemoteTargetConfig: remoteTargetConfig, + DefaultConfig: mc, + RemoteTriggerConfig: remoteTriggerConfig, + RemoteTargetConfig: remoteTargetConfig, + RemoteExecutableConfig: remoteExecutableConfig, }, nil } @@ -258,6 +266,19 @@ func (cr *capabilitiesRegistryClient) Add(ctx context.Context, c capabilities.Ba return nil } +func (cr *capabilitiesRegistryClient) Remove(ctx context.Context, ID string) error { + req := 
&pb.RemoveRequest{ + Id: ID, + } + + _, err := cr.grpc.Remove(ctx, req) + if err != nil { + return err + } + + return nil +} + func NewCapabilitiesRegistryClient(cc grpc.ClientConnInterface, b *net.BrokerExt) *capabilitiesRegistryClient { return &capabilitiesRegistryClient{grpc: pb.NewCapabilitiesRegistryClient(cc), BrokerExt: b.WithName("CapabilitiesRegistryClient")} } @@ -333,6 +354,16 @@ func (c *capabilitiesRegistryServer) ConfigForCapability(ctx context.Context, re } } + if cc.RemoteExecutableConfig != nil { + ccp.RemoteConfig = &capabilitiespb.CapabilityConfig_RemoteExecutableConfig{ + RemoteExecutableConfig: &capabilitiespb.RemoteExecutableConfig{ + RequestHashExcludedAttributes: cc.RemoteExecutableConfig.RequestHashExcludedAttributes, + RegistrationRefresh: durationpb.New(cc.RemoteExecutableConfig.RegistrationRefresh), + RegistrationExpiry: durationpb.New(cc.RemoteExecutableConfig.RegistrationExpiry), + }, + } + } + return &pb.ConfigForCapabilityReply{ CapabilityConfig: ccp, }, nil @@ -520,6 +551,14 @@ func (c *capabilitiesRegistryServer) Add(ctx context.Context, request *pb.AddReq return &emptypb.Empty{}, nil } +func (c *capabilitiesRegistryServer) Remove(ctx context.Context, request *pb.RemoveRequest) (*emptypb.Empty, error) { + err := c.impl.Remove(ctx, request.Id) + if err != nil { + return &emptypb.Empty{}, err + } + return &emptypb.Empty{}, nil +} + func NewCapabilitiesRegistryServer(b *net.BrokerExt, i core.CapabilitiesRegistry) *capabilitiesRegistryServer { return &capabilitiesRegistryServer{ BrokerExt: b.WithName("CapabilitiesRegistryServer"), diff --git a/pkg/loop/internal/core/services/capability/capabilities_registry_test.go b/pkg/loop/internal/core/services/capability/capabilities_registry_test.go index 620ef5b08..1adc87c6b 100644 --- a/pkg/loop/internal/core/services/capability/capabilities_registry_test.go +++ b/pkg/loop/internal/core/services/capability/capabilities_registry_test.go @@ -4,6 +4,7 @@ import ( "context" "errors" "testing" + 
"time" "github.com/hashicorp/go-plugin" "github.com/stretchr/testify/assert" @@ -435,3 +436,54 @@ func TestCapabilitiesRegistry_ConfigForCapabilities(t *testing.T) { require.NoError(t, err) assert.Equal(t, expectedCapConfig, capConf) } + +func TestCapabilitiesRegistry_ConfigForCapability_RemoteExecutableConfig(t *testing.T) { + stopCh := make(chan struct{}) + logger := logger.Test(t) + reg := mocks.NewCapabilitiesRegistry(t) + + pluginName := "registry-test" + client, server := plugin.TestPluginGRPCConn( + t, + true, + map[string]plugin.Plugin{ + pluginName: &testRegistryPlugin{ + impl: reg, + brokerExt: &net.BrokerExt{ + BrokerConfig: net.BrokerConfig{ + StopCh: stopCh, + Logger: logger, + }, + }, + }, + }, + ) + + defer client.Close() + defer server.Stop() + + regClient, err := client.Dispense(pluginName) + require.NoError(t, err) + + rc, ok := regClient.(*capabilitiesRegistryClient) + require.True(t, ok) + + capID := "some-cap@1.0.0" + donID := uint32(1) + wm, err := values.WrapMap(map[string]any{"hello": "world"}) + require.NoError(t, err) + + var rec capabilities.RemoteExecutableConfig + rec.ApplyDefaults() + expectedCapConfig := capabilities.CapabilityConfiguration{ + DefaultConfig: wm, + RemoteExecutableConfig: &rec, + } + reg.On("ConfigForCapability", mock.Anything, capID, donID).Once().Return(expectedCapConfig, nil) + + capConf, err := rc.ConfigForCapability(tests.Context(t), capID, donID) + require.NoError(t, err) + assert.Equal(t, expectedCapConfig, capConf) + assert.Equal(t, 30*time.Second, capConf.RemoteExecutableConfig.RegistrationRefresh) + assert.Equal(t, 2*time.Minute, capConf.RemoteExecutableConfig.RegistrationExpiry) +} diff --git a/pkg/loop/internal/goplugin/plugin_service.go b/pkg/loop/internal/goplugin/plugin_service.go index bf02298e8..5ce50e65d 100644 --- a/pkg/loop/internal/goplugin/plugin_service.go +++ b/pkg/loop/internal/goplugin/plugin_service.go @@ -118,6 +118,7 @@ func (s *PluginService[P, S]) launch() (*plugin.Client, 
plugin.ClientProtocol, e s.lggr.Debug("Launching") cc := s.grpcPlug.ClientConfig() + cc.SkipHostEnv = true cc.Cmd = s.cmd() client := plugin.NewClient(cc) cp, err := client.Client() diff --git a/pkg/loop/internal/keystore/keystore.go b/pkg/loop/internal/keystore/keystore.go new file mode 100644 index 000000000..903b3dd5a --- /dev/null +++ b/pkg/loop/internal/keystore/keystore.go @@ -0,0 +1,306 @@ +package keystore + +import ( + "context" + + "google.golang.org/grpc" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/goplugin" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" + keystorepb "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb/keystore" + "github.com/smartcontractkit/chainlink-common/pkg/services" + "github.com/smartcontractkit/chainlink-common/pkg/types/keystore" +) + +var _ keystore.Keystore = (*Client)(nil) + +type Client struct { + services.Service + *goplugin.PluginClient + + grpc keystorepb.KeystoreClient +} + +func NewKeystoreClient(broker net.Broker, brokerCfg net.BrokerConfig, conn *grpc.ClientConn) *Client { + brokerCfg.Logger = logger.Named(brokerCfg.Logger, "KeystoreClient") + pc := goplugin.NewPluginClient(broker, brokerCfg, conn) + return &Client{PluginClient: pc, grpc: keystorepb.NewKeystoreClient(pc)} +} + +func (c *Client) Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) { + reply, err := c.grpc.Sign(ctx, &keystorepb.SignRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) { + reply, err := c.grpc.SignBatch(ctx, &keystorepb.SignBatchRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) { + reply, err := 
c.grpc.Verify(ctx, &keystorepb.VerifyRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return false, err + } + return reply.Valid, nil +} + +func (c *Client) VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) { + reply, err := c.grpc.VerifyBatch(ctx, &keystorepb.VerifyBatchRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Valid, nil +} + +func (c *Client) ListKeys(ctx context.Context, tags []string) ([][]byte, error) { + reply, err := c.grpc.ListKeys(ctx, &keystorepb.ListKeysRequest{ + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyIDs, nil +} + +func (c *Client) RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) { + reply, err := c.grpc.RunUDF(ctx, &keystorepb.RunUDFRequest{ + Name: name, + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) { + reply, err := c.grpc.ImportKey(ctx, &keystorepb.ImportKeyRequest{ + KeyType: keyType, + Data: data, + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyID, nil +} + +func (c *Client) ExportKey(ctx context.Context, keyID []byte) ([]byte, error) { + reply, err := c.grpc.ExportKey(ctx, &keystorepb.ExportKeyRequest{ + KeyID: keyID, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) { + reply, err := c.grpc.CreateKey(ctx, &keystorepb.CreateKeyRequest{ + KeyType: keyType, + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyID, nil +} + +func (c *Client) DeleteKey(ctx context.Context, keyID []byte) error { + _, err := c.grpc.DeleteKey(ctx, &keystorepb.DeleteKeyRequest{ + KeyID: keyID, + }) + + if err != nil { + return err + } + return nil +} + 
+func (c *Client) AddTag(ctx context.Context, keyID []byte, tag string) error { + _, err := c.grpc.AddTag(ctx, &keystorepb.AddTagRequest{ + KeyID: keyID, + Tag: tag, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) RemoveTag(ctx context.Context, keyID []byte, tag string) error { + _, err := c.grpc.RemoveTag(ctx, &keystorepb.RemoveTagRequest{ + KeyID: keyID, + Tag: tag, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) ListTags(ctx context.Context, keyID []byte) ([]string, error) { + reply, err := c.grpc.ListTags(ctx, &keystorepb.ListTagsRequest{ + KeyID: keyID, + }) + + if err != nil { + return nil, err + } + return reply.Tags, nil +} + +var _ keystorepb.KeystoreServer = (*server)(nil) + +type server struct { + *net.BrokerExt + keystorepb.UnimplementedKeystoreServer + + impl GRPCService +} + +func RegisterKeystoreServer(server *grpc.Server, broker net.Broker, brokerCfg net.BrokerConfig, impl GRPCService) error { + keystorepb.RegisterKeystoreServer(server, newKeystoreServer(broker, brokerCfg, impl)) + return nil +} + +func newKeystoreServer(broker net.Broker, brokerCfg net.BrokerConfig, impl GRPCService) *server { + brokerCfg.Logger = logger.Named(brokerCfg.Logger, "KeystoreServer") + return &server{BrokerExt: &net.BrokerExt{Broker: broker, BrokerConfig: brokerCfg}, impl: impl} +} + +func (s *server) Sign(ctx context.Context, request *keystorepb.SignRequest) (*keystorepb.SignResponse, error) { + data, err := s.impl.Sign(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.SignResponse{Data: data}, err +} + +func (s *server) SignBatch(ctx context.Context, request *keystorepb.SignBatchRequest) (*keystorepb.SignBatchResponse, error) { + data, err := s.impl.SignBatch(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.SignBatchResponse{Data: data}, err +} + +func (s *server) Verify(ctx context.Context, request 
*keystorepb.VerifyRequest) (*keystorepb.VerifyResponse, error) { + valid, err := s.impl.Verify(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.VerifyResponse{Valid: valid}, err +} + +func (s *server) VerifyBatch(ctx context.Context, request *keystorepb.VerifyBatchRequest) (*keystorepb.VerifyBatchResponse, error) { + valid, err := s.impl.VerifyBatch(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.VerifyBatchResponse{Valid: valid}, err +} + +func (s *server) ListKeys(ctx context.Context, request *keystorepb.ListKeysRequest) (*keystorepb.ListKeysResponse, error) { + keyIDs, err := s.impl.ListKeys(ctx, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.ListKeysResponse{KeyIDs: keyIDs}, err +} + +func (s *server) RunUDF(ctx context.Context, request *keystorepb.RunUDFRequest) (*keystorepb.RunUDFResponse, error) { + data, err := s.impl.RunUDF(ctx, request.Name, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.RunUDFResponse{Data: data}, err +} + +func (s *server) ImportKey(ctx context.Context, request *keystorepb.ImportKeyRequest) (*keystorepb.ImportKeyResponse, error) { + keyIDs, err := s.impl.ImportKey(ctx, request.KeyType, request.Data, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.ImportKeyResponse{KeyID: keyIDs}, err +} + +func (s *server) ExportKey(ctx context.Context, request *keystorepb.ExportKeyRequest) (*keystorepb.ExportKeyResponse, error) { + data, err := s.impl.ExportKey(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.ExportKeyResponse{Data: data}, err +} + +func (s *server) CreateKey(ctx context.Context, request *keystorepb.CreateKeyRequest) (*keystorepb.CreateKeyResponse, error) { + keyIDs, err := s.impl.CreateKey(ctx, request.KeyType, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.CreateKeyResponse{KeyID: 
keyIDs}, err +} + +func (s *server) DeleteKey(ctx context.Context, request *keystorepb.DeleteKeyRequest) (*keystorepb.DeleteKeyResponse, error) { + err := s.impl.DeleteKey(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.DeleteKeyResponse{}, err +} + +func (s *server) AddTag(ctx context.Context, request *keystorepb.AddTagRequest) (*keystorepb.AddTagResponse, error) { + err := s.impl.AddTag(ctx, request.KeyID, request.Tag) + if err != nil { + return nil, err + } + return &keystorepb.AddTagResponse{}, err +} + +func (s *server) RemoveTag(ctx context.Context, request *keystorepb.RemoveTagRequest) (*keystorepb.RemoveTagResponse, error) { + err := s.impl.RemoveTag(ctx, request.KeyID, request.Tag) + if err != nil { + return nil, err + } + return &keystorepb.RemoveTagResponse{}, err +} + +func (s *server) ListTags(ctx context.Context, request *keystorepb.ListTagsRequest) (*keystorepb.ListTagsResponse, error) { + tags, err := s.impl.ListTags(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.ListTagsResponse{Tags: tags}, nil +} diff --git a/pkg/loop/internal/keystore/keystore_test.go b/pkg/loop/internal/keystore/keystore_test.go new file mode 100644 index 000000000..391d82168 --- /dev/null +++ b/pkg/loop/internal/keystore/keystore_test.go @@ -0,0 +1,255 @@ +package keystore + +import ( + "bytes" + "context" + "errors" + "fmt" + "reflect" + "testing" + + "github.com/hashicorp/go-plugin" + "github.com/stretchr/testify/require" + "google.golang.org/grpc" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" + "github.com/smartcontractkit/chainlink-common/pkg/services" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" +) + +func TestKeystore(t *testing.T) { + ctx := tests.Context(t) + stopCh := make(chan struct{}) + log := logger.Test(t) + + pluginName := "keystore-test" + client, server := plugin.TestPluginGRPCConn( + t, + 
true, + map[string]plugin.Plugin{ + pluginName: &testKeystorePlugin{ + log: log, + impl: &testKeystore{}, + brokerExt: &net.BrokerExt{ + BrokerConfig: net.BrokerConfig{ + StopCh: stopCh, + Logger: log, + }, + }, + }, + }, + ) + + defer client.Close() + defer server.Stop() + + keystoreClient, err := client.Dispense(pluginName) + require.NoError(t, err) + + ks, ok := keystoreClient.(*Client) + require.True(t, ok) + + r, err := ks.Sign(ctx, keyID, data) + require.NoError(t, err) + require.Equal(t, r, sign) + + r2, err := ks.SignBatch(ctx, keyID, dataList) + require.NoError(t, err) + require.Equal(t, r2, signBatch) + + r3, err := ks.Verify(ctx, keyID, data) + require.NoError(t, err) + require.Equal(t, r3, verify) + + r4, err := ks.VerifyBatch(ctx, keyID, dataList) + require.NoError(t, err) + require.Equal(t, r4, verifyBatch) + + r5, err := ks.ListKeys(ctx, tags) + require.NoError(t, err) + require.Equal(t, r5, list) + + r6, err := ks.RunUDF(ctx, udfName, keyID, data) + require.NoError(t, err) + require.Equal(t, r6, runUDF) + + r7, err := ks.ImportKey(ctx, keyType, data, tags) + require.NoError(t, err) + require.Equal(t, r7, importResponse) + + r8, err := ks.ExportKey(ctx, keyID) + require.NoError(t, err) + require.Equal(t, r8, export) + + r9, err := ks.CreateKey(ctx, keyType, tags) + require.NoError(t, err) + require.Equal(t, r9, create) + + err = ks.DeleteKey(ctx, keyID) + require.ErrorContains(t, err, errDelete.Error()) + + err = ks.AddTag(ctx, keyID, tag) + require.ErrorContains(t, err, errAddTag.Error()) + + err = ks.RemoveTag(ctx, keyID, tag) + require.ErrorContains(t, err, errRemoveTag.Error()) + + r10, err := ks.ListTags(ctx, keyID) + require.NoError(t, err) + require.Equal(t, r10, listTag) +} + +var ( + //Inputs + keyID = []byte("this-is-a-keyID") + data = []byte("some-data") + dataList = [][]byte{[]byte("some-data-in-a-list"), []byte("some-more-data-in-a-list")} + tags = []string{"tag1", "tag2"} + tag = "just-one-tag" + udfName = "i-am-a-udf-method-name" + 
keyType = "some-keyType" + + //Outputs + sign = []byte("signed") + signBatch = [][]byte{[]byte("signed1"), []byte("signed2")} + verify = true + verifyBatch = []bool{true, false} + list = [][]byte{[]byte("item1"), []byte("item2")} + runUDF = []byte("udf-response") + importResponse = []byte("imported") + export = []byte("exported") + create = []byte("created") + listTag = []string{"tag1", "tag2"} + errDelete = errors.New("delete-err") + errAddTag = errors.New("add-tag-err") + errRemoveTag = errors.New("remove-tag-err") +) + +type testKeystorePlugin struct { + log logger.Logger + plugin.NetRPCUnsupportedPlugin + brokerExt *net.BrokerExt + impl GRPCService +} + +func (r *testKeystorePlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, client *grpc.ClientConn) (any, error) { + r.brokerExt.Broker = broker + + return NewKeystoreClient(r.brokerExt.Broker, r.brokerExt.BrokerConfig, client), nil +} + +func (r *testKeystorePlugin) GRPCServer(broker *plugin.GRPCBroker, server *grpc.Server) error { + r.brokerExt.Broker = broker + + err := RegisterKeystoreServer(server, r.brokerExt.Broker, r.brokerExt.BrokerConfig, r.impl) + if err != nil { + return err + } + return nil +} + +type testKeystore struct { + services.Service +} + +func checkKeyID(target []byte) error { + if !bytes.Equal(target, keyID) { + return fmt.Errorf("checkKeyID: expected %v but got %v", keyID, target) + } + return nil +} + +func checkData(target []byte) error { + if !bytes.Equal(target, data) { + return fmt.Errorf("checkData: expected %v but got %v", data, target) + } + return nil +} + +func checkDataList(target [][]byte) error { + if !reflect.DeepEqual(target, dataList) { + return fmt.Errorf("checkDataList: nexpected %v but got %v", data, target) + } + return nil +} + +func checkTags(target []string) error { + if !reflect.DeepEqual(target, tags) { + return fmt.Errorf("checkTags: expected %v but got %v", tags, target) + } + return nil +} + +func checkUdfName(target string) error { + if target != 
udfName { + return fmt.Errorf("checkUdfName: expected %v but got %v", udfName, target) + } + return nil +} + +func checkKeyType(target string) error { + if target != keyType { + return fmt.Errorf("checkKeyType: expected %q but got %q", keyType, target) + } + return nil +} + +func checkTag(target string) error { + if target != tag { + return fmt.Errorf("checkTag: expected %q but got %q", tag, target) + } + return nil +} + +func (t testKeystore) Sign(ctx context.Context, _keyID []byte, _data []byte) ([]byte, error) { + return sign, errors.Join(checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) SignBatch(ctx context.Context, _keyID []byte, _dataList [][]byte) ([][]byte, error) { + return signBatch, errors.Join(checkKeyID(_keyID), checkDataList(_dataList)) +} + +func (t testKeystore) Verify(ctx context.Context, _keyID []byte, _data []byte) (bool, error) { + return verify, errors.Join(checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) VerifyBatch(ctx context.Context, _keyID []byte, _dataList [][]byte) ([]bool, error) { + return verifyBatch, errors.Join(checkKeyID(_keyID), checkDataList(_dataList)) +} + +func (t testKeystore) ListKeys(ctx context.Context, _tags []string) ([][]byte, error) { + return list, checkTags(_tags) +} + +func (t testKeystore) RunUDF(ctx context.Context, _udfName string, _keyID []byte, _data []byte) ([]byte, error) { + return runUDF, errors.Join(checkUdfName(_udfName), checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) ImportKey(ctx context.Context, _keyType string, _data []byte, _tags []string) ([]byte, error) { + return importResponse, errors.Join(checkKeyType(_keyType), checkData(_data), checkTags(_tags)) +} + +func (t testKeystore) ExportKey(ctx context.Context, _keyID []byte) ([]byte, error) { + return export, checkKeyID(_keyID) +} + +func (t testKeystore) CreateKey(ctx context.Context, _keyType string, _tags []string) ([]byte, error) { + return create, errors.Join(checkKeyType(_keyType), 
checkTags(_tags)) +} + +func (t testKeystore) DeleteKey(ctx context.Context, _keyID []byte) error { + return errors.Join(errDelete, checkKeyID(_keyID)) +} + +func (t testKeystore) AddTag(ctx context.Context, _keyID []byte, _tag string) error { + return errors.Join(errAddTag, checkKeyID(_keyID), checkTag(_tag)) +} + +func (t testKeystore) RemoveTag(ctx context.Context, _keyID []byte, _tag string) error { + return errors.Join(errRemoveTag, checkKeyID(_keyID), checkTag(_tag)) +} + +func (t testKeystore) ListTags(ctx context.Context, _keyID []byte) ([]string, error) { + return listTag, checkKeyID(_keyID) +} diff --git a/pkg/loop/internal/keystore/types.go b/pkg/loop/internal/keystore/types.go new file mode 100644 index 000000000..145b1b28e --- /dev/null +++ b/pkg/loop/internal/keystore/types.go @@ -0,0 +1,30 @@ +package keystore + +import ( + "context" + + "github.com/smartcontractkit/chainlink-common/pkg/services" +) + +// GRPCService This interface contains all the functionalities of the GRPC layer of the LOOPP keystore +type GRPCService interface { + services.Service + Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) + SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) + Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) + VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) + + ListKeys(ctx context.Context, tags []string) ([][]byte, error) + + RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) + + ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) + ExportKey(ctx context.Context, keyID []byte) ([]byte, error) + + CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) + DeleteKey(ctx context.Context, keyID []byte) error + + AddTag(ctx context.Context, keyID []byte, tag string) error + RemoveTag(ctx context.Context, keyID []byte, tag string) error + ListTags(ctx context.Context, keyID 
[]byte) ([]string, error) +} diff --git a/pkg/loop/internal/net/client.go b/pkg/loop/internal/net/client.go index 0c96eafd4..fbe5978f6 100644 --- a/pkg/loop/internal/net/client.go +++ b/pkg/loop/internal/net/client.go @@ -10,6 +10,7 @@ import ( "google.golang.org/grpc" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb" ) var _ grpc.ClientConnInterface = (*AtomicClient)(nil) @@ -58,7 +59,12 @@ func (c *clientConn) Invoke(ctx context.Context, method string, args interface{} for cc != nil { err := cc.Invoke(ctx, method, args, reply, opts...) if isErrTerminal(err) { - c.Logger.Warnw("clientConn: Invoke: terminal error, refreshing connection", "err", err) + if method == pb.Service_Close_FullMethodName { + // don't reconnect just to call Close + c.Logger.Warnw("clientConn: Invoke: terminal error", "method", method, "err", err) + return err + } + c.Logger.Warnw("clientConn: Invoke: terminal error, refreshing connection", "method", method, "err", err) cc = c.refresh(ctx, cc) continue } diff --git a/pkg/loop/internal/pb/capabilities_registry.pb.go b/pkg/loop/internal/pb/capabilities_registry.pb.go index ea7afed71..513b9c993 100644 --- a/pkg/loop/internal/pb/capabilities_registry.pb.go +++ b/pkg/loop/internal/pb/capabilities_registry.pb.go @@ -799,6 +799,54 @@ func (x *AddRequest) GetType() ExecuteAPIType { return ExecuteAPIType_EXECUTE_API_TYPE_UNKNOWN } +// Remove has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.CapabilitiesRegistry.Remove]. 
+type RemoveRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *RemoveRequest) Reset() { + *x = RemoveRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveRequest) ProtoMessage() {} + +func (x *RemoveRequest) ProtoReflect() protoreflect.Message { + mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveRequest.ProtoReflect.Descriptor instead. 
+func (*RemoveRequest) Descriptor() ([]byte, []int) { + return file_loop_internal_pb_capabilities_registry_proto_rawDescGZIP(), []int{14} +} + +func (x *RemoveRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + type ConfigForCapabilityRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -811,7 +859,7 @@ type ConfigForCapabilityRequest struct { func (x *ConfigForCapabilityRequest) Reset() { *x = ConfigForCapabilityRequest{} if protoimpl.UnsafeEnabled { - mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[14] + mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -824,7 +872,7 @@ func (x *ConfigForCapabilityRequest) String() string { func (*ConfigForCapabilityRequest) ProtoMessage() {} func (x *ConfigForCapabilityRequest) ProtoReflect() protoreflect.Message { - mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[14] + mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -837,7 +885,7 @@ func (x *ConfigForCapabilityRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ConfigForCapabilityRequest.ProtoReflect.Descriptor instead. 
func (*ConfigForCapabilityRequest) Descriptor() ([]byte, []int) { - return file_loop_internal_pb_capabilities_registry_proto_rawDescGZIP(), []int{14} + return file_loop_internal_pb_capabilities_registry_proto_rawDescGZIP(), []int{15} } func (x *ConfigForCapabilityRequest) GetCapabilityID() string { @@ -865,7 +913,7 @@ type ConfigForCapabilityReply struct { func (x *ConfigForCapabilityReply) Reset() { *x = ConfigForCapabilityReply{} if protoimpl.UnsafeEnabled { - mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[15] + mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -878,7 +926,7 @@ func (x *ConfigForCapabilityReply) String() string { func (*ConfigForCapabilityReply) ProtoMessage() {} func (x *ConfigForCapabilityReply) ProtoReflect() protoreflect.Message { - mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[15] + mi := &file_loop_internal_pb_capabilities_registry_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -891,7 +939,7 @@ func (x *ConfigForCapabilityReply) ProtoReflect() protoreflect.Message { // Deprecated: Use ConfigForCapabilityReply.ProtoReflect.Descriptor instead. 
func (*ConfigForCapabilityReply) Descriptor() ([]byte, []int) { - return file_loop_internal_pb_capabilities_registry_proto_rawDescGZIP(), []int{15} + return file_loop_internal_pb_capabilities_registry_proto_rawDescGZIP(), []int{16} } func (x *ConfigForCapabilityReply) GetCapabilityConfig() *pb.CapabilityConfig { @@ -965,66 +1013,72 @@ var file_loop_internal_pb_capabilities_registry_proto_rawDesc = []byte{ 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x44, 0x12, 0x28, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x41, 0x50, 0x49, 0x54, 0x79, 0x70, 0x65, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x56, 0x0a, 0x1a, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x61, 0x70, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x6e, 0x49, - 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x64, 0x6f, 0x6e, 0x49, 0x44, 0x22, 0x5f, - 0x0a, 0x18, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x43, 0x0a, 0x11, 0x63, 0x61, - 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x61, 0x70, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x10, 0x63, - 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2a, - 0x6a, 0x0a, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x41, 0x50, 0x49, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x1c, 0x0a, 
0x18, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x5f, 0x41, 0x50, 0x49, - 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, - 0x1c, 0x0a, 0x18, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x5f, 0x41, 0x50, 0x49, 0x5f, 0x54, - 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x49, 0x47, 0x47, 0x45, 0x52, 0x10, 0x01, 0x12, 0x1c, 0x0a, - 0x18, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x5f, 0x41, 0x50, 0x49, 0x5f, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x10, 0x02, 0x32, 0xbf, 0x04, 0x0a, 0x14, - 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x67, 0x69, - 0x73, 0x74, 0x72, 0x79, 0x12, 0x3b, 0x0a, 0x09, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x6f, 0x64, - 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, - 0x00, 0x12, 0x59, 0x0a, 0x13, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, - 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x20, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, - 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x29, 0x0a, 0x03, - 0x47, 0x65, 0x74, 0x12, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, - 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x54, 0x72, - 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, 0x17, 0x2e, 0x6c, 0x6f, 
0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, - 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, - 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x41, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x41, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x65, - 0x6e, 0x73, 0x75, 0x73, 0x12, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x43, - 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x65, 0x6e, - 0x73, 0x75, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x09, 0x47, 0x65, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x12, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, - 0x65, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x31, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, - 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, - 0x69, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x31, 0x0a, 0x03, 0x41, 0x64, - 0x64, 0x12, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x41, 0x64, 0x64, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 
0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x43, 0x5a, - 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, - 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, - 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, - 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x1f, 0x0a, 0x0d, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x56, 0x0a, 0x1a, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x61, 0x70, + 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x6e, + 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x64, 0x6f, 0x6e, 0x49, 0x44, 0x22, + 0x5f, 0x0a, 0x18, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x43, 0x0a, 0x11, 0x63, + 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x61, + 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x10, + 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x2a, 0x6a, 0x0a, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x41, 
0x50, 0x49, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x1c, 0x0a, 0x18, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x5f, 0x41, 0x50, + 0x49, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, + 0x12, 0x1c, 0x0a, 0x18, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x5f, 0x41, 0x50, 0x49, 0x5f, + 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x49, 0x47, 0x47, 0x45, 0x52, 0x10, 0x01, 0x12, 0x1c, + 0x0a, 0x18, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x5f, 0x41, 0x50, 0x49, 0x5f, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x45, 0x10, 0x02, 0x32, 0xf8, 0x04, 0x0a, + 0x14, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x67, + 0x69, 0x73, 0x74, 0x72, 0x79, 0x12, 0x3b, 0x0a, 0x09, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x6f, + 0x64, 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x00, 0x12, 0x59, 0x0a, 0x13, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, + 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x20, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x43, 0x61, 0x70, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x29, 0x0a, + 0x03, 0x47, 0x65, 0x74, 0x12, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, + 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x54, + 0x72, 0x69, 
0x67, 0x67, 0x65, 0x72, 0x12, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, + 0x74, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, + 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x41, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, + 0x65, 0x6e, 0x73, 0x75, 0x73, 0x12, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, + 0x43, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x73, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x65, + 0x6e, 0x73, 0x75, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x09, 0x47, + 0x65, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x12, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x47, 0x65, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x31, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, + 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x31, 0x0a, 0x03, 0x41, + 0x64, 0x64, 0x12, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x41, 0x64, 0x64, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x37, + 0x0a, 0x06, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x12, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, + 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, + 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1040,7 +1094,7 @@ func file_loop_internal_pb_capabilities_registry_proto_rawDescGZIP() []byte { } var file_loop_internal_pb_capabilities_registry_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_loop_internal_pb_capabilities_registry_proto_msgTypes = make([]protoimpl.MessageInfo, 16) +var file_loop_internal_pb_capabilities_registry_proto_msgTypes = make([]protoimpl.MessageInfo, 17) var file_loop_internal_pb_capabilities_registry_proto_goTypes = []interface{}{ (ExecuteAPIType)(0), // 0: loop.ExecuteAPIType (*DON)(nil), // 1: loop.DON @@ -1057,37 +1111,40 @@ var file_loop_internal_pb_capabilities_registry_proto_goTypes = []interface{}{ (*ListReply)(nil), // 12: loop.ListReply (*GetTargetReply)(nil), // 13: loop.GetTargetReply (*AddRequest)(nil), // 14: loop.AddRequest - (*ConfigForCapabilityRequest)(nil), // 15: loop.ConfigForCapabilityRequest - (*ConfigForCapabilityReply)(nil), // 16: loop.ConfigForCapabilityReply - (*pb.CapabilityConfig)(nil), // 17: loop.CapabilityConfig - (*emptypb.Empty)(nil), // 18: 
google.protobuf.Empty + (*RemoveRequest)(nil), // 15: loop.RemoveRequest + (*ConfigForCapabilityRequest)(nil), // 16: loop.ConfigForCapabilityRequest + (*ConfigForCapabilityReply)(nil), // 17: loop.ConfigForCapabilityReply + (*pb.CapabilityConfig)(nil), // 18: loop.CapabilityConfig + (*emptypb.Empty)(nil), // 19: google.protobuf.Empty } var file_loop_internal_pb_capabilities_registry_proto_depIdxs = []int32{ 1, // 0: loop.LocalNodeReply.workflowDON:type_name -> loop.DON 1, // 1: loop.LocalNodeReply.CapabilityDONs:type_name -> loop.DON 0, // 2: loop.GetReply.type:type_name -> loop.ExecuteAPIType 0, // 3: loop.AddRequest.type:type_name -> loop.ExecuteAPIType - 17, // 4: loop.ConfigForCapabilityReply.capability_config:type_name -> loop.CapabilityConfig - 18, // 5: loop.CapabilitiesRegistry.LocalNode:input_type -> google.protobuf.Empty - 15, // 6: loop.CapabilitiesRegistry.ConfigForCapability:input_type -> loop.ConfigForCapabilityRequest + 18, // 4: loop.ConfigForCapabilityReply.capability_config:type_name -> loop.CapabilityConfig + 19, // 5: loop.CapabilitiesRegistry.LocalNode:input_type -> google.protobuf.Empty + 16, // 6: loop.CapabilitiesRegistry.ConfigForCapability:input_type -> loop.ConfigForCapabilityRequest 3, // 7: loop.CapabilitiesRegistry.Get:input_type -> loop.GetRequest 5, // 8: loop.CapabilitiesRegistry.GetTrigger:input_type -> loop.GetTriggerRequest 7, // 9: loop.CapabilitiesRegistry.GetAction:input_type -> loop.GetActionRequest 9, // 10: loop.CapabilitiesRegistry.GetConsensus:input_type -> loop.GetConsensusRequest 11, // 11: loop.CapabilitiesRegistry.GetTarget:input_type -> loop.GetTargetRequest - 18, // 12: loop.CapabilitiesRegistry.List:input_type -> google.protobuf.Empty + 19, // 12: loop.CapabilitiesRegistry.List:input_type -> google.protobuf.Empty 14, // 13: loop.CapabilitiesRegistry.Add:input_type -> loop.AddRequest - 2, // 14: loop.CapabilitiesRegistry.LocalNode:output_type -> loop.LocalNodeReply - 16, // 15: 
loop.CapabilitiesRegistry.ConfigForCapability:output_type -> loop.ConfigForCapabilityReply - 4, // 16: loop.CapabilitiesRegistry.Get:output_type -> loop.GetReply - 6, // 17: loop.CapabilitiesRegistry.GetTrigger:output_type -> loop.GetTriggerReply - 8, // 18: loop.CapabilitiesRegistry.GetAction:output_type -> loop.GetActionReply - 10, // 19: loop.CapabilitiesRegistry.GetConsensus:output_type -> loop.GetConsensusReply - 13, // 20: loop.CapabilitiesRegistry.GetTarget:output_type -> loop.GetTargetReply - 12, // 21: loop.CapabilitiesRegistry.List:output_type -> loop.ListReply - 18, // 22: loop.CapabilitiesRegistry.Add:output_type -> google.protobuf.Empty - 14, // [14:23] is the sub-list for method output_type - 5, // [5:14] is the sub-list for method input_type + 15, // 14: loop.CapabilitiesRegistry.Remove:input_type -> loop.RemoveRequest + 2, // 15: loop.CapabilitiesRegistry.LocalNode:output_type -> loop.LocalNodeReply + 17, // 16: loop.CapabilitiesRegistry.ConfigForCapability:output_type -> loop.ConfigForCapabilityReply + 4, // 17: loop.CapabilitiesRegistry.Get:output_type -> loop.GetReply + 6, // 18: loop.CapabilitiesRegistry.GetTrigger:output_type -> loop.GetTriggerReply + 8, // 19: loop.CapabilitiesRegistry.GetAction:output_type -> loop.GetActionReply + 10, // 20: loop.CapabilitiesRegistry.GetConsensus:output_type -> loop.GetConsensusReply + 13, // 21: loop.CapabilitiesRegistry.GetTarget:output_type -> loop.GetTargetReply + 12, // 22: loop.CapabilitiesRegistry.List:output_type -> loop.ListReply + 19, // 23: loop.CapabilitiesRegistry.Add:output_type -> google.protobuf.Empty + 19, // 24: loop.CapabilitiesRegistry.Remove:output_type -> google.protobuf.Empty + 15, // [15:25] is the sub-list for method output_type + 5, // [5:15] is the sub-list for method input_type 5, // [5:5] is the sub-list for extension type_name 5, // [5:5] is the sub-list for extension extendee 0, // [0:5] is the sub-list for field type_name @@ -1268,7 +1325,7 @@ func 
file_loop_internal_pb_capabilities_registry_proto_init() { } } file_loop_internal_pb_capabilities_registry_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigForCapabilityRequest); i { + switch v := v.(*RemoveRequest); i { case 0: return &v.state case 1: @@ -1280,6 +1337,18 @@ func file_loop_internal_pb_capabilities_registry_proto_init() { } } file_loop_internal_pb_capabilities_registry_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ConfigForCapabilityRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_loop_internal_pb_capabilities_registry_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ConfigForCapabilityReply); i { case 0: return &v.state @@ -1298,7 +1367,7 @@ func file_loop_internal_pb_capabilities_registry_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_loop_internal_pb_capabilities_registry_proto_rawDesc, NumEnums: 1, - NumMessages: 16, + NumMessages: 17, NumExtensions: 0, NumServices: 1, }, diff --git a/pkg/loop/internal/pb/capabilities_registry.proto b/pkg/loop/internal/pb/capabilities_registry.proto index a5ca7283d..12e2d908d 100644 --- a/pkg/loop/internal/pb/capabilities_registry.proto +++ b/pkg/loop/internal/pb/capabilities_registry.proto @@ -17,6 +17,7 @@ service CapabilitiesRegistry { rpc GetTarget (GetTargetRequest) returns (GetTargetReply) {} rpc List (google.protobuf.Empty) returns (ListReply) {} rpc Add (AddRequest) returns (google.protobuf.Empty) {} + rpc Remove (RemoveRequest) returns (google.protobuf.Empty) {} } enum ExecuteAPIType { @@ -104,6 +105,11 @@ message AddRequest { ExecuteAPIType type = 2; } +// Remove has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.CapabilitiesRegistry.Remove]. 
+message RemoveRequest { + string id = 1; +} + message ConfigForCapabilityRequest { string capabilityID = 1; uint32 donID = 2; diff --git a/pkg/loop/internal/pb/capabilities_registry_grpc.pb.go b/pkg/loop/internal/pb/capabilities_registry_grpc.pb.go index e7f1735be..6bba79252 100644 --- a/pkg/loop/internal/pb/capabilities_registry_grpc.pb.go +++ b/pkg/loop/internal/pb/capabilities_registry_grpc.pb.go @@ -29,6 +29,7 @@ const ( CapabilitiesRegistry_GetTarget_FullMethodName = "/loop.CapabilitiesRegistry/GetTarget" CapabilitiesRegistry_List_FullMethodName = "/loop.CapabilitiesRegistry/List" CapabilitiesRegistry_Add_FullMethodName = "/loop.CapabilitiesRegistry/Add" + CapabilitiesRegistry_Remove_FullMethodName = "/loop.CapabilitiesRegistry/Remove" ) // CapabilitiesRegistryClient is the client API for CapabilitiesRegistry service. @@ -44,6 +45,7 @@ type CapabilitiesRegistryClient interface { GetTarget(ctx context.Context, in *GetTargetRequest, opts ...grpc.CallOption) (*GetTargetReply, error) List(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*ListReply, error) Add(ctx context.Context, in *AddRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + Remove(ctx context.Context, in *RemoveRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) } type capabilitiesRegistryClient struct { @@ -135,6 +137,15 @@ func (c *capabilitiesRegistryClient) Add(ctx context.Context, in *AddRequest, op return out, nil } +func (c *capabilitiesRegistryClient) Remove(ctx context.Context, in *RemoveRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, CapabilitiesRegistry_Remove_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // CapabilitiesRegistryServer is the server API for CapabilitiesRegistry service. 
// All implementations must embed UnimplementedCapabilitiesRegistryServer // for forward compatibility @@ -148,6 +159,7 @@ type CapabilitiesRegistryServer interface { GetTarget(context.Context, *GetTargetRequest) (*GetTargetReply, error) List(context.Context, *emptypb.Empty) (*ListReply, error) Add(context.Context, *AddRequest) (*emptypb.Empty, error) + Remove(context.Context, *RemoveRequest) (*emptypb.Empty, error) mustEmbedUnimplementedCapabilitiesRegistryServer() } @@ -182,6 +194,9 @@ func (UnimplementedCapabilitiesRegistryServer) List(context.Context, *emptypb.Em func (UnimplementedCapabilitiesRegistryServer) Add(context.Context, *AddRequest) (*emptypb.Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method Add not implemented") } +func (UnimplementedCapabilitiesRegistryServer) Remove(context.Context, *RemoveRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method Remove not implemented") +} func (UnimplementedCapabilitiesRegistryServer) mustEmbedUnimplementedCapabilitiesRegistryServer() {} // UnsafeCapabilitiesRegistryServer may be embedded to opt out of forward compatibility for this service. 
@@ -357,6 +372,24 @@ func _CapabilitiesRegistry_Add_Handler(srv interface{}, ctx context.Context, dec return interceptor(ctx, in, info, handler) } +func _CapabilitiesRegistry_Remove_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CapabilitiesRegistryServer).Remove(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CapabilitiesRegistry_Remove_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CapabilitiesRegistryServer).Remove(ctx, req.(*RemoveRequest)) + } + return interceptor(ctx, in, info, handler) +} + // CapabilitiesRegistry_ServiceDesc is the grpc.ServiceDesc for CapabilitiesRegistry service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -400,6 +433,10 @@ var CapabilitiesRegistry_ServiceDesc = grpc.ServiceDesc{ MethodName: "Add", Handler: _CapabilitiesRegistry_Add_Handler, }, + { + MethodName: "Remove", + Handler: _CapabilitiesRegistry_Remove_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "loop/internal/pb/capabilities_registry.proto", diff --git a/pkg/loop/internal/pb/chain_writer_grpc.pb.go b/pkg/loop/internal/pb/chain_writer_grpc.pb.go deleted file mode 100644 index d49a4cdf4..000000000 --- a/pkg/loop/internal/pb/chain_writer_grpc.pb.go +++ /dev/null @@ -1,184 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
-// versions: -// - protoc-gen-go-grpc v1.3.0 -// - protoc v4.25.1 -// source: chain_writer.proto - -package pb - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - emptypb "google.golang.org/protobuf/types/known/emptypb" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -const ( - ChainWriter_SubmitTransaction_FullMethodName = "/loop.ChainWriter/SubmitTransaction" - ChainWriter_GetTransactionStatus_FullMethodName = "/loop.ChainWriter/GetTransactionStatus" - ChainWriter_GetFeeComponents_FullMethodName = "/loop.ChainWriter/GetFeeComponents" -) - -// ChainWriterClient is the client API for ChainWriter service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type ChainWriterClient interface { - SubmitTransaction(ctx context.Context, in *SubmitTransactionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - GetTransactionStatus(ctx context.Context, in *GetTransactionStatusRequest, opts ...grpc.CallOption) (*GetTransactionStatusReply, error) - GetFeeComponents(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetFeeComponentsReply, error) -} - -type chainWriterClient struct { - cc grpc.ClientConnInterface -} - -func NewChainWriterClient(cc grpc.ClientConnInterface) ChainWriterClient { - return &chainWriterClient{cc} -} - -func (c *chainWriterClient) SubmitTransaction(ctx context.Context, in *SubmitTransactionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, ChainWriter_SubmitTransaction_FullMethodName, in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *chainWriterClient) GetTransactionStatus(ctx context.Context, in *GetTransactionStatusRequest, opts ...grpc.CallOption) (*GetTransactionStatusReply, error) { - out := new(GetTransactionStatusReply) - err := c.cc.Invoke(ctx, ChainWriter_GetTransactionStatus_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *chainWriterClient) GetFeeComponents(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetFeeComponentsReply, error) { - out := new(GetFeeComponentsReply) - err := c.cc.Invoke(ctx, ChainWriter_GetFeeComponents_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ChainWriterServer is the server API for ChainWriter service. -// All implementations must embed UnimplementedChainWriterServer -// for forward compatibility -type ChainWriterServer interface { - SubmitTransaction(context.Context, *SubmitTransactionRequest) (*emptypb.Empty, error) - GetTransactionStatus(context.Context, *GetTransactionStatusRequest) (*GetTransactionStatusReply, error) - GetFeeComponents(context.Context, *emptypb.Empty) (*GetFeeComponentsReply, error) - mustEmbedUnimplementedChainWriterServer() -} - -// UnimplementedChainWriterServer must be embedded to have forward compatible implementations. 
-type UnimplementedChainWriterServer struct { -} - -func (UnimplementedChainWriterServer) SubmitTransaction(context.Context, *SubmitTransactionRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method SubmitTransaction not implemented") -} -func (UnimplementedChainWriterServer) GetTransactionStatus(context.Context, *GetTransactionStatusRequest) (*GetTransactionStatusReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetTransactionStatus not implemented") -} -func (UnimplementedChainWriterServer) GetFeeComponents(context.Context, *emptypb.Empty) (*GetFeeComponentsReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetFeeComponents not implemented") -} -func (UnimplementedChainWriterServer) mustEmbedUnimplementedChainWriterServer() {} - -// UnsafeChainWriterServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to ChainWriterServer will -// result in compilation errors. 
-type UnsafeChainWriterServer interface { - mustEmbedUnimplementedChainWriterServer() -} - -func RegisterChainWriterServer(s grpc.ServiceRegistrar, srv ChainWriterServer) { - s.RegisterService(&ChainWriter_ServiceDesc, srv) -} - -func _ChainWriter_SubmitTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SubmitTransactionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ChainWriterServer).SubmitTransaction(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: ChainWriter_SubmitTransaction_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ChainWriterServer).SubmitTransaction(ctx, req.(*SubmitTransactionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ChainWriter_GetTransactionStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTransactionStatusRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ChainWriterServer).GetTransactionStatus(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: ChainWriter_GetTransactionStatus_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ChainWriterServer).GetTransactionStatus(ctx, req.(*GetTransactionStatusRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ChainWriter_GetFeeComponents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(emptypb.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ChainWriterServer).GetFeeComponents(ctx, in) - } - info := 
&grpc.UnaryServerInfo{ - Server: srv, - FullMethod: ChainWriter_GetFeeComponents_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ChainWriterServer).GetFeeComponents(ctx, req.(*emptypb.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -// ChainWriter_ServiceDesc is the grpc.ServiceDesc for ChainWriter service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var ChainWriter_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "loop.ChainWriter", - HandlerType: (*ChainWriterServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "SubmitTransaction", - Handler: _ChainWriter_SubmitTransaction_Handler, - }, - { - MethodName: "GetTransactionStatus", - Handler: _ChainWriter_GetTransactionStatus_Handler, - }, - { - MethodName: "GetFeeComponents", - Handler: _ChainWriter_GetFeeComponents_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "chain_writer.proto", -} diff --git a/pkg/loop/internal/pb/contract_reader.pb.go b/pkg/loop/internal/pb/contract_reader.pb.go index 09683ccd7..c3b30fe47 100644 --- a/pkg/loop/internal/pb/contract_reader.pb.go +++ b/pkg/loop/internal/pb/contract_reader.pb.go @@ -505,6 +505,125 @@ func (x *QueryKeyRequest) GetAsValueType() bool { return false } +// QueryKeysRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.QueryKeys]. 
+type QueryKeysRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Filters []*ContractKeyFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + LimitAndSort *LimitAndSort `protobuf:"bytes,2,opt,name=limit_and_sort,json=limitAndSort,proto3" json:"limit_and_sort,omitempty"` +} + +func (x *QueryKeysRequest) Reset() { + *x = QueryKeysRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_contract_reader_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryKeysRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryKeysRequest) ProtoMessage() {} + +func (x *QueryKeysRequest) ProtoReflect() protoreflect.Message { + mi := &file_contract_reader_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryKeysRequest.ProtoReflect.Descriptor instead. 
+func (*QueryKeysRequest) Descriptor() ([]byte, []int) { + return file_contract_reader_proto_rawDescGZIP(), []int{3} +} + +func (x *QueryKeysRequest) GetFilters() []*ContractKeyFilter { + if x != nil { + return x.Filters + } + return nil +} + +func (x *QueryKeysRequest) GetLimitAndSort() *LimitAndSort { + if x != nil { + return x.LimitAndSort + } + return nil +} + +type ContractKeyFilter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Contract *BoundContract `protobuf:"bytes,1,opt,name=contract,proto3" json:"contract,omitempty"` + Filter *QueryKeyFilter `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + AsValueType bool `protobuf:"varint,4,opt,name=as_value_type,json=asValueType,proto3" json:"as_value_type,omitempty"` +} + +func (x *ContractKeyFilter) Reset() { + *x = ContractKeyFilter{} + if protoimpl.UnsafeEnabled { + mi := &file_contract_reader_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ContractKeyFilter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ContractKeyFilter) ProtoMessage() {} + +func (x *ContractKeyFilter) ProtoReflect() protoreflect.Message { + mi := &file_contract_reader_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ContractKeyFilter.ProtoReflect.Descriptor instead. 
+func (*ContractKeyFilter) Descriptor() ([]byte, []int) { + return file_contract_reader_proto_rawDescGZIP(), []int{4} +} + +func (x *ContractKeyFilter) GetContract() *BoundContract { + if x != nil { + return x.Contract + } + return nil +} + +func (x *ContractKeyFilter) GetFilter() *QueryKeyFilter { + if x != nil { + return x.Filter + } + return nil +} + +func (x *ContractKeyFilter) GetAsValueType() bool { + if x != nil { + return x.AsValueType + } + return false +} + // BindRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.Bind]. type BindRequest struct { state protoimpl.MessageState @@ -517,7 +636,7 @@ type BindRequest struct { func (x *BindRequest) Reset() { *x = BindRequest{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[3] + mi := &file_contract_reader_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -530,7 +649,7 @@ func (x *BindRequest) String() string { func (*BindRequest) ProtoMessage() {} func (x *BindRequest) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[3] + mi := &file_contract_reader_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -543,7 +662,7 @@ func (x *BindRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use BindRequest.ProtoReflect.Descriptor instead. 
func (*BindRequest) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{3} + return file_contract_reader_proto_rawDescGZIP(), []int{5} } func (x *BindRequest) GetBindings() []*BoundContract { @@ -565,7 +684,7 @@ type UnbindRequest struct { func (x *UnbindRequest) Reset() { *x = UnbindRequest{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[4] + mi := &file_contract_reader_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -578,7 +697,7 @@ func (x *UnbindRequest) String() string { func (*UnbindRequest) ProtoMessage() {} func (x *UnbindRequest) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[4] + mi := &file_contract_reader_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -591,7 +710,7 @@ func (x *UnbindRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UnbindRequest.ProtoReflect.Descriptor instead. 
func (*UnbindRequest) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{4} + return file_contract_reader_proto_rawDescGZIP(), []int{6} } func (x *UnbindRequest) GetBindings() []*BoundContract { @@ -613,7 +732,7 @@ type GetLatestValueReply struct { func (x *GetLatestValueReply) Reset() { *x = GetLatestValueReply{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[5] + mi := &file_contract_reader_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -626,7 +745,7 @@ func (x *GetLatestValueReply) String() string { func (*GetLatestValueReply) ProtoMessage() {} func (x *GetLatestValueReply) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[5] + mi := &file_contract_reader_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -639,7 +758,7 @@ func (x *GetLatestValueReply) ProtoReflect() protoreflect.Message { // Deprecated: Use GetLatestValueReply.ProtoReflect.Descriptor instead. func (*GetLatestValueReply) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{5} + return file_contract_reader_proto_rawDescGZIP(), []int{7} } func (x *GetLatestValueReply) GetRetVal() *VersionedBytes { @@ -649,6 +768,62 @@ func (x *GetLatestValueReply) GetRetVal() *VersionedBytes { return nil } +// GetLatestValueWithHeadDataReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.GetLatestValueWithHeadData]. 
+type GetLatestValueWithHeadDataReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + RetVal *VersionedBytes `protobuf:"bytes,1,opt,name=ret_val,json=retVal,proto3" json:"ret_val,omitempty"` + HeadData *Head `protobuf:"bytes,2,opt,name=head_data,json=headData,proto3" json:"head_data,omitempty"` +} + +func (x *GetLatestValueWithHeadDataReply) Reset() { + *x = GetLatestValueWithHeadDataReply{} + if protoimpl.UnsafeEnabled { + mi := &file_contract_reader_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetLatestValueWithHeadDataReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetLatestValueWithHeadDataReply) ProtoMessage() {} + +func (x *GetLatestValueWithHeadDataReply) ProtoReflect() protoreflect.Message { + mi := &file_contract_reader_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetLatestValueWithHeadDataReply.ProtoReflect.Descriptor instead. +func (*GetLatestValueWithHeadDataReply) Descriptor() ([]byte, []int) { + return file_contract_reader_proto_rawDescGZIP(), []int{8} +} + +func (x *GetLatestValueWithHeadDataReply) GetRetVal() *VersionedBytes { + if x != nil { + return x.RetVal + } + return nil +} + +func (x *GetLatestValueWithHeadDataReply) GetHeadData() *Head { + if x != nil { + return x.HeadData + } + return nil +} + // BatchGetLatestValuesReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.BatchGetLatestValues]. 
type BatchGetLatestValuesReply struct { state protoimpl.MessageState @@ -661,7 +836,7 @@ type BatchGetLatestValuesReply struct { func (x *BatchGetLatestValuesReply) Reset() { *x = BatchGetLatestValuesReply{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[6] + mi := &file_contract_reader_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -674,7 +849,7 @@ func (x *BatchGetLatestValuesReply) String() string { func (*BatchGetLatestValuesReply) ProtoMessage() {} func (x *BatchGetLatestValuesReply) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[6] + mi := &file_contract_reader_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -687,7 +862,7 @@ func (x *BatchGetLatestValuesReply) ProtoReflect() protoreflect.Message { // Deprecated: Use BatchGetLatestValuesReply.ProtoReflect.Descriptor instead. 
func (*BatchGetLatestValuesReply) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{6} + return file_contract_reader_proto_rawDescGZIP(), []int{9} } func (x *BatchGetLatestValuesReply) GetResults() []*ContractBatchResult { @@ -709,7 +884,7 @@ type QueryKeyReply struct { func (x *QueryKeyReply) Reset() { *x = QueryKeyReply{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[7] + mi := &file_contract_reader_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -722,7 +897,7 @@ func (x *QueryKeyReply) String() string { func (*QueryKeyReply) ProtoMessage() {} func (x *QueryKeyReply) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[7] + mi := &file_contract_reader_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -735,7 +910,7 @@ func (x *QueryKeyReply) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryKeyReply.ProtoReflect.Descriptor instead. func (*QueryKeyReply) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{7} + return file_contract_reader_proto_rawDescGZIP(), []int{10} } func (x *QueryKeyReply) GetSequences() []*Sequence { @@ -745,6 +920,54 @@ func (x *QueryKeyReply) GetSequences() []*Sequence { return nil } +// QueryKeysReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.QueryKeys]. 
+type QueryKeysReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Sequences []*SequenceWithKey `protobuf:"bytes,1,rep,name=sequences,proto3" json:"sequences,omitempty"` +} + +func (x *QueryKeysReply) Reset() { + *x = QueryKeysReply{} + if protoimpl.UnsafeEnabled { + mi := &file_contract_reader_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryKeysReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryKeysReply) ProtoMessage() {} + +func (x *QueryKeysReply) ProtoReflect() protoreflect.Message { + mi := &file_contract_reader_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryKeysReply.ProtoReflect.Descriptor instead. +func (*QueryKeysReply) Descriptor() ([]byte, []int) { + return file_contract_reader_proto_rawDescGZIP(), []int{11} +} + +func (x *QueryKeysReply) GetSequences() []*SequenceWithKey { + if x != nil { + return x.Sequences + } + return nil +} + // ContractBatch is gRPC adapter for the BatchGetLatestValuesRequest struct map value [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.BatchGetLatestValuesRequest]. 
type ContractBatch struct { state protoimpl.MessageState @@ -758,7 +981,7 @@ type ContractBatch struct { func (x *ContractBatch) Reset() { *x = ContractBatch{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[8] + mi := &file_contract_reader_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -771,7 +994,7 @@ func (x *ContractBatch) String() string { func (*ContractBatch) ProtoMessage() {} func (x *ContractBatch) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[8] + mi := &file_contract_reader_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -784,7 +1007,7 @@ func (x *ContractBatch) ProtoReflect() protoreflect.Message { // Deprecated: Use ContractBatch.ProtoReflect.Descriptor instead. func (*ContractBatch) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{8} + return file_contract_reader_proto_rawDescGZIP(), []int{12} } func (x *ContractBatch) GetContract() *BoundContract { @@ -815,7 +1038,7 @@ type BatchRead struct { func (x *BatchRead) Reset() { *x = BatchRead{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[9] + mi := &file_contract_reader_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -828,7 +1051,7 @@ func (x *BatchRead) String() string { func (*BatchRead) ProtoMessage() {} func (x *BatchRead) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[9] + mi := &file_contract_reader_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -841,7 +1064,7 @@ func (x *BatchRead) ProtoReflect() protoreflect.Message { // Deprecated: Use BatchRead.ProtoReflect.Descriptor instead. 
func (*BatchRead) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{9} + return file_contract_reader_proto_rawDescGZIP(), []int{13} } func (x *BatchRead) GetReadName() string { @@ -878,7 +1101,7 @@ type ContractBatchResult struct { func (x *ContractBatchResult) Reset() { *x = ContractBatchResult{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[10] + mi := &file_contract_reader_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -891,7 +1114,7 @@ func (x *ContractBatchResult) String() string { func (*ContractBatchResult) ProtoMessage() {} func (x *ContractBatchResult) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[10] + mi := &file_contract_reader_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -904,7 +1127,7 @@ func (x *ContractBatchResult) ProtoReflect() protoreflect.Message { // Deprecated: Use ContractBatchResult.ProtoReflect.Descriptor instead. 
func (*ContractBatchResult) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{10} + return file_contract_reader_proto_rawDescGZIP(), []int{14} } func (x *ContractBatchResult) GetContract() *BoundContract { @@ -935,7 +1158,7 @@ type BatchReadResult struct { func (x *BatchReadResult) Reset() { *x = BatchReadResult{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[11] + mi := &file_contract_reader_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -948,7 +1171,7 @@ func (x *BatchReadResult) String() string { func (*BatchReadResult) ProtoMessage() {} func (x *BatchReadResult) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[11] + mi := &file_contract_reader_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -961,7 +1184,7 @@ func (x *BatchReadResult) ProtoReflect() protoreflect.Message { // Deprecated: Use BatchReadResult.ProtoReflect.Descriptor instead. 
func (*BatchReadResult) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{11} + return file_contract_reader_proto_rawDescGZIP(), []int{15} } func (x *BatchReadResult) GetReadName() string { @@ -999,7 +1222,7 @@ type Head struct { func (x *Head) Reset() { *x = Head{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[12] + mi := &file_contract_reader_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1012,7 +1235,7 @@ func (x *Head) String() string { func (*Head) ProtoMessage() {} func (x *Head) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[12] + mi := &file_contract_reader_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1025,7 +1248,7 @@ func (x *Head) ProtoReflect() protoreflect.Message { // Deprecated: Use Head.ProtoReflect.Descriptor instead. 
func (*Head) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{12} + return file_contract_reader_proto_rawDescGZIP(), []int{16} } func (x *Head) GetHeight() string { @@ -1063,7 +1286,7 @@ type Sequence struct { func (x *Sequence) Reset() { *x = Sequence{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[13] + mi := &file_contract_reader_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1076,7 +1299,7 @@ func (x *Sequence) String() string { func (*Sequence) ProtoMessage() {} func (x *Sequence) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[13] + mi := &file_contract_reader_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1089,7 +1312,7 @@ func (x *Sequence) ProtoReflect() protoreflect.Message { // Deprecated: Use Sequence.ProtoReflect.Descriptor instead. 
func (*Sequence) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{13} + return file_contract_reader_proto_rawDescGZIP(), []int{17} } func (x *Sequence) GetSequenceCursor() string { @@ -1113,6 +1336,77 @@ func (x *Sequence) GetData() *VersionedBytes { return nil } +type SequenceWithKey struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + SequenceCursor string `protobuf:"bytes,1,opt,name=sequence_cursor,json=sequenceCursor,proto3" json:"sequence_cursor,omitempty"` + Head *Head `protobuf:"bytes,2,opt,name=head,proto3" json:"head,omitempty"` + Data *VersionedBytes `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` + Key string `protobuf:"bytes,4,opt,name=key,proto3" json:"key,omitempty"` +} + +func (x *SequenceWithKey) Reset() { + *x = SequenceWithKey{} + if protoimpl.UnsafeEnabled { + mi := &file_contract_reader_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SequenceWithKey) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SequenceWithKey) ProtoMessage() {} + +func (x *SequenceWithKey) ProtoReflect() protoreflect.Message { + mi := &file_contract_reader_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SequenceWithKey.ProtoReflect.Descriptor instead. 
+func (*SequenceWithKey) Descriptor() ([]byte, []int) { + return file_contract_reader_proto_rawDescGZIP(), []int{18} +} + +func (x *SequenceWithKey) GetSequenceCursor() string { + if x != nil { + return x.SequenceCursor + } + return "" +} + +func (x *SequenceWithKey) GetHead() *Head { + if x != nil { + return x.Head + } + return nil +} + +func (x *SequenceWithKey) GetData() *VersionedBytes { + if x != nil { + return x.Data + } + return nil +} + +func (x *SequenceWithKey) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + // BoundContract represents a [github.com/smartcontractkit/chainlink-common/pkg/types.BoundContract]. type BoundContract struct { state protoimpl.MessageState @@ -1126,7 +1420,7 @@ type BoundContract struct { func (x *BoundContract) Reset() { *x = BoundContract{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[14] + mi := &file_contract_reader_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1139,7 +1433,7 @@ func (x *BoundContract) String() string { func (*BoundContract) ProtoMessage() {} func (x *BoundContract) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[14] + mi := &file_contract_reader_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1152,7 +1446,7 @@ func (x *BoundContract) ProtoReflect() protoreflect.Message { // Deprecated: Use BoundContract.ProtoReflect.Descriptor instead. 
func (*BoundContract) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{14} + return file_contract_reader_proto_rawDescGZIP(), []int{19} } func (x *BoundContract) GetAddress() string { @@ -1182,7 +1476,7 @@ type QueryKeyFilter struct { func (x *QueryKeyFilter) Reset() { *x = QueryKeyFilter{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[15] + mi := &file_contract_reader_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1195,7 +1489,7 @@ func (x *QueryKeyFilter) String() string { func (*QueryKeyFilter) ProtoMessage() {} func (x *QueryKeyFilter) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[15] + mi := &file_contract_reader_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1208,7 +1502,7 @@ func (x *QueryKeyFilter) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryKeyFilter.ProtoReflect.Descriptor instead. 
func (*QueryKeyFilter) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{15} + return file_contract_reader_proto_rawDescGZIP(), []int{20} } func (x *QueryKeyFilter) GetKey() string { @@ -1242,7 +1536,7 @@ type Expression struct { func (x *Expression) Reset() { *x = Expression{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[16] + mi := &file_contract_reader_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1255,7 +1549,7 @@ func (x *Expression) String() string { func (*Expression) ProtoMessage() {} func (x *Expression) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[16] + mi := &file_contract_reader_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1268,7 +1562,7 @@ func (x *Expression) ProtoReflect() protoreflect.Message { // Deprecated: Use Expression.ProtoReflect.Descriptor instead. 
func (*Expression) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{16} + return file_contract_reader_proto_rawDescGZIP(), []int{21} } func (m *Expression) GetEvaluator() isExpression_Evaluator { @@ -1320,7 +1614,7 @@ type BooleanExpression struct { func (x *BooleanExpression) Reset() { *x = BooleanExpression{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[17] + mi := &file_contract_reader_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1333,7 +1627,7 @@ func (x *BooleanExpression) String() string { func (*BooleanExpression) ProtoMessage() {} func (x *BooleanExpression) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[17] + mi := &file_contract_reader_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1346,7 +1640,7 @@ func (x *BooleanExpression) ProtoReflect() protoreflect.Message { // Deprecated: Use BooleanExpression.ProtoReflect.Descriptor instead. 
func (*BooleanExpression) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{17} + return file_contract_reader_proto_rawDescGZIP(), []int{22} } func (x *BooleanExpression) GetBooleanOperator() BooleanOperator { @@ -1374,7 +1668,7 @@ type And struct { func (x *And) Reset() { *x = And{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[18] + mi := &file_contract_reader_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1387,7 +1681,7 @@ func (x *And) String() string { func (*And) ProtoMessage() {} func (x *And) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[18] + mi := &file_contract_reader_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1400,7 +1694,7 @@ func (x *And) ProtoReflect() protoreflect.Message { // Deprecated: Use And.ProtoReflect.Descriptor instead. func (*And) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{18} + return file_contract_reader_proto_rawDescGZIP(), []int{23} } func (x *And) GetExpr() []*Expression { @@ -1421,7 +1715,7 @@ type Or struct { func (x *Or) Reset() { *x = Or{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[19] + mi := &file_contract_reader_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1434,7 +1728,7 @@ func (x *Or) String() string { func (*Or) ProtoMessage() {} func (x *Or) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[19] + mi := &file_contract_reader_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1447,7 +1741,7 @@ func (x *Or) ProtoReflect() protoreflect.Message { // Deprecated: Use Or.ProtoReflect.Descriptor instead. 
func (*Or) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{19} + return file_contract_reader_proto_rawDescGZIP(), []int{24} } func (x *Or) GetExpr() []*Expression { @@ -1469,7 +1763,7 @@ type ValueComparator struct { func (x *ValueComparator) Reset() { *x = ValueComparator{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[20] + mi := &file_contract_reader_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1482,7 +1776,7 @@ func (x *ValueComparator) String() string { func (*ValueComparator) ProtoMessage() {} func (x *ValueComparator) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[20] + mi := &file_contract_reader_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1495,7 +1789,7 @@ func (x *ValueComparator) ProtoReflect() protoreflect.Message { // Deprecated: Use ValueComparator.ProtoReflect.Descriptor instead. 
func (*ValueComparator) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{20} + return file_contract_reader_proto_rawDescGZIP(), []int{25} } func (x *ValueComparator) GetValue() *VersionedBytes { @@ -1524,7 +1818,7 @@ type Comparator struct { func (x *Comparator) Reset() { *x = Comparator{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[21] + mi := &file_contract_reader_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1537,7 +1831,7 @@ func (x *Comparator) String() string { func (*Comparator) ProtoMessage() {} func (x *Comparator) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[21] + mi := &file_contract_reader_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1550,7 +1844,7 @@ func (x *Comparator) ProtoReflect() protoreflect.Message { // Deprecated: Use Comparator.ProtoReflect.Descriptor instead. 
func (*Comparator) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{21} + return file_contract_reader_proto_rawDescGZIP(), []int{26} } func (x *Comparator) GetName() string { @@ -1579,7 +1873,7 @@ type Block struct { func (x *Block) Reset() { *x = Block{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[22] + mi := &file_contract_reader_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1592,7 +1886,7 @@ func (x *Block) String() string { func (*Block) ProtoMessage() {} func (x *Block) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[22] + mi := &file_contract_reader_proto_msgTypes[27] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1605,7 +1899,7 @@ func (x *Block) ProtoReflect() protoreflect.Message { // Deprecated: Use Block.ProtoReflect.Descriptor instead. 
func (*Block) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{22} + return file_contract_reader_proto_rawDescGZIP(), []int{27} } func (x *Block) GetBlockNumber() string { @@ -1634,7 +1928,7 @@ type Timestamp struct { func (x *Timestamp) Reset() { *x = Timestamp{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[23] + mi := &file_contract_reader_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1647,7 +1941,7 @@ func (x *Timestamp) String() string { func (*Timestamp) ProtoMessage() {} func (x *Timestamp) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[23] + mi := &file_contract_reader_proto_msgTypes[28] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1660,7 +1954,7 @@ func (x *Timestamp) ProtoReflect() protoreflect.Message { // Deprecated: Use Timestamp.ProtoReflect.Descriptor instead. 
func (*Timestamp) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{23} + return file_contract_reader_proto_rawDescGZIP(), []int{28} } func (x *Timestamp) GetTimestamp() uint64 { @@ -1688,7 +1982,7 @@ type TxHash struct { func (x *TxHash) Reset() { *x = TxHash{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[24] + mi := &file_contract_reader_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1701,7 +1995,7 @@ func (x *TxHash) String() string { func (*TxHash) ProtoMessage() {} func (x *TxHash) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[24] + mi := &file_contract_reader_proto_msgTypes[29] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1714,7 +2008,7 @@ func (x *TxHash) ProtoReflect() protoreflect.Message { // Deprecated: Use TxHash.ProtoReflect.Descriptor instead. 
func (*TxHash) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{24} + return file_contract_reader_proto_rawDescGZIP(), []int{29} } func (x *TxHash) GetTxHash() string { @@ -1743,7 +2037,7 @@ type Primitive struct { func (x *Primitive) Reset() { *x = Primitive{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[25] + mi := &file_contract_reader_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1756,7 +2050,7 @@ func (x *Primitive) String() string { func (*Primitive) ProtoMessage() {} func (x *Primitive) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[25] + mi := &file_contract_reader_proto_msgTypes[30] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1769,7 +2063,7 @@ func (x *Primitive) ProtoReflect() protoreflect.Message { // Deprecated: Use Primitive.ProtoReflect.Descriptor instead. 
func (*Primitive) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{25} + return file_contract_reader_proto_rawDescGZIP(), []int{30} } func (m *Primitive) GetPrimitive() isPrimitive_Primitive { @@ -1862,7 +2156,7 @@ type Limit struct { func (x *Limit) Reset() { *x = Limit{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[26] + mi := &file_contract_reader_proto_msgTypes[31] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1875,7 +2169,7 @@ func (x *Limit) String() string { func (*Limit) ProtoMessage() {} func (x *Limit) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[26] + mi := &file_contract_reader_proto_msgTypes[31] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1888,7 +2182,7 @@ func (x *Limit) ProtoReflect() protoreflect.Message { // Deprecated: Use Limit.ProtoReflect.Descriptor instead. 
func (*Limit) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{26} + return file_contract_reader_proto_rawDescGZIP(), []int{31} } func (x *Limit) GetCursor() string { @@ -1924,7 +2218,7 @@ type SortBy struct { func (x *SortBy) Reset() { *x = SortBy{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[27] + mi := &file_contract_reader_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1937,7 +2231,7 @@ func (x *SortBy) String() string { func (*SortBy) ProtoMessage() {} func (x *SortBy) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[27] + mi := &file_contract_reader_proto_msgTypes[32] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1950,7 +2244,7 @@ func (x *SortBy) ProtoReflect() protoreflect.Message { // Deprecated: Use SortBy.ProtoReflect.Descriptor instead. 
func (*SortBy) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{27} + return file_contract_reader_proto_rawDescGZIP(), []int{32} } func (x *SortBy) GetSortType() SortType { @@ -1980,7 +2274,7 @@ type LimitAndSort struct { func (x *LimitAndSort) Reset() { *x = LimitAndSort{} if protoimpl.UnsafeEnabled { - mi := &file_contract_reader_proto_msgTypes[28] + mi := &file_contract_reader_proto_msgTypes[33] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1993,7 +2287,7 @@ func (x *LimitAndSort) String() string { func (*LimitAndSort) ProtoMessage() {} func (x *LimitAndSort) ProtoReflect() protoreflect.Message { - mi := &file_contract_reader_proto_msgTypes[28] + mi := &file_contract_reader_proto_msgTypes[33] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2006,7 +2300,7 @@ func (x *LimitAndSort) ProtoReflect() protoreflect.Message { // Deprecated: Use LimitAndSort.ProtoReflect.Descriptor instead. 
func (*LimitAndSort) Descriptor() ([]byte, []int) { - return file_contract_reader_proto_rawDescGZIP(), []int{28} + return file_contract_reader_proto_rawDescGZIP(), []int{33} } func (x *LimitAndSort) GetSortBy() []*SortBy { @@ -2061,216 +2355,266 @@ var file_contract_reader_proto_rawDesc = []byte{ 0x69, 0x6d, 0x69, 0x74, 0x41, 0x6e, 0x64, 0x53, 0x6f, 0x72, 0x74, 0x12, 0x22, 0x0a, 0x0d, 0x61, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x61, 0x73, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, - 0x3e, 0x0a, 0x0b, 0x42, 0x69, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, - 0x0a, 0x08, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x08, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, - 0x40, 0x0a, 0x0d, 0x55, 0x6e, 0x62, 0x69, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x2f, 0x0a, 0x08, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x08, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, - 0x73, 0x22, 0x44, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2d, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x5f, - 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, - 0x06, 0x72, 0x65, 0x74, 0x56, 0x61, 0x6c, 0x22, 0x50, 0x0a, 0x19, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x12, 0x33, 
0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x3d, 0x0a, 0x0d, 0x51, 0x75, 0x65, - 0x72, 0x79, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2c, 0x0a, 0x09, 0x73, 0x65, - 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, - 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x09, 0x73, - 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0x67, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x74, - 0x72, 0x61, 0x63, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, - 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x25, 0x0a, 0x05, 0x72, 0x65, - 0x61, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, 0x64, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64, - 0x73, 0x22, 0x8b, 0x01, 0x0a, 0x09, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, 0x64, 0x12, - 0x1b, 0x0a, 0x09, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x61, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2c, 0x0a, 0x06, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, - 0x65, 0x73, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x33, 0x0a, 0x0a, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x14, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, - 0x79, 0x74, 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x56, 0x61, 0x6c, 0x22, - 0x77, 0x0a, 0x13, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, + 0x7f, 0x0a, 0x10, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x74, + 0x72, 0x61, 0x63, 0x74, 0x4b, 0x65, 0x79, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x07, 0x66, + 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x12, 0x38, 0x0a, 0x0e, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, + 0x61, 0x6e, 0x64, 0x5f, 0x73, 0x6f, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x41, 0x6e, 0x64, 0x53, 0x6f, + 0x72, 0x74, 0x52, 0x0c, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x41, 0x6e, 0x64, 0x53, 0x6f, 0x72, 0x74, + 0x22, 0x96, 0x01, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x4b, 0x65, 0x79, + 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x08, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x2f, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, - 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, - 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x79, 0x0a, 0x0f, 0x42, 0x61, 0x74, 0x63, - 0x68, 0x52, 0x65, 
0x61, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x72, - 0x65, 0x61, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x72, 0x65, 0x61, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, - 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x56, 0x61, 0x6c, 0x12, 0x14, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x22, 0x50, 0x0a, 0x04, 0x48, 0x65, 0x61, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x68, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x7d, 0x0a, 0x08, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, - 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x75, - 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, 0x71, 0x75, - 0x65, 0x6e, 0x63, 0x65, 0x43, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x1e, 0x0a, 0x04, 0x68, 0x65, - 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x48, 0x65, 0x61, 0x64, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x12, 0x28, 0x0a, 0x04, 0x64, 0x61, - 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x04, - 0x64, 0x61, 0x74, 0x61, 0x22, 0x3d, 0x0a, 0x0d, 0x42, 
0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, - 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x22, 0x54, 0x0a, 0x0e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x46, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x30, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x65, - 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x94, 0x01, 0x0a, 0x0a, 0x45, 0x78, - 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6d, - 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x48, 0x00, 0x52, 0x09, - 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x48, 0x0a, 0x12, 0x62, 0x6f, 0x6f, - 0x6c, 0x65, 0x61, 0x6e, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x6f, - 0x6c, 0x65, 0x61, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, - 0x52, 0x11, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x65, 0x76, 0x61, 0x6c, 0x75, 0x61, 0x74, 0x6f, 0x72, - 0x22, 0x87, 0x01, 0x0a, 0x11, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x45, 0x78, 0x70, 0x72, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 
0x61, - 0x6e, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x4f, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x0f, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, - 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x30, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x72, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0a, - 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x2b, 0x0a, 0x03, 0x41, 0x6e, - 0x64, 0x12, 0x24, 0x0a, 0x04, 0x65, 0x78, 0x70, 0x72, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x52, 0x04, 0x65, 0x78, 0x70, 0x72, 0x22, 0x2a, 0x0a, 0x02, 0x4f, 0x72, 0x12, 0x24, 0x0a, - 0x04, 0x65, 0x78, 0x70, 0x72, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x65, - 0x78, 0x70, 0x72, 0x22, 0x73, 0x0a, 0x0f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x2c, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, + 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, + 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x22, 0x0a, 0x0d, 0x61, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x61, 0x73, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0x3e, 0x0a, 0x0b, 0x42, 0x69, 0x6e, + 0x64, 0x52, 0x65, 0x71, 0x75, 
0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x08, 0x62, 0x69, 0x6e, 0x64, + 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, + 0x08, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x40, 0x0a, 0x0d, 0x55, 0x6e, 0x62, + 0x69, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x08, 0x62, 0x69, + 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, + 0x74, 0x52, 0x08, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x44, 0x0a, 0x13, 0x47, + 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x2d, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x72, 0x65, 0x74, 0x56, 0x61, + 0x6c, 0x22, 0x79, 0x0a, 0x1f, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x57, 0x69, 0x74, 0x68, 0x48, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2d, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x12, 0x34, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6d, 0x70, - 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x08, - 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x64, 0x0a, 0x0a, 
0x43, 0x6f, 0x6d, 0x70, - 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x42, 0x0a, 0x11, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x10, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x22, 0x60, - 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x34, 0x0a, 0x08, 0x6f, 0x70, - 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x4f, 0x70, - 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, - 0x22, 0x5f, 0x0a, 0x09, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1c, 0x0a, - 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, - 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x34, 0x0a, 0x08, 0x6f, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, - 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x4f, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x22, 0x21, 0x0a, 0x06, 0x54, 0x78, 0x48, 0x61, 0x73, 0x68, 0x12, 0x17, 0x0a, 0x07, 0x74, - 0x78, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x78, - 0x48, 0x61, 
0x73, 0x68, 0x22, 0xff, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x12, 0x32, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, - 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6f, 0x6d, 0x70, - 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x23, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x32, 0x0a, 0x0a, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, - 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, - 0x2f, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x12, 0x27, 0x0a, 0x07, 0x74, 0x78, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x78, 0x48, 0x61, 0x73, 0x68, 0x48, - 0x00, 0x52, 0x06, 0x74, 0x78, 0x48, 0x61, 0x73, 0x68, 0x42, 0x0b, 0x0a, 0x09, 0x70, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x22, 0x8d, 0x01, 0x0a, 0x05, 0x4c, 0x69, 0x6d, 0x69, 0x74, - 0x12, 0x1b, 0x0a, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x48, 0x00, 0x52, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x38, 0x0a, - 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 
0x43, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x44, 0x69, - 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x01, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x09, 0x0a, - 0x07, 0x5f, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x64, 0x69, 0x72, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x68, 0x0a, 0x06, 0x53, 0x6f, 0x72, 0x74, 0x42, 0x79, - 0x12, 0x2b, 0x0a, 0x09, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x54, - 0x79, 0x70, 0x65, 0x52, 0x08, 0x73, 0x6f, 0x72, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x31, 0x0a, - 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x44, 0x69, 0x72, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x58, 0x0a, 0x0c, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x41, 0x6e, 0x64, 0x53, 0x6f, 0x72, 0x74, - 0x12, 0x25, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x52, - 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x21, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x69, - 0x6d, 0x69, 0x74, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x2a, 0x47, 0x0a, 0x12, 0x43, 0x6f, - 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, - 0x12, 0x06, 0x0a, 0x02, 0x45, 0x71, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x4e, 0x65, 0x71, 0x10, - 0x01, 0x12, 0x06, 0x0a, 0x02, 0x47, 0x74, 0x10, 0x02, 0x12, 0x06, 0x0a, 0x02, 0x4c, 
0x74, 0x10, - 0x03, 0x12, 0x07, 0x0a, 0x03, 0x47, 0x74, 0x65, 0x10, 0x04, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x74, - 0x65, 0x10, 0x05, 0x2a, 0x22, 0x0a, 0x0f, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x4f, 0x70, - 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x4e, 0x44, 0x10, 0x00, 0x12, - 0x06, 0x0a, 0x02, 0x4f, 0x52, 0x10, 0x01, 0x2a, 0x2c, 0x0a, 0x0a, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x6e, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x72, 0x6d, 0x65, 0x64, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x69, - 0x7a, 0x65, 0x64, 0x10, 0x01, 0x2a, 0x2f, 0x0a, 0x0f, 0x43, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x44, - 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0d, 0x0a, 0x09, 0x50, 0x72, 0x65, 0x63, - 0x65, 0x64, 0x69, 0x6e, 0x67, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x6f, 0x6c, 0x6c, 0x6f, - 0x77, 0x69, 0x6e, 0x67, 0x10, 0x01, 0x2a, 0x22, 0x0a, 0x0d, 0x53, 0x6f, 0x72, 0x74, 0x44, 0x69, - 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x73, 0x63, 0x10, 0x00, - 0x12, 0x08, 0x0a, 0x04, 0x44, 0x65, 0x73, 0x63, 0x10, 0x01, 0x2a, 0x3e, 0x0a, 0x08, 0x53, 0x6f, - 0x72, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x6f, 0x72, 0x74, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x6f, 0x72, - 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x6f, 0x72, 0x74, - 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x10, 0x02, 0x32, 0xe2, 0x02, 0x0a, 0x0e, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x4a, 0x0a, - 0x0e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6c, + 0x73, 0x69, 0x6f, 0x6e, 
0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x72, 0x65, 0x74, + 0x56, 0x61, 0x6c, 0x12, 0x27, 0x0a, 0x09, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x48, 0x65, + 0x61, 0x64, 0x52, 0x08, 0x68, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x22, 0x50, 0x0a, 0x19, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x33, 0x0a, 0x07, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x3d, + 0x0a, 0x0d, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, + 0x2c, 0x0a, 0x09, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, + 0x63, 0x65, 0x52, 0x09, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0x45, 0x0a, + 0x0e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, + 0x33, 0x0a, 0x09, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, + 0x63, 0x65, 0x57, 0x69, 0x74, 0x68, 0x4b, 0x65, 0x79, 0x52, 0x09, 0x73, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x65, 0x73, 0x22, 0x67, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, + 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 
0x63, 0x74, 0x52, 0x08, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x25, 0x0a, 0x05, 0x72, 0x65, 0x61, 0x64, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x52, 0x65, 0x61, 0x64, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64, 0x73, 0x22, 0x8b, 0x01, + 0x0a, 0x09, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x72, + 0x65, 0x61, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x72, 0x65, 0x61, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2c, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x33, 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x56, 0x61, 0x6c, 0x22, 0x77, 0x0a, 0x13, 0x43, + 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x75, 0x6e, + 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x61, 0x63, 0x74, 0x12, 0x2f, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x73, 0x22, 0x79, 0x0a, 0x0f, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, + 
0x64, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x65, 0x61, 0x64, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x61, 0x64, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x76, + 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x09, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x56, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x50, 0x0a, 0x04, 0x48, 0x65, 0x61, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, + 0x61, 0x73, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x22, 0x7d, 0x0a, 0x08, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x27, 0x0a, + 0x0f, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, + 0x43, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x1e, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x48, 0x65, 0x61, 0x64, + 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x12, 0x28, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, + 0x22, 0x96, 0x01, 0x0a, 0x0f, 0x53, 
0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x57, 0x69, 0x74, + 0x68, 0x4b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, + 0x5f, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, + 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x43, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x1e, 0x0a, + 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x12, 0x28, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x0a, 0x0d, 0x42, 0x6f, 0x75, + 0x6e, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x64, + 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x64, 0x64, + 0x72, 0x65, 0x73, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x54, 0x0a, 0x0e, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x4b, 0x65, 0x79, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x30, 0x0a, 0x0a, + 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x94, + 0x01, 0x0a, 0x0a, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, + 0x09, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x01, 
0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x48, 0x00, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x48, + 0x0a, 0x12, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x11, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x45, 0x78, + 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x65, 0x76, 0x61, 0x6c, + 0x75, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x87, 0x01, 0x0a, 0x11, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, + 0x6e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x62, + 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x6f, 0x6f, + 0x6c, 0x65, 0x61, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x0f, 0x62, 0x6f, + 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x30, 0x0a, + 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x22, + 0x2b, 0x0a, 0x03, 0x41, 0x6e, 0x64, 0x12, 0x24, 0x0a, 0x04, 0x65, 0x78, 0x70, 0x72, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x65, 0x78, 0x70, 0x72, 0x22, 0x2a, 0x0a, 0x02, + 0x4f, 0x72, 0x12, 0x24, 0x0a, 0x04, 0x65, 0x78, 0x70, 0x72, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x10, 
0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x52, 0x04, 0x65, 0x78, 0x70, 0x72, 0x22, 0x73, 0x0a, 0x0f, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x2a, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x34, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x64, 0x0a, + 0x0a, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x42, 0x0a, 0x11, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, + 0x72, 0x52, 0x10, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x73, 0x22, 0x60, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x21, 0x0a, 0x0c, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, + 0x34, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, + 0x73, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 
0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, 0x65, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x5f, 0x0a, 0x09, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x12, 0x34, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, + 0x69, 0x73, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, + 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x21, 0x0a, 0x06, 0x54, 0x78, 0x48, 0x61, 0x73, 0x68, + 0x12, 0x17, 0x0a, 0x07, 0x74, 0x78, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x74, 0x78, 0x48, 0x61, 0x73, 0x68, 0x22, 0xff, 0x01, 0x0a, 0x09, 0x50, 0x72, + 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x32, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, + 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x23, 0x0a, 0x05, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x12, 0x32, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, + 0x65, 0x6e, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 
0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x27, 0x0a, 0x07, 0x74, 0x78, 0x5f, 0x68, 0x61, 0x73, 0x68, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x78, + 0x48, 0x61, 0x73, 0x68, 0x48, 0x00, 0x52, 0x06, 0x74, 0x78, 0x48, 0x61, 0x73, 0x68, 0x42, 0x0b, + 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x22, 0x8d, 0x01, 0x0a, 0x05, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1b, 0x0a, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x88, + 0x01, 0x01, 0x12, 0x38, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x75, 0x72, + 0x73, 0x6f, 0x72, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x01, 0x52, 0x09, + 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x14, 0x0a, 0x05, + 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x42, 0x0c, 0x0a, + 0x0a, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x68, 0x0a, 0x06, 0x53, + 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x2b, 0x0a, 0x09, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x74, 0x79, + 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x53, 0x6f, 0x72, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x73, 0x6f, 0x72, 0x74, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x31, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x6f, 0x72, + 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, + 0x63, 0x74, 0x69, 0x6f, 
0x6e, 0x22, 0x58, 0x0a, 0x0c, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x41, 0x6e, + 0x64, 0x53, 0x6f, 0x72, 0x74, 0x12, 0x25, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x6f, + 0x72, 0x74, 0x42, 0x79, 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x21, 0x0a, 0x05, + 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x2a, + 0x47, 0x0a, 0x12, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x69, 0x73, 0x6f, 0x6e, 0x4f, 0x70, 0x65, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x06, 0x0a, 0x02, 0x45, 0x71, 0x10, 0x00, 0x12, 0x07, 0x0a, + 0x03, 0x4e, 0x65, 0x71, 0x10, 0x01, 0x12, 0x06, 0x0a, 0x02, 0x47, 0x74, 0x10, 0x02, 0x12, 0x06, + 0x0a, 0x02, 0x4c, 0x74, 0x10, 0x03, 0x12, 0x07, 0x0a, 0x03, 0x47, 0x74, 0x65, 0x10, 0x04, 0x12, + 0x07, 0x0a, 0x03, 0x4c, 0x74, 0x65, 0x10, 0x05, 0x2a, 0x22, 0x0a, 0x0f, 0x42, 0x6f, 0x6f, 0x6c, + 0x65, 0x61, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x07, 0x0a, 0x03, 0x41, + 0x4e, 0x44, 0x10, 0x00, 0x12, 0x06, 0x0a, 0x02, 0x4f, 0x52, 0x10, 0x01, 0x2a, 0x2c, 0x0a, 0x0a, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x6e, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x72, 0x6d, 0x65, 0x64, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, + 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x10, 0x01, 0x2a, 0x2f, 0x0a, 0x0f, 0x43, 0x75, + 0x72, 0x73, 0x6f, 0x72, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0d, 0x0a, + 0x09, 0x50, 0x72, 0x65, 0x63, 0x65, 0x64, 0x69, 0x6e, 0x67, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, + 0x46, 0x6f, 0x6c, 0x6c, 0x6f, 0x77, 0x69, 0x6e, 0x67, 0x10, 0x01, 0x2a, 0x22, 0x0a, 0x0d, 0x53, + 0x6f, 0x72, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x07, 0x0a, 0x03, + 0x41, 0x73, 0x63, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x44, 
0x65, 0x73, 0x63, 0x10, 0x01, 0x2a, + 0x3e, 0x0a, 0x08, 0x53, 0x6f, 0x72, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x11, 0x0a, 0x0d, 0x53, + 0x6f, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x10, 0x00, 0x12, 0x0d, + 0x0a, 0x09, 0x53, 0x6f, 0x72, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x10, 0x01, 0x12, 0x10, 0x0a, + 0x0c, 0x53, 0x6f, 0x72, 0x74, 0x53, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x10, 0x02, 0x32, + 0x83, 0x04, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x12, 0x4a, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x4c, + 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x62, + 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x57, 0x69, 0x74, 0x68, 0x48, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x5c, 0x0a, 0x14, 0x42, 0x61, 0x74, - 0x63, 0x68, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x12, 0x21, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, - 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x61, 0x74, 0x63, - 0x68, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x38, 0x0a, 0x08, 0x51, 0x75, 0x65, 0x72, 0x79, - 
0x4b, 0x65, 0x79, 0x12, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, - 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, - 0x00, 0x12, 0x33, 0x0a, 0x04, 0x42, 0x69, 0x6e, 0x64, 0x12, 0x11, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x37, 0x0a, 0x06, 0x55, 0x6e, 0x62, 0x69, 0x6e, 0x64, - 0x12, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x55, 0x6e, 0x62, 0x69, 0x6e, 0x64, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, - 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, - 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, - 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, - 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, - 0x6c, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x75, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x57, + 0x69, 0x74, 0x68, 0x48, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x22, 0x00, 0x12, 0x5c, 0x0a, 0x14, 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, 0x74, 0x4c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x21, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, 0x74, 
0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x74, + 0x65, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, + 0x12, 0x38, 0x0a, 0x08, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x12, 0x15, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x4b, 0x65, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x09, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, 0x12, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x33, 0x0a, 0x04, 0x42, 0x69, 0x6e, 0x64, 0x12, + 0x11, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x37, 0x0a, 0x06, + 0x55, 0x6e, 0x62, 0x69, 0x6e, 0x64, 0x12, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x55, 0x6e, + 0x62, 0x69, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, + 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, + 
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( @@ -2286,104 +2630,122 @@ func file_contract_reader_proto_rawDescGZIP() []byte { } var file_contract_reader_proto_enumTypes = make([]protoimpl.EnumInfo, 6) -var file_contract_reader_proto_msgTypes = make([]protoimpl.MessageInfo, 29) +var file_contract_reader_proto_msgTypes = make([]protoimpl.MessageInfo, 34) var file_contract_reader_proto_goTypes = []interface{}{ - (ComparisonOperator)(0), // 0: loop.ComparisonOperator - (BooleanOperator)(0), // 1: loop.BooleanOperator - (Confidence)(0), // 2: loop.Confidence - (CursorDirection)(0), // 3: loop.CursorDirection - (SortDirection)(0), // 4: loop.SortDirection - (SortType)(0), // 5: loop.SortType - (*GetLatestValueRequest)(nil), // 6: loop.GetLatestValueRequest - (*BatchGetLatestValuesRequest)(nil), // 7: loop.BatchGetLatestValuesRequest - (*QueryKeyRequest)(nil), // 8: loop.QueryKeyRequest - (*BindRequest)(nil), // 9: loop.BindRequest - (*UnbindRequest)(nil), // 10: loop.UnbindRequest - (*GetLatestValueReply)(nil), // 11: loop.GetLatestValueReply - (*BatchGetLatestValuesReply)(nil), // 12: loop.BatchGetLatestValuesReply - (*QueryKeyReply)(nil), // 13: loop.QueryKeyReply - (*ContractBatch)(nil), // 14: loop.ContractBatch - (*BatchRead)(nil), // 15: loop.BatchRead - (*ContractBatchResult)(nil), // 16: loop.ContractBatchResult - (*BatchReadResult)(nil), // 17: loop.BatchReadResult - (*Head)(nil), // 18: loop.Head - (*Sequence)(nil), // 19: loop.Sequence - (*BoundContract)(nil), // 20: loop.BoundContract - (*QueryKeyFilter)(nil), // 21: loop.QueryKeyFilter - (*Expression)(nil), // 22: loop.Expression - (*BooleanExpression)(nil), // 23: loop.BooleanExpression - (*And)(nil), // 24: loop.And - (*Or)(nil), // 25: loop.Or - (*ValueComparator)(nil), // 26: loop.ValueComparator - (*Comparator)(nil), // 27: 
loop.Comparator - (*Block)(nil), // 28: loop.Block - (*Timestamp)(nil), // 29: loop.Timestamp - (*TxHash)(nil), // 30: loop.TxHash - (*Primitive)(nil), // 31: loop.Primitive - (*Limit)(nil), // 32: loop.Limit - (*SortBy)(nil), // 33: loop.SortBy - (*LimitAndSort)(nil), // 34: loop.LimitAndSort - (*VersionedBytes)(nil), // 35: loop.VersionedBytes - (*emptypb.Empty)(nil), // 36: google.protobuf.Empty + (ComparisonOperator)(0), // 0: loop.ComparisonOperator + (BooleanOperator)(0), // 1: loop.BooleanOperator + (Confidence)(0), // 2: loop.Confidence + (CursorDirection)(0), // 3: loop.CursorDirection + (SortDirection)(0), // 4: loop.SortDirection + (SortType)(0), // 5: loop.SortType + (*GetLatestValueRequest)(nil), // 6: loop.GetLatestValueRequest + (*BatchGetLatestValuesRequest)(nil), // 7: loop.BatchGetLatestValuesRequest + (*QueryKeyRequest)(nil), // 8: loop.QueryKeyRequest + (*QueryKeysRequest)(nil), // 9: loop.QueryKeysRequest + (*ContractKeyFilter)(nil), // 10: loop.ContractKeyFilter + (*BindRequest)(nil), // 11: loop.BindRequest + (*UnbindRequest)(nil), // 12: loop.UnbindRequest + (*GetLatestValueReply)(nil), // 13: loop.GetLatestValueReply + (*GetLatestValueWithHeadDataReply)(nil), // 14: loop.GetLatestValueWithHeadDataReply + (*BatchGetLatestValuesReply)(nil), // 15: loop.BatchGetLatestValuesReply + (*QueryKeyReply)(nil), // 16: loop.QueryKeyReply + (*QueryKeysReply)(nil), // 17: loop.QueryKeysReply + (*ContractBatch)(nil), // 18: loop.ContractBatch + (*BatchRead)(nil), // 19: loop.BatchRead + (*ContractBatchResult)(nil), // 20: loop.ContractBatchResult + (*BatchReadResult)(nil), // 21: loop.BatchReadResult + (*Head)(nil), // 22: loop.Head + (*Sequence)(nil), // 23: loop.Sequence + (*SequenceWithKey)(nil), // 24: loop.SequenceWithKey + (*BoundContract)(nil), // 25: loop.BoundContract + (*QueryKeyFilter)(nil), // 26: loop.QueryKeyFilter + (*Expression)(nil), // 27: loop.Expression + (*BooleanExpression)(nil), // 28: loop.BooleanExpression + (*And)(nil), // 29: 
loop.And + (*Or)(nil), // 30: loop.Or + (*ValueComparator)(nil), // 31: loop.ValueComparator + (*Comparator)(nil), // 32: loop.Comparator + (*Block)(nil), // 33: loop.Block + (*Timestamp)(nil), // 34: loop.Timestamp + (*TxHash)(nil), // 35: loop.TxHash + (*Primitive)(nil), // 36: loop.Primitive + (*Limit)(nil), // 37: loop.Limit + (*SortBy)(nil), // 38: loop.SortBy + (*LimitAndSort)(nil), // 39: loop.LimitAndSort + (*VersionedBytes)(nil), // 40: loop.VersionedBytes + (*emptypb.Empty)(nil), // 41: google.protobuf.Empty } var file_contract_reader_proto_depIdxs = []int32{ 2, // 0: loop.GetLatestValueRequest.confidence:type_name -> loop.Confidence - 35, // 1: loop.GetLatestValueRequest.params:type_name -> loop.VersionedBytes - 14, // 2: loop.BatchGetLatestValuesRequest.requests:type_name -> loop.ContractBatch - 20, // 3: loop.QueryKeyRequest.contract:type_name -> loop.BoundContract - 21, // 4: loop.QueryKeyRequest.filter:type_name -> loop.QueryKeyFilter - 34, // 5: loop.QueryKeyRequest.limit_and_sort:type_name -> loop.LimitAndSort - 20, // 6: loop.BindRequest.bindings:type_name -> loop.BoundContract - 20, // 7: loop.UnbindRequest.bindings:type_name -> loop.BoundContract - 35, // 8: loop.GetLatestValueReply.ret_val:type_name -> loop.VersionedBytes - 16, // 9: loop.BatchGetLatestValuesReply.results:type_name -> loop.ContractBatchResult - 19, // 10: loop.QueryKeyReply.sequences:type_name -> loop.Sequence - 20, // 11: loop.ContractBatch.contract:type_name -> loop.BoundContract - 15, // 12: loop.ContractBatch.reads:type_name -> loop.BatchRead - 35, // 13: loop.BatchRead.params:type_name -> loop.VersionedBytes - 35, // 14: loop.BatchRead.return_val:type_name -> loop.VersionedBytes - 20, // 15: loop.ContractBatchResult.contract:type_name -> loop.BoundContract - 17, // 16: loop.ContractBatchResult.results:type_name -> loop.BatchReadResult - 35, // 17: loop.BatchReadResult.return_val:type_name -> loop.VersionedBytes - 18, // 18: loop.Sequence.head:type_name -> loop.Head - 35, 
// 19: loop.Sequence.data:type_name -> loop.VersionedBytes - 22, // 20: loop.QueryKeyFilter.expression:type_name -> loop.Expression - 31, // 21: loop.Expression.primitive:type_name -> loop.Primitive - 23, // 22: loop.Expression.boolean_expression:type_name -> loop.BooleanExpression - 1, // 23: loop.BooleanExpression.boolean_operator:type_name -> loop.BooleanOperator - 22, // 24: loop.BooleanExpression.expression:type_name -> loop.Expression - 22, // 25: loop.And.expr:type_name -> loop.Expression - 22, // 26: loop.Or.expr:type_name -> loop.Expression - 35, // 27: loop.ValueComparator.value:type_name -> loop.VersionedBytes - 0, // 28: loop.ValueComparator.operator:type_name -> loop.ComparisonOperator - 26, // 29: loop.Comparator.value_comparators:type_name -> loop.ValueComparator - 0, // 30: loop.Block.operator:type_name -> loop.ComparisonOperator - 0, // 31: loop.Timestamp.operator:type_name -> loop.ComparisonOperator - 27, // 32: loop.Primitive.comparator:type_name -> loop.Comparator - 28, // 33: loop.Primitive.block:type_name -> loop.Block - 2, // 34: loop.Primitive.confidence:type_name -> loop.Confidence - 29, // 35: loop.Primitive.timestamp:type_name -> loop.Timestamp - 30, // 36: loop.Primitive.tx_hash:type_name -> loop.TxHash - 3, // 37: loop.Limit.direction:type_name -> loop.CursorDirection - 5, // 38: loop.SortBy.sort_type:type_name -> loop.SortType - 4, // 39: loop.SortBy.direction:type_name -> loop.SortDirection - 33, // 40: loop.LimitAndSort.sort_by:type_name -> loop.SortBy - 32, // 41: loop.LimitAndSort.limit:type_name -> loop.Limit - 6, // 42: loop.ContractReader.GetLatestValue:input_type -> loop.GetLatestValueRequest - 7, // 43: loop.ContractReader.BatchGetLatestValues:input_type -> loop.BatchGetLatestValuesRequest - 8, // 44: loop.ContractReader.QueryKey:input_type -> loop.QueryKeyRequest - 9, // 45: loop.ContractReader.Bind:input_type -> loop.BindRequest - 10, // 46: loop.ContractReader.Unbind:input_type -> loop.UnbindRequest - 11, // 47: 
loop.ContractReader.GetLatestValue:output_type -> loop.GetLatestValueReply - 12, // 48: loop.ContractReader.BatchGetLatestValues:output_type -> loop.BatchGetLatestValuesReply - 13, // 49: loop.ContractReader.QueryKey:output_type -> loop.QueryKeyReply - 36, // 50: loop.ContractReader.Bind:output_type -> google.protobuf.Empty - 36, // 51: loop.ContractReader.Unbind:output_type -> google.protobuf.Empty - 47, // [47:52] is the sub-list for method output_type - 42, // [42:47] is the sub-list for method input_type - 42, // [42:42] is the sub-list for extension type_name - 42, // [42:42] is the sub-list for extension extendee - 0, // [0:42] is the sub-list for field type_name + 40, // 1: loop.GetLatestValueRequest.params:type_name -> loop.VersionedBytes + 18, // 2: loop.BatchGetLatestValuesRequest.requests:type_name -> loop.ContractBatch + 25, // 3: loop.QueryKeyRequest.contract:type_name -> loop.BoundContract + 26, // 4: loop.QueryKeyRequest.filter:type_name -> loop.QueryKeyFilter + 39, // 5: loop.QueryKeyRequest.limit_and_sort:type_name -> loop.LimitAndSort + 10, // 6: loop.QueryKeysRequest.filters:type_name -> loop.ContractKeyFilter + 39, // 7: loop.QueryKeysRequest.limit_and_sort:type_name -> loop.LimitAndSort + 25, // 8: loop.ContractKeyFilter.contract:type_name -> loop.BoundContract + 26, // 9: loop.ContractKeyFilter.filter:type_name -> loop.QueryKeyFilter + 25, // 10: loop.BindRequest.bindings:type_name -> loop.BoundContract + 25, // 11: loop.UnbindRequest.bindings:type_name -> loop.BoundContract + 40, // 12: loop.GetLatestValueReply.ret_val:type_name -> loop.VersionedBytes + 40, // 13: loop.GetLatestValueWithHeadDataReply.ret_val:type_name -> loop.VersionedBytes + 22, // 14: loop.GetLatestValueWithHeadDataReply.head_data:type_name -> loop.Head + 20, // 15: loop.BatchGetLatestValuesReply.results:type_name -> loop.ContractBatchResult + 23, // 16: loop.QueryKeyReply.sequences:type_name -> loop.Sequence + 24, // 17: loop.QueryKeysReply.sequences:type_name -> 
loop.SequenceWithKey + 25, // 18: loop.ContractBatch.contract:type_name -> loop.BoundContract + 19, // 19: loop.ContractBatch.reads:type_name -> loop.BatchRead + 40, // 20: loop.BatchRead.params:type_name -> loop.VersionedBytes + 40, // 21: loop.BatchRead.return_val:type_name -> loop.VersionedBytes + 25, // 22: loop.ContractBatchResult.contract:type_name -> loop.BoundContract + 21, // 23: loop.ContractBatchResult.results:type_name -> loop.BatchReadResult + 40, // 24: loop.BatchReadResult.return_val:type_name -> loop.VersionedBytes + 22, // 25: loop.Sequence.head:type_name -> loop.Head + 40, // 26: loop.Sequence.data:type_name -> loop.VersionedBytes + 22, // 27: loop.SequenceWithKey.head:type_name -> loop.Head + 40, // 28: loop.SequenceWithKey.data:type_name -> loop.VersionedBytes + 27, // 29: loop.QueryKeyFilter.expression:type_name -> loop.Expression + 36, // 30: loop.Expression.primitive:type_name -> loop.Primitive + 28, // 31: loop.Expression.boolean_expression:type_name -> loop.BooleanExpression + 1, // 32: loop.BooleanExpression.boolean_operator:type_name -> loop.BooleanOperator + 27, // 33: loop.BooleanExpression.expression:type_name -> loop.Expression + 27, // 34: loop.And.expr:type_name -> loop.Expression + 27, // 35: loop.Or.expr:type_name -> loop.Expression + 40, // 36: loop.ValueComparator.value:type_name -> loop.VersionedBytes + 0, // 37: loop.ValueComparator.operator:type_name -> loop.ComparisonOperator + 31, // 38: loop.Comparator.value_comparators:type_name -> loop.ValueComparator + 0, // 39: loop.Block.operator:type_name -> loop.ComparisonOperator + 0, // 40: loop.Timestamp.operator:type_name -> loop.ComparisonOperator + 32, // 41: loop.Primitive.comparator:type_name -> loop.Comparator + 33, // 42: loop.Primitive.block:type_name -> loop.Block + 2, // 43: loop.Primitive.confidence:type_name -> loop.Confidence + 34, // 44: loop.Primitive.timestamp:type_name -> loop.Timestamp + 35, // 45: loop.Primitive.tx_hash:type_name -> loop.TxHash + 3, // 46: 
loop.Limit.direction:type_name -> loop.CursorDirection + 5, // 47: loop.SortBy.sort_type:type_name -> loop.SortType + 4, // 48: loop.SortBy.direction:type_name -> loop.SortDirection + 38, // 49: loop.LimitAndSort.sort_by:type_name -> loop.SortBy + 37, // 50: loop.LimitAndSort.limit:type_name -> loop.Limit + 6, // 51: loop.ContractReader.GetLatestValue:input_type -> loop.GetLatestValueRequest + 6, // 52: loop.ContractReader.GetLatestValueWithHeadData:input_type -> loop.GetLatestValueRequest + 7, // 53: loop.ContractReader.BatchGetLatestValues:input_type -> loop.BatchGetLatestValuesRequest + 8, // 54: loop.ContractReader.QueryKey:input_type -> loop.QueryKeyRequest + 9, // 55: loop.ContractReader.QueryKeys:input_type -> loop.QueryKeysRequest + 11, // 56: loop.ContractReader.Bind:input_type -> loop.BindRequest + 12, // 57: loop.ContractReader.Unbind:input_type -> loop.UnbindRequest + 13, // 58: loop.ContractReader.GetLatestValue:output_type -> loop.GetLatestValueReply + 14, // 59: loop.ContractReader.GetLatestValueWithHeadData:output_type -> loop.GetLatestValueWithHeadDataReply + 15, // 60: loop.ContractReader.BatchGetLatestValues:output_type -> loop.BatchGetLatestValuesReply + 16, // 61: loop.ContractReader.QueryKey:output_type -> loop.QueryKeyReply + 17, // 62: loop.ContractReader.QueryKeys:output_type -> loop.QueryKeysReply + 41, // 63: loop.ContractReader.Bind:output_type -> google.protobuf.Empty + 41, // 64: loop.ContractReader.Unbind:output_type -> google.protobuf.Empty + 58, // [58:65] is the sub-list for method output_type + 51, // [51:58] is the sub-list for method input_type + 51, // [51:51] is the sub-list for extension type_name + 51, // [51:51] is the sub-list for extension extendee + 0, // [0:51] is the sub-list for field type_name } func init() { file_contract_reader_proto_init() } @@ -2430,7 +2792,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := 
v.(*BindRequest); i { + switch v := v.(*QueryKeysRequest); i { case 0: return &v.state case 1: @@ -2442,7 +2804,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UnbindRequest); i { + switch v := v.(*ContractKeyFilter); i { case 0: return &v.state case 1: @@ -2454,7 +2816,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetLatestValueReply); i { + switch v := v.(*BindRequest); i { case 0: return &v.state case 1: @@ -2466,7 +2828,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BatchGetLatestValuesReply); i { + switch v := v.(*UnbindRequest); i { case 0: return &v.state case 1: @@ -2478,7 +2840,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QueryKeyReply); i { + switch v := v.(*GetLatestValueReply); i { case 0: return &v.state case 1: @@ -2490,7 +2852,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ContractBatch); i { + switch v := v.(*GetLatestValueWithHeadDataReply); i { case 0: return &v.state case 1: @@ -2502,7 +2864,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BatchRead); i { + switch v := v.(*BatchGetLatestValuesReply); i { case 0: return &v.state case 1: @@ -2514,7 +2876,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ContractBatchResult); i { + switch v := v.(*QueryKeyReply); i { case 0: return 
&v.state case 1: @@ -2526,7 +2888,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BatchReadResult); i { + switch v := v.(*QueryKeysReply); i { case 0: return &v.state case 1: @@ -2538,7 +2900,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Head); i { + switch v := v.(*ContractBatch); i { case 0: return &v.state case 1: @@ -2550,7 +2912,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Sequence); i { + switch v := v.(*BatchRead); i { case 0: return &v.state case 1: @@ -2562,7 +2924,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BoundContract); i { + switch v := v.(*ContractBatchResult); i { case 0: return &v.state case 1: @@ -2574,7 +2936,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QueryKeyFilter); i { + switch v := v.(*BatchReadResult); i { case 0: return &v.state case 1: @@ -2586,7 +2948,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Expression); i { + switch v := v.(*Head); i { case 0: return &v.state case 1: @@ -2598,7 +2960,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BooleanExpression); i { + switch v := v.(*Sequence); i { case 0: return &v.state case 1: @@ -2610,7 +2972,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[18].Exporter = func(v 
interface{}, i int) interface{} { - switch v := v.(*And); i { + switch v := v.(*SequenceWithKey); i { case 0: return &v.state case 1: @@ -2622,7 +2984,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Or); i { + switch v := v.(*BoundContract); i { case 0: return &v.state case 1: @@ -2634,7 +2996,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ValueComparator); i { + switch v := v.(*QueryKeyFilter); i { case 0: return &v.state case 1: @@ -2646,7 +3008,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Comparator); i { + switch v := v.(*Expression); i { case 0: return &v.state case 1: @@ -2658,7 +3020,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Block); i { + switch v := v.(*BooleanExpression); i { case 0: return &v.state case 1: @@ -2670,7 +3032,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Timestamp); i { + switch v := v.(*And); i { case 0: return &v.state case 1: @@ -2682,7 +3044,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TxHash); i { + switch v := v.(*Or); i { case 0: return &v.state case 1: @@ -2694,7 +3056,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Primitive); i { + switch v := v.(*ValueComparator); i { case 0: return &v.state case 1: @@ -2706,7 +3068,7 @@ func 
file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Limit); i { + switch v := v.(*Comparator); i { case 0: return &v.state case 1: @@ -2718,7 +3080,7 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SortBy); i { + switch v := v.(*Block); i { case 0: return &v.state case 1: @@ -2730,6 +3092,66 @@ func file_contract_reader_proto_init() { } } file_contract_reader_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Timestamp); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_contract_reader_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TxHash); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_contract_reader_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Primitive); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_contract_reader_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Limit); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_contract_reader_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SortBy); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_contract_reader_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*LimitAndSort); i { case 0: return &v.state @@ -2742,25 +3164,25 @@ func 
file_contract_reader_proto_init() { } } } - file_contract_reader_proto_msgTypes[16].OneofWrappers = []interface{}{ + file_contract_reader_proto_msgTypes[21].OneofWrappers = []interface{}{ (*Expression_Primitive)(nil), (*Expression_BooleanExpression)(nil), } - file_contract_reader_proto_msgTypes[25].OneofWrappers = []interface{}{ + file_contract_reader_proto_msgTypes[30].OneofWrappers = []interface{}{ (*Primitive_Comparator)(nil), (*Primitive_Block)(nil), (*Primitive_Confidence)(nil), (*Primitive_Timestamp)(nil), (*Primitive_TxHash)(nil), } - file_contract_reader_proto_msgTypes[26].OneofWrappers = []interface{}{} + file_contract_reader_proto_msgTypes[31].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_contract_reader_proto_rawDesc, NumEnums: 6, - NumMessages: 29, + NumMessages: 34, NumExtensions: 0, NumServices: 1, }, diff --git a/pkg/loop/internal/pb/contract_reader.proto b/pkg/loop/internal/pb/contract_reader.proto index 713245233..8987c0086 100644 --- a/pkg/loop/internal/pb/contract_reader.proto +++ b/pkg/loop/internal/pb/contract_reader.proto @@ -9,8 +9,10 @@ import "google/protobuf/empty.proto"; service ContractReader { rpc GetLatestValue (GetLatestValueRequest) returns (GetLatestValueReply) {} + rpc GetLatestValueWithHeadData (GetLatestValueRequest) returns (GetLatestValueWithHeadDataReply) {} rpc BatchGetLatestValues (BatchGetLatestValuesRequest) returns (BatchGetLatestValuesReply) {} rpc QueryKey(QueryKeyRequest) returns (QueryKeyReply) {} + rpc QueryKeys(QueryKeysRequest) returns (QueryKeysReply) {} rpc Bind(BindRequest) returns (google.protobuf.Empty) {} rpc Unbind(UnbindRequest) returns (google.protobuf.Empty) {} } @@ -36,6 +38,18 @@ message QueryKeyRequest { bool as_value_type = 4; } +// QueryKeysRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.QueryKeys]. 
+message QueryKeysRequest { + repeated ContractKeyFilter filters = 1; + LimitAndSort limit_and_sort = 2; +} + +message ContractKeyFilter { + BoundContract contract = 1; + QueryKeyFilter filter = 2; + bool as_value_type = 4; +} + // BindRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.Bind]. message BindRequest { repeated BoundContract bindings = 1; @@ -51,6 +65,13 @@ message GetLatestValueReply { VersionedBytes ret_val = 1; } + +// GetLatestValueWithHeadDataReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.GetLatestValueWithHeadData]. +message GetLatestValueWithHeadDataReply { + VersionedBytes ret_val = 1; + Head head_data = 2; +} + // BatchGetLatestValuesReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.BatchGetLatestValues]. message BatchGetLatestValuesReply { repeated ContractBatchResult results = 1; @@ -61,6 +82,11 @@ message QueryKeyReply { repeated Sequence sequences = 1; } +// QueryKeysReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.QueryKeys]. +message QueryKeysReply { + repeated SequenceWithKey sequences = 1; +} + // ContractBatch is gRPC adapter for the BatchGetLatestValuesRequest struct map value [github.com/smartcontractkit/chainlink-common/pkg/types.ContractReader.BatchGetLatestValuesRequest]. message ContractBatch { BoundContract contract = 1; @@ -101,6 +127,13 @@ message Sequence { VersionedBytes data = 3; } +message SequenceWithKey { + string sequence_cursor = 1; + Head head = 2; + VersionedBytes data = 3; + string key = 4; +} + // BoundContract represents a [github.com/smartcontractkit/chainlink-common/pkg/types.BoundContract]. 
message BoundContract { string address = 1; diff --git a/pkg/loop/internal/pb/contract_reader_grpc.pb.go b/pkg/loop/internal/pb/contract_reader_grpc.pb.go index 67e2f967b..c8f8d5414 100644 --- a/pkg/loop/internal/pb/contract_reader_grpc.pb.go +++ b/pkg/loop/internal/pb/contract_reader_grpc.pb.go @@ -20,11 +20,13 @@ import ( const _ = grpc.SupportPackageIsVersion7 const ( - ContractReader_GetLatestValue_FullMethodName = "/loop.ContractReader/GetLatestValue" - ContractReader_BatchGetLatestValues_FullMethodName = "/loop.ContractReader/BatchGetLatestValues" - ContractReader_QueryKey_FullMethodName = "/loop.ContractReader/QueryKey" - ContractReader_Bind_FullMethodName = "/loop.ContractReader/Bind" - ContractReader_Unbind_FullMethodName = "/loop.ContractReader/Unbind" + ContractReader_GetLatestValue_FullMethodName = "/loop.ContractReader/GetLatestValue" + ContractReader_GetLatestValueWithHeadData_FullMethodName = "/loop.ContractReader/GetLatestValueWithHeadData" + ContractReader_BatchGetLatestValues_FullMethodName = "/loop.ContractReader/BatchGetLatestValues" + ContractReader_QueryKey_FullMethodName = "/loop.ContractReader/QueryKey" + ContractReader_QueryKeys_FullMethodName = "/loop.ContractReader/QueryKeys" + ContractReader_Bind_FullMethodName = "/loop.ContractReader/Bind" + ContractReader_Unbind_FullMethodName = "/loop.ContractReader/Unbind" ) // ContractReaderClient is the client API for ContractReader service. @@ -32,8 +34,10 @@ const ( // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
type ContractReaderClient interface { GetLatestValue(ctx context.Context, in *GetLatestValueRequest, opts ...grpc.CallOption) (*GetLatestValueReply, error) + GetLatestValueWithHeadData(ctx context.Context, in *GetLatestValueRequest, opts ...grpc.CallOption) (*GetLatestValueWithHeadDataReply, error) BatchGetLatestValues(ctx context.Context, in *BatchGetLatestValuesRequest, opts ...grpc.CallOption) (*BatchGetLatestValuesReply, error) QueryKey(ctx context.Context, in *QueryKeyRequest, opts ...grpc.CallOption) (*QueryKeyReply, error) + QueryKeys(ctx context.Context, in *QueryKeysRequest, opts ...grpc.CallOption) (*QueryKeysReply, error) Bind(ctx context.Context, in *BindRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) Unbind(ctx context.Context, in *UnbindRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) } @@ -55,6 +59,15 @@ func (c *contractReaderClient) GetLatestValue(ctx context.Context, in *GetLatest return out, nil } +func (c *contractReaderClient) GetLatestValueWithHeadData(ctx context.Context, in *GetLatestValueRequest, opts ...grpc.CallOption) (*GetLatestValueWithHeadDataReply, error) { + out := new(GetLatestValueWithHeadDataReply) + err := c.cc.Invoke(ctx, ContractReader_GetLatestValueWithHeadData_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *contractReaderClient) BatchGetLatestValues(ctx context.Context, in *BatchGetLatestValuesRequest, opts ...grpc.CallOption) (*BatchGetLatestValuesReply, error) { out := new(BatchGetLatestValuesReply) err := c.cc.Invoke(ctx, ContractReader_BatchGetLatestValues_FullMethodName, in, out, opts...) 
@@ -73,6 +86,15 @@ func (c *contractReaderClient) QueryKey(ctx context.Context, in *QueryKeyRequest return out, nil } +func (c *contractReaderClient) QueryKeys(ctx context.Context, in *QueryKeysRequest, opts ...grpc.CallOption) (*QueryKeysReply, error) { + out := new(QueryKeysReply) + err := c.cc.Invoke(ctx, ContractReader_QueryKeys_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *contractReaderClient) Bind(ctx context.Context, in *BindRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { out := new(emptypb.Empty) err := c.cc.Invoke(ctx, ContractReader_Bind_FullMethodName, in, out, opts...) @@ -96,8 +118,10 @@ func (c *contractReaderClient) Unbind(ctx context.Context, in *UnbindRequest, op // for forward compatibility type ContractReaderServer interface { GetLatestValue(context.Context, *GetLatestValueRequest) (*GetLatestValueReply, error) + GetLatestValueWithHeadData(context.Context, *GetLatestValueRequest) (*GetLatestValueWithHeadDataReply, error) BatchGetLatestValues(context.Context, *BatchGetLatestValuesRequest) (*BatchGetLatestValuesReply, error) QueryKey(context.Context, *QueryKeyRequest) (*QueryKeyReply, error) + QueryKeys(context.Context, *QueryKeysRequest) (*QueryKeysReply, error) Bind(context.Context, *BindRequest) (*emptypb.Empty, error) Unbind(context.Context, *UnbindRequest) (*emptypb.Empty, error) mustEmbedUnimplementedContractReaderServer() @@ -110,12 +134,18 @@ type UnimplementedContractReaderServer struct { func (UnimplementedContractReaderServer) GetLatestValue(context.Context, *GetLatestValueRequest) (*GetLatestValueReply, error) { return nil, status.Errorf(codes.Unimplemented, "method GetLatestValue not implemented") } +func (UnimplementedContractReaderServer) GetLatestValueWithHeadData(context.Context, *GetLatestValueRequest) (*GetLatestValueWithHeadDataReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetLatestValueWithHeadData not implemented") +} func 
(UnimplementedContractReaderServer) BatchGetLatestValues(context.Context, *BatchGetLatestValuesRequest) (*BatchGetLatestValuesReply, error) { return nil, status.Errorf(codes.Unimplemented, "method BatchGetLatestValues not implemented") } func (UnimplementedContractReaderServer) QueryKey(context.Context, *QueryKeyRequest) (*QueryKeyReply, error) { return nil, status.Errorf(codes.Unimplemented, "method QueryKey not implemented") } +func (UnimplementedContractReaderServer) QueryKeys(context.Context, *QueryKeysRequest) (*QueryKeysReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryKeys not implemented") +} func (UnimplementedContractReaderServer) Bind(context.Context, *BindRequest) (*emptypb.Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method Bind not implemented") } @@ -153,6 +183,24 @@ func _ContractReader_GetLatestValue_Handler(srv interface{}, ctx context.Context return interceptor(ctx, in, info, handler) } +func _ContractReader_GetLatestValueWithHeadData_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLatestValueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContractReaderServer).GetLatestValueWithHeadData(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ContractReader_GetLatestValueWithHeadData_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContractReaderServer).GetLatestValueWithHeadData(ctx, req.(*GetLatestValueRequest)) + } + return interceptor(ctx, in, info, handler) +} + func _ContractReader_BatchGetLatestValues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(BatchGetLatestValuesRequest) if err := dec(in); err != nil { @@ -189,6 +237,24 @@ func 
_ContractReader_QueryKey_Handler(srv interface{}, ctx context.Context, dec return interceptor(ctx, in, info, handler) } +func _ContractReader_QueryKeys_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryKeysRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContractReaderServer).QueryKeys(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ContractReader_QueryKeys_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContractReaderServer).QueryKeys(ctx, req.(*QueryKeysRequest)) + } + return interceptor(ctx, in, info, handler) +} + func _ContractReader_Bind_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(BindRequest) if err := dec(in); err != nil { @@ -236,6 +302,10 @@ var ContractReader_ServiceDesc = grpc.ServiceDesc{ MethodName: "GetLatestValue", Handler: _ContractReader_GetLatestValue_Handler, }, + { + MethodName: "GetLatestValueWithHeadData", + Handler: _ContractReader_GetLatestValueWithHeadData_Handler, + }, { MethodName: "BatchGetLatestValues", Handler: _ContractReader_BatchGetLatestValues_Handler, @@ -244,6 +314,10 @@ var ContractReader_ServiceDesc = grpc.ServiceDesc{ MethodName: "QueryKey", Handler: _ContractReader_QueryKey_Handler, }, + { + MethodName: "QueryKeys", + Handler: _ContractReader_QueryKeys_Handler, + }, { MethodName: "Bind", Handler: _ContractReader_Bind_Handler, diff --git a/pkg/loop/internal/pb/chain_writer.pb.go b/pkg/loop/internal/pb/contract_writer.pb.go similarity index 55% rename from pkg/loop/internal/pb/chain_writer.pb.go rename to pkg/loop/internal/pb/contract_writer.pb.go index df8b7f7a2..b43891cb2 100644 --- a/pkg/loop/internal/pb/chain_writer.pb.go +++ b/pkg/loop/internal/pb/contract_writer.pb.go 
@@ -2,7 +2,7 @@ // versions: // protoc-gen-go v1.31.0 // protoc v4.25.1 -// source: chain_writer.proto +// source: contract_writer.proto package pb @@ -65,11 +65,11 @@ func (x TransactionStatus) String() string { } func (TransactionStatus) Descriptor() protoreflect.EnumDescriptor { - return file_chain_writer_proto_enumTypes[0].Descriptor() + return file_contract_writer_proto_enumTypes[0].Descriptor() } func (TransactionStatus) Type() protoreflect.EnumType { - return &file_chain_writer_proto_enumTypes[0] + return &file_contract_writer_proto_enumTypes[0] } func (x TransactionStatus) Number() protoreflect.EnumNumber { @@ -78,7 +78,7 @@ func (x TransactionStatus) Number() protoreflect.EnumNumber { // Deprecated: Use TransactionStatus.Descriptor instead. func (TransactionStatus) EnumDescriptor() ([]byte, []int) { - return file_chain_writer_proto_rawDescGZIP(), []int{0} + return file_contract_writer_proto_rawDescGZIP(), []int{0} } type SubmitTransactionRequest struct { @@ -98,7 +98,7 @@ type SubmitTransactionRequest struct { func (x *SubmitTransactionRequest) Reset() { *x = SubmitTransactionRequest{} if protoimpl.UnsafeEnabled { - mi := &file_chain_writer_proto_msgTypes[0] + mi := &file_contract_writer_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -111,7 +111,7 @@ func (x *SubmitTransactionRequest) String() string { func (*SubmitTransactionRequest) ProtoMessage() {} func (x *SubmitTransactionRequest) ProtoReflect() protoreflect.Message { - mi := &file_chain_writer_proto_msgTypes[0] + mi := &file_contract_writer_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -124,7 +124,7 @@ func (x *SubmitTransactionRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use SubmitTransactionRequest.ProtoReflect.Descriptor instead. 
func (*SubmitTransactionRequest) Descriptor() ([]byte, []int) { - return file_chain_writer_proto_rawDescGZIP(), []int{0} + return file_contract_writer_proto_rawDescGZIP(), []int{0} } func (x *SubmitTransactionRequest) GetContractName() string { @@ -188,7 +188,7 @@ type TransactionMeta struct { func (x *TransactionMeta) Reset() { *x = TransactionMeta{} if protoimpl.UnsafeEnabled { - mi := &file_chain_writer_proto_msgTypes[1] + mi := &file_contract_writer_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -201,7 +201,7 @@ func (x *TransactionMeta) String() string { func (*TransactionMeta) ProtoMessage() {} func (x *TransactionMeta) ProtoReflect() protoreflect.Message { - mi := &file_chain_writer_proto_msgTypes[1] + mi := &file_contract_writer_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -214,7 +214,7 @@ func (x *TransactionMeta) ProtoReflect() protoreflect.Message { // Deprecated: Use TransactionMeta.ProtoReflect.Descriptor instead. func (*TransactionMeta) Descriptor() ([]byte, []int) { - return file_chain_writer_proto_rawDescGZIP(), []int{1} + return file_contract_writer_proto_rawDescGZIP(), []int{1} } func (x *TransactionMeta) GetWorkflowExecutionId() string { @@ -231,7 +231,7 @@ func (x *TransactionMeta) GetGasLimit() *BigInt { return nil } -// GetTransactionStatusRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ChainWriter.GetTransactionStatus]. +// GetTransactionStatusRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractWriter.GetTransactionStatus]. 
type GetTransactionStatusRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -243,7 +243,7 @@ type GetTransactionStatusRequest struct { func (x *GetTransactionStatusRequest) Reset() { *x = GetTransactionStatusRequest{} if protoimpl.UnsafeEnabled { - mi := &file_chain_writer_proto_msgTypes[2] + mi := &file_contract_writer_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -256,7 +256,7 @@ func (x *GetTransactionStatusRequest) String() string { func (*GetTransactionStatusRequest) ProtoMessage() {} func (x *GetTransactionStatusRequest) ProtoReflect() protoreflect.Message { - mi := &file_chain_writer_proto_msgTypes[2] + mi := &file_contract_writer_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -269,7 +269,7 @@ func (x *GetTransactionStatusRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetTransactionStatusRequest.ProtoReflect.Descriptor instead. func (*GetTransactionStatusRequest) Descriptor() ([]byte, []int) { - return file_chain_writer_proto_rawDescGZIP(), []int{2} + return file_contract_writer_proto_rawDescGZIP(), []int{2} } func (x *GetTransactionStatusRequest) GetTransactionId() string { @@ -279,7 +279,7 @@ func (x *GetTransactionStatusRequest) GetTransactionId() string { return "" } -// GetTransactionStatusReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ChainWriter.GetTransactionStatus]. +// GetTransactionStatusReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractWriter.GetTransactionStatus]. 
type GetTransactionStatusReply struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -291,7 +291,7 @@ type GetTransactionStatusReply struct { func (x *GetTransactionStatusReply) Reset() { *x = GetTransactionStatusReply{} if protoimpl.UnsafeEnabled { - mi := &file_chain_writer_proto_msgTypes[3] + mi := &file_contract_writer_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -304,7 +304,7 @@ func (x *GetTransactionStatusReply) String() string { func (*GetTransactionStatusReply) ProtoMessage() {} func (x *GetTransactionStatusReply) ProtoReflect() protoreflect.Message { - mi := &file_chain_writer_proto_msgTypes[3] + mi := &file_contract_writer_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -317,7 +317,7 @@ func (x *GetTransactionStatusReply) ProtoReflect() protoreflect.Message { // Deprecated: Use GetTransactionStatusReply.ProtoReflect.Descriptor instead. func (*GetTransactionStatusReply) Descriptor() ([]byte, []int) { - return file_chain_writer_proto_rawDescGZIP(), []int{3} + return file_contract_writer_proto_rawDescGZIP(), []int{3} } func (x *GetTransactionStatusReply) GetTransactionStatus() TransactionStatus { @@ -327,7 +327,7 @@ func (x *GetTransactionStatusReply) GetTransactionStatus() TransactionStatus { return TransactionStatus_TRANSACTION_STATUS_UNKNOWN } -// GetFeeComponentsReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ChainWriter.GetFeeComponents]. +// GetFeeComponentsReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractWriter.GetFeeComponents]. 
type GetFeeComponentsReply struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -340,7 +340,7 @@ type GetFeeComponentsReply struct { func (x *GetFeeComponentsReply) Reset() { *x = GetFeeComponentsReply{} if protoimpl.UnsafeEnabled { - mi := &file_chain_writer_proto_msgTypes[4] + mi := &file_contract_writer_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -353,7 +353,7 @@ func (x *GetFeeComponentsReply) String() string { func (*GetFeeComponentsReply) ProtoMessage() {} func (x *GetFeeComponentsReply) ProtoReflect() protoreflect.Message { - mi := &file_chain_writer_proto_msgTypes[4] + mi := &file_contract_writer_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -366,7 +366,7 @@ func (x *GetFeeComponentsReply) ProtoReflect() protoreflect.Message { // Deprecated: Use GetFeeComponentsReply.ProtoReflect.Descriptor instead. func (*GetFeeComponentsReply) Descriptor() ([]byte, []int) { - return file_chain_writer_proto_rawDescGZIP(), []int{4} + return file_contract_writer_proto_rawDescGZIP(), []int{4} } func (x *GetFeeComponentsReply) GetExecutionFee() *BigInt { @@ -383,111 +383,111 @@ func (x *GetFeeComponentsReply) GetDataAvailabilityFee() *BigInt { return nil } -var File_chain_writer_proto protoreflect.FileDescriptor - -var file_chain_writer_proto_rawDesc = []byte{ - 0x0a, 0x12, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x5f, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x6c, 0x6f, 0x6f, 0x70, 0x1a, 0x0b, 0x63, 0x6f, 0x64, 0x65, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0d, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x22, 0x9a, 0x02, 0x0a, 0x18, 0x53, 
0x75, 0x62, 0x6d, 0x69, 0x74, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, - 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x2c, 0x0a, - 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, - 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x42, 0x79, - 0x74, 0x65, 0x73, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x74, - 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x6f, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, - 0x73, 0x12, 0x29, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0x70, 0x0a, 0x0f, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, - 0x65, 0x74, 0x61, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, - 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 
0x01, - 0x28, 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x09, 0x67, 0x61, 0x73, 0x5f, 0x6c, - 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x08, 0x67, 0x61, 0x73, 0x4c, 0x69, 0x6d, - 0x69, 0x74, 0x22, 0x44, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0x63, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x54, +var File_contract_writer_proto protoreflect.FileDescriptor + +var file_contract_writer_proto_rawDesc = []byte{ + 0x0a, 0x15, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5f, 0x77, 0x72, 0x69, 0x74, 0x65, + 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x6c, 0x6f, 0x6f, 0x70, 0x1a, 0x0b, 0x63, + 0x6f, 0x64, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0d, 0x72, 0x65, 0x6c, 0x61, + 0x79, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x9a, 0x02, 0x0a, 0x18, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, + 0x6f, 0x64, 0x18, 
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, + 0x12, 0x2c, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x25, + 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x5f, 0x61, 0x64, 0x64, 0x72, + 0x65, 0x73, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x6f, 0x41, 0x64, 0x64, + 0x72, 0x65, 0x73, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x12, + 0x22, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x22, 0x70, 0x0a, 0x0f, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x09, 0x67, 0x61, + 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x08, 0x67, 0x61, 0x73, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x44, 0x0a, 0x1b, 
0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0x63, 0x0a, 0x19, 0x47, + 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x46, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x11, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x46, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x11, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x8c, 0x01, - 0x0a, 0x15, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x31, 0x0a, 0x0d, 0x65, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x65, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x0c, 0x65, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x65, 0x65, 0x12, 0x40, 0x0a, 0x15, 0x64, 0x61, 
- 0x74, 0x61, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, - 0x66, 0x65, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x13, 0x64, 0x61, 0x74, 0x61, 0x41, 0x76, 0x61, - 0x69, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x46, 0x65, 0x65, 0x2a, 0xd6, 0x01, 0x0a, - 0x11, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x12, 0x1e, 0x0a, 0x1a, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, - 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x10, 0x00, 0x12, 0x1e, 0x0a, 0x1a, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, - 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, - 0x10, 0x01, 0x12, 0x22, 0x0a, 0x1e, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, - 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x43, 0x4f, 0x4e, 0x46, 0x49, - 0x52, 0x4d, 0x45, 0x44, 0x10, 0x02, 0x12, 0x20, 0x0a, 0x1c, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, - 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x46, 0x49, 0x4e, - 0x41, 0x4c, 0x49, 0x5a, 0x45, 0x44, 0x10, 0x03, 0x12, 0x1d, 0x0a, 0x19, 0x54, 0x52, 0x41, 0x4e, - 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x46, - 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x12, 0x1c, 0x0a, 0x18, 0x54, 0x52, 0x41, 0x4e, 0x53, - 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x46, 0x41, - 0x54, 0x41, 0x4c, 0x10, 0x05, 0x32, 0x85, 0x02, 0x0a, 0x0b, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x57, - 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x4d, 0x0a, 0x11, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x54, - 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x53, 0x75, 0x62, 0x6d, 
0x69, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x22, 0x00, 0x12, 0x5c, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x21, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x22, 0x00, 0x12, 0x49, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x43, 0x5a, - 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, - 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, - 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, - 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x22, 0x8c, 0x01, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x31, 0x0a, 0x0d, 0x65, 0x78, + 0x65, 0x63, 
0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x65, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, + 0x0c, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x65, 0x65, 0x12, 0x40, 0x0a, + 0x15, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x79, 0x5f, 0x66, 0x65, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x13, 0x64, 0x61, 0x74, 0x61, + 0x41, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x46, 0x65, 0x65, 0x2a, + 0xd6, 0x01, 0x0a, 0x11, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1e, 0x0a, 0x1a, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, + 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, + 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1e, 0x0a, 0x1a, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, + 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x45, 0x4e, 0x44, + 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x22, 0x0a, 0x1e, 0x54, 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, + 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x43, 0x4f, + 0x4e, 0x46, 0x49, 0x52, 0x4d, 0x45, 0x44, 0x10, 0x02, 0x12, 0x20, 0x0a, 0x1c, 0x54, 0x52, 0x41, + 0x4e, 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, + 0x46, 0x49, 0x4e, 0x41, 0x4c, 0x49, 0x5a, 0x45, 0x44, 0x10, 0x03, 0x12, 0x1d, 0x0a, 0x19, 0x54, + 0x52, 0x41, 0x4e, 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, + 0x53, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x12, 0x1c, 0x0a, 0x18, 0x54, 0x52, + 0x41, 0x4e, 0x53, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, + 0x5f, 0x46, 0x41, 0x54, 0x41, 0x4c, 0x10, 0x05, 
0x32, 0x88, 0x02, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, + 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x4d, 0x0a, 0x11, 0x53, + 0x75, 0x62, 0x6d, 0x69, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x5c, 0x0a, 0x14, 0x47, 0x65, + 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x12, 0x21, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, + 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x49, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x46, + 0x65, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x46, + 0x65, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x00, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, + 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, + 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 
0x6e, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( - file_chain_writer_proto_rawDescOnce sync.Once - file_chain_writer_proto_rawDescData = file_chain_writer_proto_rawDesc + file_contract_writer_proto_rawDescOnce sync.Once + file_contract_writer_proto_rawDescData = file_contract_writer_proto_rawDesc ) -func file_chain_writer_proto_rawDescGZIP() []byte { - file_chain_writer_proto_rawDescOnce.Do(func() { - file_chain_writer_proto_rawDescData = protoimpl.X.CompressGZIP(file_chain_writer_proto_rawDescData) +func file_contract_writer_proto_rawDescGZIP() []byte { + file_contract_writer_proto_rawDescOnce.Do(func() { + file_contract_writer_proto_rawDescData = protoimpl.X.CompressGZIP(file_contract_writer_proto_rawDescData) }) - return file_chain_writer_proto_rawDescData + return file_contract_writer_proto_rawDescData } -var file_chain_writer_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_chain_writer_proto_msgTypes = make([]protoimpl.MessageInfo, 5) -var file_chain_writer_proto_goTypes = []interface{}{ +var file_contract_writer_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_contract_writer_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_contract_writer_proto_goTypes = []interface{}{ (TransactionStatus)(0), // 0: loop.TransactionStatus (*SubmitTransactionRequest)(nil), // 1: loop.SubmitTransactionRequest (*TransactionMeta)(nil), // 2: loop.TransactionMeta @@ -498,7 +498,7 @@ var file_chain_writer_proto_goTypes = []interface{}{ (*BigInt)(nil), // 7: loop.BigInt (*emptypb.Empty)(nil), // 8: google.protobuf.Empty } -var file_chain_writer_proto_depIdxs = []int32{ +var file_contract_writer_proto_depIdxs = []int32{ 6, // 0: loop.SubmitTransactionRequest.params:type_name -> loop.VersionedBytes 2, // 1: loop.SubmitTransactionRequest.meta:type_name -> loop.TransactionMeta 7, // 2: loop.SubmitTransactionRequest.value:type_name -> loop.BigInt @@ -506,12 +506,12 @@ var 
file_chain_writer_proto_depIdxs = []int32{ 0, // 4: loop.GetTransactionStatusReply.transaction_status:type_name -> loop.TransactionStatus 7, // 5: loop.GetFeeComponentsReply.execution_fee:type_name -> loop.BigInt 7, // 6: loop.GetFeeComponentsReply.data_availability_fee:type_name -> loop.BigInt - 1, // 7: loop.ChainWriter.SubmitTransaction:input_type -> loop.SubmitTransactionRequest - 3, // 8: loop.ChainWriter.GetTransactionStatus:input_type -> loop.GetTransactionStatusRequest - 8, // 9: loop.ChainWriter.GetFeeComponents:input_type -> google.protobuf.Empty - 8, // 10: loop.ChainWriter.SubmitTransaction:output_type -> google.protobuf.Empty - 4, // 11: loop.ChainWriter.GetTransactionStatus:output_type -> loop.GetTransactionStatusReply - 5, // 12: loop.ChainWriter.GetFeeComponents:output_type -> loop.GetFeeComponentsReply + 1, // 7: loop.ContractWriter.SubmitTransaction:input_type -> loop.SubmitTransactionRequest + 3, // 8: loop.ContractWriter.GetTransactionStatus:input_type -> loop.GetTransactionStatusRequest + 8, // 9: loop.ContractWriter.GetFeeComponents:input_type -> google.protobuf.Empty + 8, // 10: loop.ContractWriter.SubmitTransaction:output_type -> google.protobuf.Empty + 4, // 11: loop.ContractWriter.GetTransactionStatus:output_type -> loop.GetTransactionStatusReply + 5, // 12: loop.ContractWriter.GetFeeComponents:output_type -> loop.GetFeeComponentsReply 10, // [10:13] is the sub-list for method output_type 7, // [7:10] is the sub-list for method input_type 7, // [7:7] is the sub-list for extension type_name @@ -519,15 +519,15 @@ var file_chain_writer_proto_depIdxs = []int32{ 0, // [0:7] is the sub-list for field type_name } -func init() { file_chain_writer_proto_init() } -func file_chain_writer_proto_init() { - if File_chain_writer_proto != nil { +func init() { file_contract_writer_proto_init() } +func file_contract_writer_proto_init() { + if File_contract_writer_proto != nil { return } file_codec_proto_init() file_relayer_proto_init() if 
!protoimpl.UnsafeEnabled { - file_chain_writer_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + file_contract_writer_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SubmitTransactionRequest); i { case 0: return &v.state @@ -539,7 +539,7 @@ func file_chain_writer_proto_init() { return nil } } - file_chain_writer_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + file_contract_writer_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*TransactionMeta); i { case 0: return &v.state @@ -551,7 +551,7 @@ func file_chain_writer_proto_init() { return nil } } - file_chain_writer_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + file_contract_writer_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetTransactionStatusRequest); i { case 0: return &v.state @@ -563,7 +563,7 @@ func file_chain_writer_proto_init() { return nil } } - file_chain_writer_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + file_contract_writer_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetTransactionStatusReply); i { case 0: return &v.state @@ -575,7 +575,7 @@ func file_chain_writer_proto_init() { return nil } } - file_chain_writer_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + file_contract_writer_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetFeeComponentsReply); i { case 0: return &v.state @@ -592,19 +592,19 @@ func file_chain_writer_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_chain_writer_proto_rawDesc, + RawDescriptor: file_contract_writer_proto_rawDesc, NumEnums: 1, NumMessages: 5, NumExtensions: 0, NumServices: 1, }, - GoTypes: file_chain_writer_proto_goTypes, - DependencyIndexes: 
file_chain_writer_proto_depIdxs, - EnumInfos: file_chain_writer_proto_enumTypes, - MessageInfos: file_chain_writer_proto_msgTypes, + GoTypes: file_contract_writer_proto_goTypes, + DependencyIndexes: file_contract_writer_proto_depIdxs, + EnumInfos: file_contract_writer_proto_enumTypes, + MessageInfos: file_contract_writer_proto_msgTypes, }.Build() - File_chain_writer_proto = out.File - file_chain_writer_proto_rawDesc = nil - file_chain_writer_proto_goTypes = nil - file_chain_writer_proto_depIdxs = nil + File_contract_writer_proto = out.File + file_contract_writer_proto_rawDesc = nil + file_contract_writer_proto_goTypes = nil + file_contract_writer_proto_depIdxs = nil } diff --git a/pkg/loop/internal/pb/chain_writer.proto b/pkg/loop/internal/pb/contract_writer.proto similarity index 86% rename from pkg/loop/internal/pb/chain_writer.proto rename to pkg/loop/internal/pb/contract_writer.proto index db03e2a7c..dd33f99cd 100644 --- a/pkg/loop/internal/pb/chain_writer.proto +++ b/pkg/loop/internal/pb/contract_writer.proto @@ -8,7 +8,7 @@ import "codec.proto"; import "relayer.proto"; import "google/protobuf/empty.proto"; -service ChainWriter { +service ContractWriter { rpc SubmitTransaction(SubmitTransactionRequest) returns (google.protobuf.Empty) {} rpc GetTransactionStatus(GetTransactionStatusRequest) returns (GetTransactionStatusReply) {} rpc GetFeeComponents(google.protobuf.Empty) returns (GetFeeComponentsReply) {} @@ -29,7 +29,7 @@ message TransactionMeta { BigInt gas_limit = 2; } -// GetTransactionStatusRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ChainWriter.GetTransactionStatus]. +// GetTransactionStatusRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractWriter.GetTransactionStatus]. 
message GetTransactionStatusRequest { string transaction_id = 1; } @@ -45,12 +45,12 @@ enum TransactionStatus { TRANSACTION_STATUS_FATAL = 5; } -// GetTransactionStatusReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ChainWriter.GetTransactionStatus]. +// GetTransactionStatusReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractWriter.GetTransactionStatus]. message GetTransactionStatusReply { TransactionStatus transaction_status = 1; } -// GetFeeComponentsReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ChainWriter.GetFeeComponents]. +// GetFeeComponentsReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/types.ContractWriter.GetFeeComponents]. message GetFeeComponentsReply { BigInt execution_fee = 1; BigInt data_availability_fee = 2; diff --git a/pkg/loop/internal/pb/contract_writer_grpc.pb.go b/pkg/loop/internal/pb/contract_writer_grpc.pb.go new file mode 100644 index 000000000..4f45583fd --- /dev/null +++ b/pkg/loop/internal/pb/contract_writer_grpc.pb.go @@ -0,0 +1,184 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.1 +// source: contract_writer.proto + +package pb + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. 
+const _ = grpc.SupportPackageIsVersion7 + +const ( + ContractWriter_SubmitTransaction_FullMethodName = "/loop.ContractWriter/SubmitTransaction" + ContractWriter_GetTransactionStatus_FullMethodName = "/loop.ContractWriter/GetTransactionStatus" + ContractWriter_GetFeeComponents_FullMethodName = "/loop.ContractWriter/GetFeeComponents" +) + +// ContractWriterClient is the client API for ContractWriter service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ContractWriterClient interface { + SubmitTransaction(ctx context.Context, in *SubmitTransactionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + GetTransactionStatus(ctx context.Context, in *GetTransactionStatusRequest, opts ...grpc.CallOption) (*GetTransactionStatusReply, error) + GetFeeComponents(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetFeeComponentsReply, error) +} + +type contractWriterClient struct { + cc grpc.ClientConnInterface +} + +func NewContractWriterClient(cc grpc.ClientConnInterface) ContractWriterClient { + return &contractWriterClient{cc} +} + +func (c *contractWriterClient) SubmitTransaction(ctx context.Context, in *SubmitTransactionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ContractWriter_SubmitTransaction_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contractWriterClient) GetTransactionStatus(ctx context.Context, in *GetTransactionStatusRequest, opts ...grpc.CallOption) (*GetTransactionStatusReply, error) { + out := new(GetTransactionStatusReply) + err := c.cc.Invoke(ctx, ContractWriter_GetTransactionStatus_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *contractWriterClient) GetFeeComponents(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetFeeComponentsReply, error) { + out := new(GetFeeComponentsReply) + err := c.cc.Invoke(ctx, ContractWriter_GetFeeComponents_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContractWriterServer is the server API for ContractWriter service. +// All implementations must embed UnimplementedContractWriterServer +// for forward compatibility +type ContractWriterServer interface { + SubmitTransaction(context.Context, *SubmitTransactionRequest) (*emptypb.Empty, error) + GetTransactionStatus(context.Context, *GetTransactionStatusRequest) (*GetTransactionStatusReply, error) + GetFeeComponents(context.Context, *emptypb.Empty) (*GetFeeComponentsReply, error) + mustEmbedUnimplementedContractWriterServer() +} + +// UnimplementedContractWriterServer must be embedded to have forward compatible implementations. +type UnimplementedContractWriterServer struct { +} + +func (UnimplementedContractWriterServer) SubmitTransaction(context.Context, *SubmitTransactionRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method SubmitTransaction not implemented") +} +func (UnimplementedContractWriterServer) GetTransactionStatus(context.Context, *GetTransactionStatusRequest) (*GetTransactionStatusReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTransactionStatus not implemented") +} +func (UnimplementedContractWriterServer) GetFeeComponents(context.Context, *emptypb.Empty) (*GetFeeComponentsReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeeComponents not implemented") +} +func (UnimplementedContractWriterServer) mustEmbedUnimplementedContractWriterServer() {} + +// UnsafeContractWriterServer may be embedded to opt out of forward compatibility for this service. 
+// Use of this interface is not recommended, as added methods to ContractWriterServer will +// result in compilation errors. +type UnsafeContractWriterServer interface { + mustEmbedUnimplementedContractWriterServer() +} + +func RegisterContractWriterServer(s grpc.ServiceRegistrar, srv ContractWriterServer) { + s.RegisterService(&ContractWriter_ServiceDesc, srv) +} + +func _ContractWriter_SubmitTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SubmitTransactionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContractWriterServer).SubmitTransaction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ContractWriter_SubmitTransaction_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContractWriterServer).SubmitTransaction(ctx, req.(*SubmitTransactionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContractWriter_GetTransactionStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTransactionStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContractWriterServer).GetTransactionStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ContractWriter_GetTransactionStatus_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContractWriterServer).GetTransactionStatus(ctx, req.(*GetTransactionStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContractWriter_GetFeeComponents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(emptypb.Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContractWriterServer).GetFeeComponents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ContractWriter_GetFeeComponents_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContractWriterServer).GetFeeComponents(ctx, req.(*emptypb.Empty)) + } + return interceptor(ctx, in, info, handler) +} + +// ContractWriter_ServiceDesc is the grpc.ServiceDesc for ContractWriter service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ContractWriter_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "loop.ContractWriter", + HandlerType: (*ContractWriterServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SubmitTransaction", + Handler: _ContractWriter_SubmitTransaction_Handler, + }, + { + MethodName: "GetTransactionStatus", + Handler: _ContractWriter_GetTransactionStatus_Handler, + }, + { + MethodName: "GetFeeComponents", + Handler: _ContractWriter_GetFeeComponents_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "contract_writer.proto", +} diff --git a/pkg/loop/internal/pb/generate.go b/pkg/loop/internal/pb/generate.go index 05dbfbde4..1c3067891 100644 --- a/pkg/loop/internal/pb/generate.go +++ b/pkg/loop/internal/pb/generate.go @@ -5,7 +5,7 @@ //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative telemetry.proto //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative pipeline_runner.proto //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative contract_reader.proto -//go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. 
--go-grpc_opt=paths=source_relative chain_writer.proto +//go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative contract_writer.proto //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative median_datasource.proto //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative codec.proto //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative keyvalue_store.proto diff --git a/pkg/loop/internal/pb/keystore/generate.go b/pkg/loop/internal/pb/keystore/generate.go new file mode 100644 index 000000000..e6cc69dcc --- /dev/null +++ b/pkg/loop/internal/pb/keystore/generate.go @@ -0,0 +1,2 @@ +//go:generate protoc --proto_path=.:..:. --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative keystore.proto +package keystorepb diff --git a/pkg/loop/internal/pb/keystore/keystore.pb.go b/pkg/loop/internal/pb/keystore/keystore.pb.go new file mode 100644 index 000000000..f385c3de4 --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore.pb.go @@ -0,0 +1,1891 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc v4.25.1 +// source: keystore.proto + +package keystorepb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SignRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignRequest) Reset() { + *x = SignRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignRequest) ProtoMessage() {} + +func (x *SignRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignRequest.ProtoReflect.Descriptor instead. 
+func (*SignRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{0} +} + +func (x *SignRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *SignRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type SignResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignResponse) Reset() { + *x = SignResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignResponse) ProtoMessage() {} + +func (x *SignResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignResponse.ProtoReflect.Descriptor instead. 
+func (*SignResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{1} +} + +func (x *SignResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type SignBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data [][]byte `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignBatchRequest) Reset() { + *x = SignBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignBatchRequest) ProtoMessage() {} + +func (x *SignBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignBatchRequest.ProtoReflect.Descriptor instead. 
+func (*SignBatchRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{2} +} + +func (x *SignBatchRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *SignBatchRequest) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type SignBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data [][]byte `protobuf:"bytes,1,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignBatchResponse) Reset() { + *x = SignBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignBatchResponse) ProtoMessage() {} + +func (x *SignBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignBatchResponse.ProtoReflect.Descriptor instead. 
+func (*SignBatchResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{3} +} + +func (x *SignBatchResponse) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *VerifyRequest) Reset() { + *x = VerifyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyRequest) ProtoMessage() {} + +func (x *VerifyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyRequest.ProtoReflect.Descriptor instead. 
+func (*VerifyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{4} +} + +func (x *VerifyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *VerifyRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Valid bool `protobuf:"varint,1,opt,name=valid,proto3" json:"valid,omitempty"` +} + +func (x *VerifyResponse) Reset() { + *x = VerifyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyResponse) ProtoMessage() {} + +func (x *VerifyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyResponse.ProtoReflect.Descriptor instead. 
+func (*VerifyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{5} +} + +func (x *VerifyResponse) GetValid() bool { + if x != nil { + return x.Valid + } + return false +} + +type VerifyBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data [][]byte `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *VerifyBatchRequest) Reset() { + *x = VerifyBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyBatchRequest) ProtoMessage() {} + +func (x *VerifyBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyBatchRequest.ProtoReflect.Descriptor instead. 
+func (*VerifyBatchRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{6} +} + +func (x *VerifyBatchRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *VerifyBatchRequest) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Valid []bool `protobuf:"varint,1,rep,packed,name=valid,proto3" json:"valid,omitempty"` +} + +func (x *VerifyBatchResponse) Reset() { + *x = VerifyBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyBatchResponse) ProtoMessage() {} + +func (x *VerifyBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyBatchResponse.ProtoReflect.Descriptor instead. 
+func (*VerifyBatchResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{7} +} + +func (x *VerifyBatchResponse) GetValid() []bool { + if x != nil { + return x.Valid + } + return nil +} + +type ListKeysRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ListKeysRequest) Reset() { + *x = ListKeysRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListKeysRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListKeysRequest) ProtoMessage() {} + +func (x *ListKeysRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListKeysRequest.ProtoReflect.Descriptor instead. 
+func (*ListKeysRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{8} +} + +func (x *ListKeysRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type ListKeysResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyIDs [][]byte `protobuf:"bytes,1,rep,name=keyIDs,proto3" json:"keyIDs,omitempty"` +} + +func (x *ListKeysResponse) Reset() { + *x = ListKeysResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListKeysResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListKeysResponse) ProtoMessage() {} + +func (x *ListKeysResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListKeysResponse.ProtoReflect.Descriptor instead. 
+func (*ListKeysResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{9} +} + +func (x *ListKeysResponse) GetKeyIDs() [][]byte { + if x != nil { + return x.KeyIDs + } + return nil +} + +type RunUDFRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + KeyID []byte `protobuf:"bytes,2,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RunUDFRequest) Reset() { + *x = RunUDFRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RunUDFRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RunUDFRequest) ProtoMessage() {} + +func (x *RunUDFRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RunUDFRequest.ProtoReflect.Descriptor instead. 
+func (*RunUDFRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{10} +} + +func (x *RunUDFRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *RunUDFRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *RunUDFRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type RunUDFResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RunUDFResponse) Reset() { + *x = RunUDFResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RunUDFResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RunUDFResponse) ProtoMessage() {} + +func (x *RunUDFResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RunUDFResponse.ProtoReflect.Descriptor instead. 
+func (*RunUDFResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{11} +} + +func (x *RunUDFResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type ImportKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyType string `protobuf:"bytes,1,opt,name=keyType,proto3" json:"keyType,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ImportKeyRequest) Reset() { + *x = ImportKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ImportKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ImportKeyRequest) ProtoMessage() {} + +func (x *ImportKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ImportKeyRequest.ProtoReflect.Descriptor instead. 
+func (*ImportKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{12} +} + +func (x *ImportKeyRequest) GetKeyType() string { + if x != nil { + return x.KeyType + } + return "" +} + +func (x *ImportKeyRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +func (x *ImportKeyRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type ImportKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ImportKeyResponse) Reset() { + *x = ImportKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ImportKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ImportKeyResponse) ProtoMessage() {} + +func (x *ImportKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ImportKeyResponse.ProtoReflect.Descriptor instead. 
+func (*ImportKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{13} +} + +func (x *ImportKeyResponse) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ExportKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ExportKeyRequest) Reset() { + *x = ExportKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExportKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExportKeyRequest) ProtoMessage() {} + +func (x *ExportKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExportKeyRequest.ProtoReflect.Descriptor instead. 
+func (*ExportKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{14} +} + +func (x *ExportKeyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ExportKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *ExportKeyResponse) Reset() { + *x = ExportKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExportKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExportKeyResponse) ProtoMessage() {} + +func (x *ExportKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExportKeyResponse.ProtoReflect.Descriptor instead. 
+func (*ExportKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{15} +} + +func (x *ExportKeyResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type CreateKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyType string `protobuf:"bytes,1,opt,name=keyType,proto3" json:"keyType,omitempty"` + Tags []string `protobuf:"bytes,2,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *CreateKeyRequest) Reset() { + *x = CreateKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateKeyRequest) ProtoMessage() {} + +func (x *CreateKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateKeyRequest.ProtoReflect.Descriptor instead. 
+func (*CreateKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{16} +} + +func (x *CreateKeyRequest) GetKeyType() string { + if x != nil { + return x.KeyType + } + return "" +} + +func (x *CreateKeyRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type CreateKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *CreateKeyResponse) Reset() { + *x = CreateKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateKeyResponse) ProtoMessage() {} + +func (x *CreateKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateKeyResponse.ProtoReflect.Descriptor instead. 
+func (*CreateKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{17} +} + +func (x *CreateKeyResponse) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type DeleteKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *DeleteKeyRequest) Reset() { + *x = DeleteKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteKeyRequest) ProtoMessage() {} + +func (x *DeleteKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteKeyRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{18} +} + +func (x *DeleteKeyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type DeleteKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteKeyResponse) Reset() { + *x = DeleteKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteKeyResponse) ProtoMessage() {} + +func (x *DeleteKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteKeyResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{19} +} + +type AddTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *AddTagRequest) Reset() { + *x = AddTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTagRequest) ProtoMessage() {} + +func (x *AddTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTagRequest.ProtoReflect.Descriptor instead. 
+func (*AddTagRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{20} +} + +func (x *AddTagRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *AddTagRequest) GetTag() string { + if x != nil { + return x.Tag + } + return "" +} + +type AddTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *AddTagResponse) Reset() { + *x = AddTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTagResponse) ProtoMessage() {} + +func (x *AddTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTagResponse.ProtoReflect.Descriptor instead. 
+func (*AddTagResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{21} +} + +type RemoveTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *RemoveTagRequest) Reset() { + *x = RemoveTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveTagRequest) ProtoMessage() {} + +func (x *RemoveTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveTagRequest.ProtoReflect.Descriptor instead. 
+func (*RemoveTagRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{22} +} + +func (x *RemoveTagRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *RemoveTagRequest) GetTag() string { + if x != nil { + return x.Tag + } + return "" +} + +type RemoveTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RemoveTagResponse) Reset() { + *x = RemoveTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveTagResponse) ProtoMessage() {} + +func (x *RemoveTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveTagResponse.ProtoReflect.Descriptor instead. 
+func (*RemoveTagResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{23} +} + +type ListTagsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ListTagsRequest) Reset() { + *x = ListTagsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTagsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTagsRequest) ProtoMessage() {} + +func (x *ListTagsRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTagsRequest.ProtoReflect.Descriptor instead. 
+func (*ListTagsRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{24} +} + +func (x *ListTagsRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ListTagsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ListTagsResponse) Reset() { + *x = ListTagsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTagsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTagsResponse) ProtoMessage() {} + +func (x *ListTagsResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTagsResponse.ProtoReflect.Descriptor instead. 
+func (*ListTagsResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{25} +} + +func (x *ListTagsResponse) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +var File_keystore_proto protoreflect.FileDescriptor + +var file_keystore_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x19, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, + 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0x37, 0x0a, 0x0b, 0x53, + 0x69, 0x67, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, + 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, + 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x22, 0x0a, 0x0c, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x3c, 0x0a, 0x10, 0x53, 0x69, 0x67, 0x6e, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x27, 0x0a, 0x11, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, + 0x39, 0x0a, 0x0d, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x05, 0x6b, 
0x65, 0x79, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x26, 0x0a, 0x0e, 0x56, 0x65, + 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x22, 0x3e, 0x0a, 0x12, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, + 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, + 0x74, 0x61, 0x22, 0x2b, 0x0a, 0x13, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x22, + 0x25, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x2a, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, + 0x79, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6b, 0x65, + 0x79, 0x49, 0x44, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x06, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x73, 0x22, 0x4d, 0x0a, 0x0d, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 
0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, + 0x61, 0x22, 0x24, 0x0a, 0x0e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x54, 0x0a, 0x10, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6b, + 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6b, 0x65, + 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x29, 0x0a, + 0x11, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x28, 0x0a, 0x10, 0x45, 0x78, 0x70, 0x6f, + 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x22, 0x27, 0x0a, 0x11, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x40, 0x0a, 0x10, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x18, 0x0a, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 
0x29, 0x0a, + 0x11, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x28, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x22, 0x13, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x37, 0x0a, 0x0d, 0x41, 0x64, 0x64, 0x54, 0x61, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x10, + 0x0a, 0x03, 0x74, 0x61, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x74, 0x61, 0x67, + 0x22, 0x10, 0x0a, 0x0e, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x3a, 0x0a, 0x10, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x10, 0x0a, 0x03, + 0x74, 0x61, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x74, 0x61, 0x67, 0x22, 0x13, + 0x0a, 0x11, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x27, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x26, 0x0a, 0x10, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x12, 0x0a, 0x04, 
0x74, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, + 0x74, 0x61, 0x67, 0x73, 0x32, 0xa8, 0x0a, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x12, 0x57, 0x0a, 0x04, 0x53, 0x69, 0x67, 0x6e, 0x12, 0x26, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x53, 0x69, + 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x06, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x12, 0x28, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, + 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x6c, 0x0a, 0x0b, 0x56, 0x65, 0x72, 0x69, 0x66, 
0x79, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x12, 0x2d, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, + 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x2e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, + 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, + 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x63, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x12, 0x2a, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, + 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, + 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, + 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 
0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, + 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x06, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x12, 0x28, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 
0x74, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x61, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, + 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, + 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, + 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x08, 0x4c, + 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x12, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x5d, 0x0a, 0x06, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x12, 0x28, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, + 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 
0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, + 0x57, 0x5a, 0x55, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, + 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, + 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2f, 0x70, 0x62, 0x2f, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3b, 0x6b, 0x65, + 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_keystore_proto_rawDescOnce sync.Once + file_keystore_proto_rawDescData = file_keystore_proto_rawDesc +) + +func file_keystore_proto_rawDescGZIP() []byte { + file_keystore_proto_rawDescOnce.Do(func() { + file_keystore_proto_rawDescData = protoimpl.X.CompressGZIP(file_keystore_proto_rawDescData) + }) + return file_keystore_proto_rawDescData +} + +var file_keystore_proto_msgTypes = make([]protoimpl.MessageInfo, 26) +var file_keystore_proto_goTypes = []interface{}{ + (*SignRequest)(nil), // 0: loop.internal.pb.keystore.SignRequest + (*SignResponse)(nil), // 1: loop.internal.pb.keystore.SignResponse + (*SignBatchRequest)(nil), // 2: loop.internal.pb.keystore.SignBatchRequest + (*SignBatchResponse)(nil), // 3: loop.internal.pb.keystore.SignBatchResponse + (*VerifyRequest)(nil), // 4: loop.internal.pb.keystore.VerifyRequest + (*VerifyResponse)(nil), // 5: loop.internal.pb.keystore.VerifyResponse + (*VerifyBatchRequest)(nil), // 6: loop.internal.pb.keystore.VerifyBatchRequest + (*VerifyBatchResponse)(nil), // 7: loop.internal.pb.keystore.VerifyBatchResponse + (*ListKeysRequest)(nil), // 8: loop.internal.pb.keystore.ListKeysRequest + (*ListKeysResponse)(nil), // 
9: loop.internal.pb.keystore.ListKeysResponse + (*RunUDFRequest)(nil), // 10: loop.internal.pb.keystore.RunUDFRequest + (*RunUDFResponse)(nil), // 11: loop.internal.pb.keystore.RunUDFResponse + (*ImportKeyRequest)(nil), // 12: loop.internal.pb.keystore.ImportKeyRequest + (*ImportKeyResponse)(nil), // 13: loop.internal.pb.keystore.ImportKeyResponse + (*ExportKeyRequest)(nil), // 14: loop.internal.pb.keystore.ExportKeyRequest + (*ExportKeyResponse)(nil), // 15: loop.internal.pb.keystore.ExportKeyResponse + (*CreateKeyRequest)(nil), // 16: loop.internal.pb.keystore.CreateKeyRequest + (*CreateKeyResponse)(nil), // 17: loop.internal.pb.keystore.CreateKeyResponse + (*DeleteKeyRequest)(nil), // 18: loop.internal.pb.keystore.DeleteKeyRequest + (*DeleteKeyResponse)(nil), // 19: loop.internal.pb.keystore.DeleteKeyResponse + (*AddTagRequest)(nil), // 20: loop.internal.pb.keystore.AddTagRequest + (*AddTagResponse)(nil), // 21: loop.internal.pb.keystore.AddTagResponse + (*RemoveTagRequest)(nil), // 22: loop.internal.pb.keystore.RemoveTagRequest + (*RemoveTagResponse)(nil), // 23: loop.internal.pb.keystore.RemoveTagResponse + (*ListTagsRequest)(nil), // 24: loop.internal.pb.keystore.ListTagsRequest + (*ListTagsResponse)(nil), // 25: loop.internal.pb.keystore.ListTagsResponse +} +var file_keystore_proto_depIdxs = []int32{ + 0, // 0: loop.internal.pb.keystore.Keystore.Sign:input_type -> loop.internal.pb.keystore.SignRequest + 2, // 1: loop.internal.pb.keystore.Keystore.SignBatch:input_type -> loop.internal.pb.keystore.SignBatchRequest + 4, // 2: loop.internal.pb.keystore.Keystore.Verify:input_type -> loop.internal.pb.keystore.VerifyRequest + 6, // 3: loop.internal.pb.keystore.Keystore.VerifyBatch:input_type -> loop.internal.pb.keystore.VerifyBatchRequest + 8, // 4: loop.internal.pb.keystore.Keystore.ListKeys:input_type -> loop.internal.pb.keystore.ListKeysRequest + 12, // 5: loop.internal.pb.keystore.Keystore.ImportKey:input_type -> loop.internal.pb.keystore.ImportKeyRequest + 14, 
// 6: loop.internal.pb.keystore.Keystore.ExportKey:input_type -> loop.internal.pb.keystore.ExportKeyRequest + 16, // 7: loop.internal.pb.keystore.Keystore.CreateKey:input_type -> loop.internal.pb.keystore.CreateKeyRequest + 18, // 8: loop.internal.pb.keystore.Keystore.DeleteKey:input_type -> loop.internal.pb.keystore.DeleteKeyRequest + 20, // 9: loop.internal.pb.keystore.Keystore.AddTag:input_type -> loop.internal.pb.keystore.AddTagRequest + 22, // 10: loop.internal.pb.keystore.Keystore.RemoveTag:input_type -> loop.internal.pb.keystore.RemoveTagRequest + 24, // 11: loop.internal.pb.keystore.Keystore.ListTags:input_type -> loop.internal.pb.keystore.ListTagsRequest + 10, // 12: loop.internal.pb.keystore.Keystore.RunUDF:input_type -> loop.internal.pb.keystore.RunUDFRequest + 1, // 13: loop.internal.pb.keystore.Keystore.Sign:output_type -> loop.internal.pb.keystore.SignResponse + 3, // 14: loop.internal.pb.keystore.Keystore.SignBatch:output_type -> loop.internal.pb.keystore.SignBatchResponse + 5, // 15: loop.internal.pb.keystore.Keystore.Verify:output_type -> loop.internal.pb.keystore.VerifyResponse + 7, // 16: loop.internal.pb.keystore.Keystore.VerifyBatch:output_type -> loop.internal.pb.keystore.VerifyBatchResponse + 9, // 17: loop.internal.pb.keystore.Keystore.ListKeys:output_type -> loop.internal.pb.keystore.ListKeysResponse + 13, // 18: loop.internal.pb.keystore.Keystore.ImportKey:output_type -> loop.internal.pb.keystore.ImportKeyResponse + 15, // 19: loop.internal.pb.keystore.Keystore.ExportKey:output_type -> loop.internal.pb.keystore.ExportKeyResponse + 17, // 20: loop.internal.pb.keystore.Keystore.CreateKey:output_type -> loop.internal.pb.keystore.CreateKeyResponse + 19, // 21: loop.internal.pb.keystore.Keystore.DeleteKey:output_type -> loop.internal.pb.keystore.DeleteKeyResponse + 21, // 22: loop.internal.pb.keystore.Keystore.AddTag:output_type -> loop.internal.pb.keystore.AddTagResponse + 23, // 23: loop.internal.pb.keystore.Keystore.RemoveTag:output_type -> 
loop.internal.pb.keystore.RemoveTagResponse + 25, // 24: loop.internal.pb.keystore.Keystore.ListTags:output_type -> loop.internal.pb.keystore.ListTagsResponse + 11, // 25: loop.internal.pb.keystore.Keystore.RunUDF:output_type -> loop.internal.pb.keystore.RunUDFResponse + 13, // [13:26] is the sub-list for method output_type + 0, // [0:13] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_keystore_proto_init() } +func file_keystore_proto_init() { + if File_keystore_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_keystore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignBatchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_keystore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyBatchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListKeysRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListKeysResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RunUDFRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RunUDFResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ImportKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return 
&v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ImportKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExportKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExportKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddTagRequest); i { + case 0: + return &v.state + 
case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddTagResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RemoveTagRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RemoveTagResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListTagsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListTagsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_keystore_proto_rawDesc, + NumEnums: 0, + NumMessages: 26, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_keystore_proto_goTypes, + DependencyIndexes: file_keystore_proto_depIdxs, + MessageInfos: file_keystore_proto_msgTypes, + }.Build() + File_keystore_proto = out.File + file_keystore_proto_rawDesc = nil + file_keystore_proto_goTypes = nil + file_keystore_proto_depIdxs = nil +} diff --git a/pkg/loop/internal/pb/keystore/keystore.proto 
b/pkg/loop/internal/pb/keystore/keystore.proto new file mode 100644 index 000000000..ba6013b44 --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore.proto @@ -0,0 +1,134 @@ +syntax = "proto3"; + +option go_package = "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb/keystore;keystorepb"; + +package loop.internal.pb.keystore; + +service Keystore { + rpc Sign(SignRequest) returns (SignResponse); + rpc SignBatch(SignBatchRequest)returns (SignBatchResponse); + rpc Verify(VerifyRequest)returns (VerifyResponse); + rpc VerifyBatch(VerifyBatchRequest)returns (VerifyBatchResponse); + + rpc ListKeys(ListKeysRequest)returns (ListKeysResponse); + rpc ImportKey(ImportKeyRequest)returns(ImportKeyResponse); + rpc ExportKey(ExportKeyRequest)returns(ExportKeyResponse); + + rpc CreateKey(CreateKeyRequest)returns(CreateKeyResponse); + rpc DeleteKey(DeleteKeyRequest)returns(DeleteKeyResponse); + + rpc AddTag(AddTagRequest)returns(AddTagResponse); + rpc RemoveTag(RemoveTagRequest)returns(RemoveTagResponse); + rpc ListTags(ListTagsRequest)returns(ListTagsResponse); + + rpc RunUDF(RunUDFRequest)returns (RunUDFResponse); +} + +message SignRequest { + bytes keyID = 1; + bytes data = 2; +} + +message SignResponse { + bytes data = 1; +} + +message SignBatchRequest { + bytes keyID = 1; + repeated bytes data = 2; +} + +message SignBatchResponse { + repeated bytes data = 1; +} + +message VerifyRequest { + bytes keyID = 1; + bytes data = 2; +} + +message VerifyResponse { + bool valid = 1; +} + +message VerifyBatchRequest { + bytes keyID = 1; + repeated bytes data = 2; +} + +message VerifyBatchResponse { + repeated bool valid = 1; +} + +message ListKeysRequest { + repeated string tags = 1; +} + +message ListKeysResponse { + repeated bytes keyIDs = 1; +} + +message RunUDFRequest { + string name = 1; + bytes keyID = 2; + bytes data = 3; +} + +message RunUDFResponse { + bytes data = 1; +} + +message ImportKeyRequest { + string keyType = 1; + bytes data = 2; + repeated string 
tags = 3; +} + +message ImportKeyResponse { + bytes keyID = 1; +} + +message ExportKeyRequest { + bytes keyID = 1; +} + +message ExportKeyResponse { + bytes data = 1; +} + +message CreateKeyRequest { + string keyType = 1; + repeated string tags = 2; +} + +message CreateKeyResponse{ + bytes keyID =1; +} + +message DeleteKeyRequest{ + bytes keyID =1; +} + +message DeleteKeyResponse{} + +message AddTagRequest{ + bytes keyID=1; + string tag =2; +} + +message AddTagResponse{} + +message RemoveTagRequest{ + bytes keyID =1; + string tag =2; +} + +message RemoveTagResponse{} + +message ListTagsRequest{ + bytes keyID=1; +} + +message ListTagsResponse{ + repeated string tags=1; +} diff --git a/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go b/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go new file mode 100644 index 000000000..6752c21ee --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go @@ -0,0 +1,553 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.1 +// source: keystore.proto + +package keystorepb + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. 
+const _ = grpc.SupportPackageIsVersion7 + +const ( + Keystore_Sign_FullMethodName = "/loop.internal.pb.keystore.Keystore/Sign" + Keystore_SignBatch_FullMethodName = "/loop.internal.pb.keystore.Keystore/SignBatch" + Keystore_Verify_FullMethodName = "/loop.internal.pb.keystore.Keystore/Verify" + Keystore_VerifyBatch_FullMethodName = "/loop.internal.pb.keystore.Keystore/VerifyBatch" + Keystore_ListKeys_FullMethodName = "/loop.internal.pb.keystore.Keystore/ListKeys" + Keystore_ImportKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/ImportKey" + Keystore_ExportKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/ExportKey" + Keystore_CreateKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/CreateKey" + Keystore_DeleteKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/DeleteKey" + Keystore_AddTag_FullMethodName = "/loop.internal.pb.keystore.Keystore/AddTag" + Keystore_RemoveTag_FullMethodName = "/loop.internal.pb.keystore.Keystore/RemoveTag" + Keystore_ListTags_FullMethodName = "/loop.internal.pb.keystore.Keystore/ListTags" + Keystore_RunUDF_FullMethodName = "/loop.internal.pb.keystore.Keystore/RunUDF" +) + +// KeystoreClient is the client API for Keystore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type KeystoreClient interface { + Sign(ctx context.Context, in *SignRequest, opts ...grpc.CallOption) (*SignResponse, error) + SignBatch(ctx context.Context, in *SignBatchRequest, opts ...grpc.CallOption) (*SignBatchResponse, error) + Verify(ctx context.Context, in *VerifyRequest, opts ...grpc.CallOption) (*VerifyResponse, error) + VerifyBatch(ctx context.Context, in *VerifyBatchRequest, opts ...grpc.CallOption) (*VerifyBatchResponse, error) + ListKeys(ctx context.Context, in *ListKeysRequest, opts ...grpc.CallOption) (*ListKeysResponse, error) + ImportKey(ctx context.Context, in *ImportKeyRequest, opts ...grpc.CallOption) (*ImportKeyResponse, error) + ExportKey(ctx context.Context, in *ExportKeyRequest, opts ...grpc.CallOption) (*ExportKeyResponse, error) + CreateKey(ctx context.Context, in *CreateKeyRequest, opts ...grpc.CallOption) (*CreateKeyResponse, error) + DeleteKey(ctx context.Context, in *DeleteKeyRequest, opts ...grpc.CallOption) (*DeleteKeyResponse, error) + AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) + RemoveTag(ctx context.Context, in *RemoveTagRequest, opts ...grpc.CallOption) (*RemoveTagResponse, error) + ListTags(ctx context.Context, in *ListTagsRequest, opts ...grpc.CallOption) (*ListTagsResponse, error) + RunUDF(ctx context.Context, in *RunUDFRequest, opts ...grpc.CallOption) (*RunUDFResponse, error) +} + +type keystoreClient struct { + cc grpc.ClientConnInterface +} + +func NewKeystoreClient(cc grpc.ClientConnInterface) KeystoreClient { + return &keystoreClient{cc} +} + +func (c *keystoreClient) Sign(ctx context.Context, in *SignRequest, opts ...grpc.CallOption) (*SignResponse, error) { + out := new(SignResponse) + err := c.cc.Invoke(ctx, Keystore_Sign_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) SignBatch(ctx context.Context, in *SignBatchRequest, opts ...grpc.CallOption) (*SignBatchResponse, error) { + out := new(SignBatchResponse) + err := c.cc.Invoke(ctx, Keystore_SignBatch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) Verify(ctx context.Context, in *VerifyRequest, opts ...grpc.CallOption) (*VerifyResponse, error) { + out := new(VerifyResponse) + err := c.cc.Invoke(ctx, Keystore_Verify_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) VerifyBatch(ctx context.Context, in *VerifyBatchRequest, opts ...grpc.CallOption) (*VerifyBatchResponse, error) { + out := new(VerifyBatchResponse) + err := c.cc.Invoke(ctx, Keystore_VerifyBatch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ListKeys(ctx context.Context, in *ListKeysRequest, opts ...grpc.CallOption) (*ListKeysResponse, error) { + out := new(ListKeysResponse) + err := c.cc.Invoke(ctx, Keystore_ListKeys_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ImportKey(ctx context.Context, in *ImportKeyRequest, opts ...grpc.CallOption) (*ImportKeyResponse, error) { + out := new(ImportKeyResponse) + err := c.cc.Invoke(ctx, Keystore_ImportKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ExportKey(ctx context.Context, in *ExportKeyRequest, opts ...grpc.CallOption) (*ExportKeyResponse, error) { + out := new(ExportKeyResponse) + err := c.cc.Invoke(ctx, Keystore_ExportKey_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) CreateKey(ctx context.Context, in *CreateKeyRequest, opts ...grpc.CallOption) (*CreateKeyResponse, error) { + out := new(CreateKeyResponse) + err := c.cc.Invoke(ctx, Keystore_CreateKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) DeleteKey(ctx context.Context, in *DeleteKeyRequest, opts ...grpc.CallOption) (*DeleteKeyResponse, error) { + out := new(DeleteKeyResponse) + err := c.cc.Invoke(ctx, Keystore_DeleteKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) { + out := new(AddTagResponse) + err := c.cc.Invoke(ctx, Keystore_AddTag_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) RemoveTag(ctx context.Context, in *RemoveTagRequest, opts ...grpc.CallOption) (*RemoveTagResponse, error) { + out := new(RemoveTagResponse) + err := c.cc.Invoke(ctx, Keystore_RemoveTag_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ListTags(ctx context.Context, in *ListTagsRequest, opts ...grpc.CallOption) (*ListTagsResponse, error) { + out := new(ListTagsResponse) + err := c.cc.Invoke(ctx, Keystore_ListTags_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) RunUDF(ctx context.Context, in *RunUDFRequest, opts ...grpc.CallOption) (*RunUDFResponse, error) { + out := new(RunUDFResponse) + err := c.cc.Invoke(ctx, Keystore_RunUDF_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeystoreServer is the server API for Keystore service. 
+// All implementations must embed UnimplementedKeystoreServer +// for forward compatibility +type KeystoreServer interface { + Sign(context.Context, *SignRequest) (*SignResponse, error) + SignBatch(context.Context, *SignBatchRequest) (*SignBatchResponse, error) + Verify(context.Context, *VerifyRequest) (*VerifyResponse, error) + VerifyBatch(context.Context, *VerifyBatchRequest) (*VerifyBatchResponse, error) + ListKeys(context.Context, *ListKeysRequest) (*ListKeysResponse, error) + ImportKey(context.Context, *ImportKeyRequest) (*ImportKeyResponse, error) + ExportKey(context.Context, *ExportKeyRequest) (*ExportKeyResponse, error) + CreateKey(context.Context, *CreateKeyRequest) (*CreateKeyResponse, error) + DeleteKey(context.Context, *DeleteKeyRequest) (*DeleteKeyResponse, error) + AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) + RemoveTag(context.Context, *RemoveTagRequest) (*RemoveTagResponse, error) + ListTags(context.Context, *ListTagsRequest) (*ListTagsResponse, error) + RunUDF(context.Context, *RunUDFRequest) (*RunUDFResponse, error) + mustEmbedUnimplementedKeystoreServer() +} + +// UnimplementedKeystoreServer must be embedded to have forward compatible implementations. 
+type UnimplementedKeystoreServer struct { +} + +func (UnimplementedKeystoreServer) Sign(context.Context, *SignRequest) (*SignResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Sign not implemented") +} +func (UnimplementedKeystoreServer) SignBatch(context.Context, *SignBatchRequest) (*SignBatchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SignBatch not implemented") +} +func (UnimplementedKeystoreServer) Verify(context.Context, *VerifyRequest) (*VerifyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Verify not implemented") +} +func (UnimplementedKeystoreServer) VerifyBatch(context.Context, *VerifyBatchRequest) (*VerifyBatchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method VerifyBatch not implemented") +} +func (UnimplementedKeystoreServer) ListKeys(context.Context, *ListKeysRequest) (*ListKeysResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListKeys not implemented") +} +func (UnimplementedKeystoreServer) ImportKey(context.Context, *ImportKeyRequest) (*ImportKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ImportKey not implemented") +} +func (UnimplementedKeystoreServer) ExportKey(context.Context, *ExportKeyRequest) (*ExportKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ExportKey not implemented") +} +func (UnimplementedKeystoreServer) CreateKey(context.Context, *CreateKeyRequest) (*CreateKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateKey not implemented") +} +func (UnimplementedKeystoreServer) DeleteKey(context.Context, *DeleteKeyRequest) (*DeleteKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteKey not implemented") +} +func (UnimplementedKeystoreServer) AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddTag not 
implemented") +} +func (UnimplementedKeystoreServer) RemoveTag(context.Context, *RemoveTagRequest) (*RemoveTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RemoveTag not implemented") +} +func (UnimplementedKeystoreServer) ListTags(context.Context, *ListTagsRequest) (*ListTagsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListTags not implemented") +} +func (UnimplementedKeystoreServer) RunUDF(context.Context, *RunUDFRequest) (*RunUDFResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RunUDF not implemented") +} +func (UnimplementedKeystoreServer) mustEmbedUnimplementedKeystoreServer() {} + +// UnsafeKeystoreServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to KeystoreServer will +// result in compilation errors. +type UnsafeKeystoreServer interface { + mustEmbedUnimplementedKeystoreServer() +} + +func RegisterKeystoreServer(s grpc.ServiceRegistrar, srv KeystoreServer) { + s.RegisterService(&Keystore_ServiceDesc, srv) +} + +func _Keystore_Sign_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).Sign(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_Sign_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).Sign(ctx, req.(*SignRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_SignBatch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignBatchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(KeystoreServer).SignBatch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_SignBatch_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).SignBatch(ctx, req.(*SignBatchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_Verify_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).Verify(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_Verify_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).Verify(ctx, req.(*VerifyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_VerifyBatch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyBatchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).VerifyBatch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_VerifyBatch_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).VerifyBatch(ctx, req.(*VerifyBatchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ListKeys_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListKeysRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ListKeys(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
Keystore_ListKeys_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ListKeys(ctx, req.(*ListKeysRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ImportKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ImportKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ImportKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ImportKey(ctx, req.(*ImportKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ExportKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ExportKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ExportKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ExportKey(ctx, req.(*ExportKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_CreateKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).CreateKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_CreateKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(KeystoreServer).CreateKey(ctx, req.(*CreateKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_DeleteKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).DeleteKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_DeleteKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).DeleteKey(ctx, req.(*DeleteKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_AddTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).AddTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_AddTag_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).AddTag(ctx, req.(*AddTagRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_RemoveTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).RemoveTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_RemoveTag_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).RemoveTag(ctx, req.(*RemoveTagRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +func _Keystore_ListTags_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTagsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ListTags(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ListTags_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ListTags(ctx, req.(*ListTagsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_RunUDF_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunUDFRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).RunUDF(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_RunUDF_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).RunUDF(ctx, req.(*RunUDFRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Keystore_ServiceDesc is the grpc.ServiceDesc for Keystore service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Keystore_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "loop.internal.pb.keystore.Keystore", + HandlerType: (*KeystoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Sign", + Handler: _Keystore_Sign_Handler, + }, + { + MethodName: "SignBatch", + Handler: _Keystore_SignBatch_Handler, + }, + { + MethodName: "Verify", + Handler: _Keystore_Verify_Handler, + }, + { + MethodName: "VerifyBatch", + Handler: _Keystore_VerifyBatch_Handler, + }, + { + MethodName: "ListKeys", + Handler: _Keystore_ListKeys_Handler, + }, + { + MethodName: "ImportKey", + Handler: _Keystore_ImportKey_Handler, + }, + { + MethodName: "ExportKey", + Handler: _Keystore_ExportKey_Handler, + }, + { + MethodName: "CreateKey", + Handler: _Keystore_CreateKey_Handler, + }, + { + MethodName: "DeleteKey", + Handler: _Keystore_DeleteKey_Handler, + }, + { + MethodName: "AddTag", + Handler: _Keystore_AddTag_Handler, + }, + { + MethodName: "RemoveTag", + Handler: _Keystore_RemoveTag_Handler, + }, + { + MethodName: "ListTags", + Handler: _Keystore_ListTags_Handler, + }, + { + MethodName: "RunUDF", + Handler: _Keystore_RunUDF_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "keystore.proto", +} diff --git a/pkg/loop/internal/pb/relayer.pb.go b/pkg/loop/internal/pb/relayer.pb.go index 6e947679c..8eb519064 100644 --- a/pkg/loop/internal/pb/relayer.pb.go +++ b/pkg/loop/internal/pb/relayer.pb.go @@ -424,17 +424,17 @@ func (x *PluginArgs) GetPluginConfig() []byte { return nil } -// NewChainWriterReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewChainWriter]. -type NewChainWriterRequest struct { +// NewContractWriterRequest has request parameters for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewContractWriter]. 
+type NewContractWriterRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ChainWriterConfig []byte `protobuf:"bytes,1,opt,name=chainWriterConfig,proto3" json:"chainWriterConfig,omitempty"` + ContractWriterConfig []byte `protobuf:"bytes,1,opt,name=contractWriterConfig,proto3" json:"contractWriterConfig,omitempty"` } -func (x *NewChainWriterRequest) Reset() { - *x = NewChainWriterRequest{} +func (x *NewContractWriterRequest) Reset() { + *x = NewContractWriterRequest{} if protoimpl.UnsafeEnabled { mi := &file_relayer_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -442,13 +442,13 @@ func (x *NewChainWriterRequest) Reset() { } } -func (x *NewChainWriterRequest) String() string { +func (x *NewContractWriterRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*NewChainWriterRequest) ProtoMessage() {} +func (*NewContractWriterRequest) ProtoMessage() {} -func (x *NewChainWriterRequest) ProtoReflect() protoreflect.Message { +func (x *NewContractWriterRequest) ProtoReflect() protoreflect.Message { mi := &file_relayer_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -460,29 +460,29 @@ func (x *NewChainWriterRequest) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use NewChainWriterRequest.ProtoReflect.Descriptor instead. -func (*NewChainWriterRequest) Descriptor() ([]byte, []int) { +// Deprecated: Use NewContractWriterRequest.ProtoReflect.Descriptor instead. 
+func (*NewContractWriterRequest) Descriptor() ([]byte, []int) { return file_relayer_proto_rawDescGZIP(), []int{7} } -func (x *NewChainWriterRequest) GetChainWriterConfig() []byte { +func (x *NewContractWriterRequest) GetContractWriterConfig() []byte { if x != nil { - return x.ChainWriterConfig + return x.ContractWriterConfig } return nil } -// NewChainWriterReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewChainWriter]. -type NewChainWriterReply struct { +// NewContractWriterReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewContractWriter]. +type NewContractWriterReply struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ChainWriterID uint32 `protobuf:"varint,1,opt,name=chainWriterID,proto3" json:"chainWriterID,omitempty"` + ContractWriterID uint32 `protobuf:"varint,1,opt,name=contractWriterID,proto3" json:"contractWriterID,omitempty"` } -func (x *NewChainWriterReply) Reset() { - *x = NewChainWriterReply{} +func (x *NewContractWriterReply) Reset() { + *x = NewContractWriterReply{} if protoimpl.UnsafeEnabled { mi := &file_relayer_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -490,13 +490,13 @@ func (x *NewChainWriterReply) Reset() { } } -func (x *NewChainWriterReply) String() string { +func (x *NewContractWriterReply) String() string { return protoimpl.X.MessageStringOf(x) } -func (*NewChainWriterReply) ProtoMessage() {} +func (*NewContractWriterReply) ProtoMessage() {} -func (x *NewChainWriterReply) ProtoReflect() protoreflect.Message { +func (x *NewContractWriterReply) ProtoReflect() protoreflect.Message { mi := &file_relayer_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -508,14 +508,14 @@ func (x *NewChainWriterReply) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use 
NewChainWriterReply.ProtoReflect.Descriptor instead. -func (*NewChainWriterReply) Descriptor() ([]byte, []int) { +// Deprecated: Use NewContractWriterReply.ProtoReflect.Descriptor instead. +func (*NewContractWriterReply) Descriptor() ([]byte, []int) { return file_relayer_proto_rawDescGZIP(), []int{8} } -func (x *NewChainWriterReply) GetChainWriterID() uint32 { +func (x *NewContractWriterReply) GetContractWriterID() uint32 { if x != nil { - return x.ChainWriterID + return x.ContractWriterID } return 0 } @@ -2635,332 +2635,334 @@ var file_relayer_proto_rawDesc = []byte{ 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x49, 0x44, 0x12, 0x22, 0x0a, 0x0c, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, - 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x45, 0x0a, 0x15, - 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2c, 0x0a, 0x11, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, - 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x11, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x22, 0x3b, 0x0a, 0x13, 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x57, - 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x24, 0x0a, 0x0d, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0d, 0x52, 0x0d, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x49, 0x44, - 0x22, 0x4e, 0x0a, 0x18, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x32, 0x0a, 0x14, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 
0x65, 0x61, 0x64, 0x65, 0x72, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x14, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x22, 0x44, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2a, 0x0a, 0x10, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, - 0x61, 0x64, 0x65, 0x72, 0x49, 0x44, 0x22, 0x7b, 0x0a, 0x18, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x67, 0x73, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x6c, - 0x61, 0x79, 0x41, 0x72, 0x67, 0x73, 0x52, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x67, - 0x73, 0x12, 0x30, 0x0a, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x41, 0x72, 0x67, 0x73, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x50, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x41, 0x72, 0x67, 0x73, 0x52, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x41, - 0x72, 0x67, 0x73, 0x22, 0x44, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2a, 0x0a, - 0x10, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x49, - 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, - 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x49, 0x44, 0x22, 0x49, 0x0a, 0x18, 0x4e, 0x65, 0x77, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 
0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, - 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x52, 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x67, 0x73, 0x52, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, - 0x41, 0x72, 0x67, 0x73, 0x22, 0x44, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2a, - 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, - 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x49, 0x44, 0x22, 0x13, 0x0a, 0x11, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, - 0x31, 0x0a, 0x0f, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x12, 0x1e, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x0a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x52, 0x04, 0x68, 0x65, - 0x61, 0x64, 0x22, 0x17, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x3e, 0x0a, 0x13, 0x47, - 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x11, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x22, 0x4f, 0x0a, 0x0b, 0x43, - 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, - 0x61, 0x62, 0x6c, 0x65, 0x64, 
0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, - 0x62, 0x6c, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x55, 0x0a, 0x17, - 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, - 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, - 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x22, 0x7d, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x26, 0x0a, 0x05, - 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x6e, - 0x6f, 0x64, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x05, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, - 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x22, 0x68, 0x0a, 0x0a, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x12, 0x18, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, - 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 
0x01, 0x28, 0x09, 0x52, 0x06, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x82, 0x01, 0x0a, - 0x12, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x74, 0x6f, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x74, 0x6f, 0x12, 0x24, 0x0a, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, - 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, - 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x22, 0x0a, - 0x0c, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x0c, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x68, 0x65, 0x63, - 0x6b, 0x22, 0xa6, 0x02, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, - 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x69, - 0x67, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x07, 0x73, 0x69, 0x67, - 0x6e, 0x65, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, - 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x73, 0x12, 0x0c, 0x0a, 0x01, 0x46, 0x18, 0x05, 0x20, - 0x01, 
0x28, 0x0d, 0x52, 0x01, 0x46, 0x12, 0x24, 0x0a, 0x0d, 0x6f, 0x6e, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x6f, - 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x34, 0x0a, 0x15, - 0x6f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x15, 0x6f, 0x66, 0x66, - 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x12, 0x26, 0x0a, 0x0e, 0x6f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x6f, 0x66, 0x66, 0x63, - 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x53, 0x0a, 0x13, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x3c, 0x0a, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, - 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, - 0x37, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, - 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x22, 0x1b, 0x0a, 0x19, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x49, 0x0a, 0x17, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, - 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 
0x65, 0x66, 0x69, 0x78, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x12, 0x2e, 0x0a, 0x12, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, - 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x12, 0x63, 0x6f, + 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x4e, 0x0a, 0x18, + 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, + 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x32, 0x0a, 0x14, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x14, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, + 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x44, 0x0a, 0x16, + 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, + 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2a, 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, + 0x49, 0x44, 0x22, 0x4e, 0x0a, 0x18, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, + 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x32, + 0x0a, 0x14, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x14, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x22, 0x44, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, + 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2a, 0x0a, 0x10, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 
0x72, 0x49, 0x44, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, + 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x49, 0x44, 0x22, 0x7b, 0x0a, 0x18, 0x4e, 0x65, 0x77, 0x50, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x67, + 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, + 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x67, 0x73, 0x52, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x41, + 0x72, 0x67, 0x73, 0x12, 0x30, 0x0a, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x41, 0x72, 0x67, + 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x50, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x41, 0x72, 0x67, 0x73, 0x52, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, + 0x6e, 0x41, 0x72, 0x67, 0x73, 0x22, 0x44, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, + 0x2a, 0x0a, 0x10, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, + 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x70, 0x6c, 0x75, 0x67, 0x69, + 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x49, 0x44, 0x22, 0x49, 0x0a, 0x18, 0x4e, + 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, + 0x41, 0x72, 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x67, 0x73, 0x52, 0x09, 0x72, 0x65, 0x6c, + 0x61, 0x79, 0x41, 0x72, 0x67, 0x73, 0x22, 0x44, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x12, 0x2a, 0x0a, 
0x10, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, + 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x49, 0x44, 0x22, 0x13, 0x0a, 0x11, + 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0x31, 0x0a, 0x0f, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x12, 0x1e, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x52, 0x04, + 0x68, 0x65, 0x61, 0x64, 0x22, 0x17, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x3e, 0x0a, + 0x13, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x68, 0x61, 0x69, 0x6e, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x22, 0x4f, 0x0a, + 0x0b, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, + 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, + 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x55, + 0x0a, 0x17, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, + 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 
0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, + 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, + 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x7d, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, + 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x26, + 0x0a, 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, + 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x26, 0x0a, 0x0f, + 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, + 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x68, 0x0a, 0x0a, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x49, 0x44, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x82, + 0x01, 0x0a, 0x12, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x74, 0x6f, 
0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x74, 0x6f, 0x12, 0x24, 0x0a, 0x06, 0x61, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, + 0x22, 0x0a, 0x0c, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x68, + 0x65, 0x63, 0x6b, 0x22, 0xa6, 0x02, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x0b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x18, 0x0a, 0x07, + 0x73, 0x69, 0x67, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x07, 0x73, + 0x69, 0x67, 0x6e, 0x65, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x6d, + 0x69, 0x74, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x73, 0x12, 0x0c, 0x0a, 0x01, 0x46, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x01, 0x46, 0x12, 0x24, 0x0a, 0x0d, 0x6f, 0x6e, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x0d, 0x6f, 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x34, + 0x0a, 0x15, 0x6f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x15, 0x6f, + 0x66, 0x66, 0x63, 0x68, 0x61, 
0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x56, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x26, 0x0a, 0x0e, 0x6f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x6f, 0x66, + 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x53, 0x0a, 0x13, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x3c, 0x0a, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x22, 0x37, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, + 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x22, 0x1b, 0x0a, 0x19, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, - 0x22, 0x1c, 0x0a, 0x1a, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x66, - 0x0a, 0x18, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x26, 0x0a, 0x0e, 0x63, 0x68, - 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x04, 0x52, 0x0e, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 
0x44, 0x69, 0x67, 0x65, - 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x22, 0x3d, 0x0a, 0x13, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, - 0x0e, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x51, 0x0a, 0x11, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x3c, 0x0a, 0x0e, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, - 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, - 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x1a, 0x0a, 0x18, 0x4c, 0x61, 0x74, 0x65, - 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x22, 0x3a, 0x0a, 0x16, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, - 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x20, - 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, - 0x22, 0x61, 0x0a, 0x0f, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, - 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, - 0x18, 0x02, 
0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x14, 0x0a, - 0x05, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x72, 0x6f, - 0x75, 0x6e, 0x64, 0x22, 0x6e, 0x0a, 0x0d, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x12, 0x3f, 0x0a, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, - 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x72, 0x61, 0x48, 0x61, - 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x65, 0x78, 0x74, 0x72, 0x61, 0x48, - 0x61, 0x73, 0x68, 0x22, 0x52, 0x0a, 0x1a, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x64, 0x4f, 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, - 0x16, 0x0a, 0x06, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, - 0x06, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x72, 0x22, 0xc8, 0x01, 0x0a, 0x0f, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x0d, 0x72, - 0x65, 0x70, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0d, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x43, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 
0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x62, - 0x0a, 0x1b, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x64, 0x4f, 0x6e, 0x63, 0x68, - 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, - 0x62, 0x75, 0x74, 0x65, 0x64, 0x4f, 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x1b, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x64, 0x4f, 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x73, 0x22, 0x0f, 0x0a, 0x0d, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x52, 0x65, - 0x70, 0x6c, 0x79, 0x22, 0x23, 0x0a, 0x21, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x64, 0x45, 0x70, 0x6f, 0x63, - 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x5b, 0x0a, 0x1f, 0x4c, 0x61, 0x74, 0x65, - 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, - 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x22, 0x0a, 0x0c, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, - 0x14, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, - 0x65, 0x70, 0x6f, 0x63, 0x68, 0x22, 0x14, 0x0a, 0x12, 0x46, 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x2c, 0x0a, 0x10, 0x46, - 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, - 0x18, 0x0a, 0x07, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x1f, 0x0a, 0x09, 0x4e, 
0x61, 0x6d, - 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xa3, 0x01, 0x0a, 0x11, 0x48, - 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x12, 0x4d, 0x0a, 0x0c, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x48, 0x65, - 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x2e, - 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x0c, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x1a, - 0x3f, 0x0a, 0x11, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x22, 0x3a, 0x0a, 0x06, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x6e, 0x65, - 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6e, 0x65, - 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4b, 0x0a, 0x11, - 0x53, 0x74, 0x61, 0x72, 0x6b, 0x6e, 0x65, 0x74, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x12, 0x1a, 0x0a, 0x01, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x01, 0x78, 0x12, 0x1a, 0x0a, - 0x01, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x42, 0x69, 0x67, 0x49, 
0x6e, 0x74, 0x52, 0x01, 0x79, 0x22, 0x37, 0x0a, 0x13, 0x53, 0x74, 0x61, - 0x72, 0x6b, 0x6e, 0x65, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x48, 0x61, 0x73, 0x68, - 0x12, 0x20, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x04, 0x68, 0x61, - 0x73, 0x68, 0x32, 0x4f, 0x0a, 0x0d, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x6c, 0x61, - 0x79, 0x65, 0x72, 0x12, 0x3e, 0x0a, 0x0a, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x12, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x6c, 0x61, - 0x79, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, - 0x79, 0x22, 0x00, 0x32, 0x73, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, - 0x39, 0x0a, 0x08, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x1a, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x41, 0x63, 0x63, 0x6f, 0x75, - 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x2c, 0x0a, 0x04, 0x53, 0x69, - 0x67, 0x6e, 0x12, 0x11, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x69, 0x67, - 0x6e, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0xf2, 0x04, 0x0a, 0x07, 0x52, 0x65, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x12, 0x4a, 0x0a, 0x0e, 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, - 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, - 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 
0x2e, 0x4e, 0x65, 0x77, 0x43, 0x68, - 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, - 0x12, 0x53, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, - 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x49, 0x0a, 0x17, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x2e, 0x0a, 0x12, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, + 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x12, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, + 0x69, 0x78, 0x22, 0x1c, 0x0a, 0x1a, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x22, 0x66, 0x0a, 0x18, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x26, 0x0a, 0x0e, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, + 0x67, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x22, 0x3d, 0x0a, 0x13, 0x4c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x26, 0x0a, 0x0e, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x49, 0x6e, 0x42, 0x6c, 0x6f, 0x63, + 
0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, + 0x49, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x51, 0x0a, 0x11, 0x4c, 0x61, 0x74, 0x65, 0x73, + 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x3c, 0x0a, 0x0e, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x74, + 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x1a, 0x0a, 0x18, 0x4c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x3a, 0x0a, 0x16, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, + 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x12, 0x20, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x22, 0x61, 0x0a, 0x0f, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x22, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, + 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x70, 0x6f, + 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x12, + 0x14, 0x0a, 0x05, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x22, 0x6e, 0x0a, 0x0d, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x3f, 0x0a, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 
0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x72, 0x61, + 0x48, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x65, 0x78, 0x74, 0x72, + 0x61, 0x48, 0x61, 0x73, 0x68, 0x22, 0x52, 0x0a, 0x1a, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x64, 0x4f, 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x06, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x72, 0x22, 0xc8, 0x01, 0x0a, 0x0f, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x39, 0x0a, + 0x0d, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x70, 0x6f, + 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0d, 0x72, 0x65, 0x70, 0x6f, 0x72, + 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x70, 0x6f, + 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x12, 0x62, 0x0a, 0x1b, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x64, 0x4f, 0x6e, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x41, 0x74, 0x74, + 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x64, 0x4f, 0x6e, 0x63, 0x68, 0x61, 
0x69, 0x6e, 0x53, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x1b, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x64, 0x4f, 0x6e, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x73, 0x22, 0x0f, 0x0a, 0x0d, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x23, 0x0a, 0x21, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x64, 0x45, 0x70, + 0x6f, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x5b, 0x0a, 0x1f, 0x4c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, + 0x41, 0x6e, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x22, 0x0a, + 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, + 0x74, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x05, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x22, 0x14, 0x0a, 0x12, 0x46, 0x72, 0x6f, 0x6d, 0x41, + 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x2c, 0x0a, + 0x10, 0x46, 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x12, 0x18, 0x0a, 0x07, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x1f, 0x0a, 0x09, 0x4e, + 0x61, 0x6d, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xa3, 0x01, 0x0a, + 0x11, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x4d, 0x0a, 0x0c, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, + 0x72, 0x74, 
0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x0c, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, + 0x74, 0x1a, 0x3f, 0x0a, 0x11, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, + 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x3a, 0x0a, 0x06, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, + 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, + 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4b, + 0x0a, 0x11, 0x53, 0x74, 0x61, 0x72, 0x6b, 0x6e, 0x65, 0x74, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x12, 0x1a, 0x0a, 0x01, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x01, 0x78, 0x12, + 0x1a, 0x0a, 0x01, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x01, 0x79, 0x22, 0x37, 0x0a, 0x13, 0x53, + 0x74, 0x61, 0x72, 0x6b, 0x6e, 0x65, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x48, 0x61, + 0x73, 0x68, 0x12, 0x20, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x42, 0x69, 0x67, 0x49, 0x6e, 0x74, 0x52, 0x04, + 0x68, 0x61, 0x73, 0x68, 0x32, 0x4f, 0x0a, 0x0d, 
0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x52, 0x65, + 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x3e, 0x0a, 0x0a, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x6c, 0x61, + 0x79, 0x65, 0x72, 0x12, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65, + 0x6c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0x73, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x12, 0x39, 0x0a, 0x08, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x41, 0x63, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x2c, 0x0a, 0x04, + 0x53, 0x69, 0x67, 0x6e, 0x12, 0x11, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, 0x69, 0x67, 0x6e, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x53, + 0x69, 0x67, 0x6e, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0xfb, 0x04, 0x0a, 0x07, 0x52, + 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, + 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, + 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, + 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x53, 0x0a, 0x11, 0x4e, + 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 
0x74, 0x72, + 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, + 0x12, 0x53, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, - 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, - 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x53, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x53, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x53, 0x0a, 0x11, 0x4e, 0x65, - 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, - 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, - 0x50, 0x72, 0x6f, 0x76, 0x69, 
0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, - 0x3e, 0x0a, 0x0a, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x12, 0x17, 0x2e, - 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, - 0x4a, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x12, 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, - 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x10, 0x4c, - 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x12, - 0x1d, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, - 0x08, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x12, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 
0x22, 0x00, 0x32, 0xb6, 0x01, - 0x0a, 0x16, 0x4f, 0x66, 0x66, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x65, 0x72, 0x12, 0x44, 0x0a, 0x0c, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x56, - 0x0a, 0x12, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, - 0x65, 0x66, 0x69, 0x78, 0x12, 0x1f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0x8d, 0x02, 0x0a, 0x15, 0x43, 0x6f, 0x6e, 0x74, 0x72, - 0x61, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x54, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x72, - 0x12, 0x59, 0x0a, 0x13, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x20, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, + 0x70, 0x2e, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, 0x0a, 0x4c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x12, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, + 0x65, 
0x61, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0e, 0x47, 0x65, + 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1b, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x12, 0x1d, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, + 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, 0x08, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x12, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x32, 0xb6, 0x01, 0x0a, 0x16, 0x4f, 0x66, 0x66, + 0x63, 0x68, 0x61, 0x69, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, + 0x74, 0x65, 0x72, 0x12, 0x44, 0x0a, 0x0c, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, + 0x65, 0x73, 0x74, 0x12, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, + 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 
0x22, 0x00, 0x12, 0x56, 0x0a, 0x12, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, + 0x1f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, + 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1d, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, + 0x67, 0x65, 0x73, 0x74, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x00, 0x32, 0x8d, 0x02, 0x0a, 0x15, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x54, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x12, 0x59, 0x0a, 0x13, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x65, 0x74, 0x61, 0x69, - 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x4c, - 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x19, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, - 0x00, 0x12, 0x53, 0x0a, 0x11, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 
0x74, 0x52, - 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0x82, 0x02, 0x0a, 0x13, 0x43, 0x6f, 0x6e, 0x74, 0x72, - 0x61, 0x63, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x12, 0x38, - 0x0a, 0x08, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x12, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x13, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, - 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x6e, 0x0a, 0x1a, 0x4c, 0x61, 0x74, 0x65, - 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, - 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, - 0x41, 0x6e, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x25, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, + 0x6c, 0x73, 0x12, 0x20, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, + 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x19, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, + 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x53, 0x0a, 0x11, + 0x4c, 0x61, 0x74, 0x65, 
0x73, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, + 0x74, 0x12, 0x1e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x00, 0x32, 0x82, 0x02, 0x0a, 0x13, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x12, 0x38, 0x0a, 0x08, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x12, 0x15, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x00, 0x12, 0x6e, 0x0a, 0x1a, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x64, 0x45, 0x70, 0x6f, 0x63, - 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x41, 0x0a, 0x0b, 0x46, 0x72, 0x6f, 0x6d, - 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x46, - 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x46, 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0xf5, 0x01, 0x0a, 0x07, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x31, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x12, - 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, - 0x61, 0x6d, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 
0x39, 0x0a, 0x05, 0x43, 0x6c, - 0x6f, 0x73, 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x39, 0x0a, 0x05, 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, 0x16, + 0x68, 0x12, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x64, 0x45, 0x70, + 0x6f, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x44, 0x69, + 0x67, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x64, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x00, 0x12, 0x41, 0x0a, 0x0b, 0x46, 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x12, 0x18, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x46, 0x72, 0x6f, 0x6d, 0x41, 0x63, + 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x46, 0x72, 0x6f, 0x6d, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x32, 0xf5, 0x01, 0x0a, 0x07, 0x53, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x12, 0x31, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, + 0x74, 0x79, 0x1a, 0x0f, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x39, 0x0a, 0x05, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, - 0x12, 0x41, 0x0a, 0x0c, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, - 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x70, 0x6c, - 0x79, 0x22, 0x00, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, - 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, - 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x12, 0x39, 0x0a, 0x05, 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, + 0x79, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x41, 0x0a, 0x0c, 0x48, + 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x1a, 0x17, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, + 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x43, + 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, + 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 
0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, + 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2984,8 +2986,8 @@ var file_relayer_proto_goTypes = []interface{}{ (*SignReply)(nil), // 4: loop.SignReply (*RelayArgs)(nil), // 5: loop.RelayArgs (*PluginArgs)(nil), // 6: loop.PluginArgs - (*NewChainWriterRequest)(nil), // 7: loop.NewChainWriterRequest - (*NewChainWriterReply)(nil), // 8: loop.NewChainWriterReply + (*NewContractWriterRequest)(nil), // 7: loop.NewContractWriterRequest + (*NewContractWriterReply)(nil), // 8: loop.NewContractWriterReply (*NewContractReaderRequest)(nil), // 9: loop.NewContractReaderRequest (*NewContractReaderReply)(nil), // 10: loop.NewContractReaderReply (*NewPluginProviderRequest)(nil), // 11: loop.NewPluginProviderRequest @@ -3050,7 +3052,7 @@ var file_relayer_proto_depIdxs = []int32{ 0, // 16: loop.PluginRelayer.NewRelayer:input_type -> loop.NewRelayerRequest 51, // 17: loop.Keystore.Accounts:input_type -> google.protobuf.Empty 3, // 18: loop.Keystore.Sign:input_type -> loop.SignRequest - 7, // 19: loop.Relayer.NewChainWriter:input_type -> loop.NewChainWriterRequest + 7, // 19: loop.Relayer.NewContractWriter:input_type -> loop.NewContractWriterRequest 9, // 20: loop.Relayer.NewContractReader:input_type -> loop.NewContractReaderRequest 13, // 21: loop.Relayer.NewConfigProvider:input_type -> loop.NewConfigProviderRequest 11, // 22: loop.Relayer.NewPluginProvider:input_type -> loop.NewPluginProviderRequest @@ -3073,7 +3075,7 @@ var file_relayer_proto_depIdxs = []int32{ 1, // 39: loop.PluginRelayer.NewRelayer:output_type -> loop.NewRelayerReply 2, // 40: loop.Keystore.Accounts:output_type -> loop.AccountsReply 4, // 41: loop.Keystore.Sign:output_type -> loop.SignReply - 8, // 42: loop.Relayer.NewChainWriter:output_type -> loop.NewChainWriterReply + 8, // 42: loop.Relayer.NewContractWriter:output_type -> 
loop.NewContractWriterReply 10, // 43: loop.Relayer.NewContractReader:output_type -> loop.NewContractReaderReply 14, // 44: loop.Relayer.NewConfigProvider:output_type -> loop.NewConfigProviderReply 12, // 45: loop.Relayer.NewPluginProvider:output_type -> loop.NewPluginProviderReply @@ -3192,7 +3194,7 @@ func file_relayer_proto_init() { } } file_relayer_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NewChainWriterRequest); i { + switch v := v.(*NewContractWriterRequest); i { case 0: return &v.state case 1: @@ -3204,7 +3206,7 @@ func file_relayer_proto_init() { } } file_relayer_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NewChainWriterReply); i { + switch v := v.(*NewContractWriterReply); i { case 0: return &v.state case 1: diff --git a/pkg/loop/internal/pb/relayer.proto b/pkg/loop/internal/pb/relayer.proto index efbeda99e..211351d2c 100644 --- a/pkg/loop/internal/pb/relayer.proto +++ b/pkg/loop/internal/pb/relayer.proto @@ -42,7 +42,7 @@ message SignReply { } service Relayer { - rpc NewChainWriter(NewChainWriterRequest) returns (NewChainWriterReply) {} + rpc NewContractWriter(NewContractWriterRequest) returns (NewContractWriterReply) {} rpc NewContractReader (NewContractReaderRequest) returns (NewContractReaderReply) {} rpc NewConfigProvider (NewConfigProviderRequest) returns (NewConfigProviderReply) {} rpc NewPluginProvider (NewPluginProviderRequest) returns (NewPluginProviderReply) {} @@ -69,14 +69,14 @@ message PluginArgs { bytes pluginConfig = 2; } -// NewChainWriterReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewChainWriter]. -message NewChainWriterRequest { - bytes chainWriterConfig = 1; +// NewContractWriterRequest has request parameters for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewContractWriter]. 
+message NewContractWriterRequest { + bytes contractWriterConfig = 1; } -// NewChainWriterReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewChainWriter]. -message NewChainWriterReply { - uint32 chainWriterID = 1; +// NewContractWriterReply has return arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewContractWriter]. +message NewContractWriterReply { + uint32 contractWriterID = 1; } // NewContractReaderRequest has arguments for [github.com/smartcontractkit/chainlink-common/pkg/loop.Relayer.NewContractReader]. diff --git a/pkg/loop/internal/pb/relayer_grpc.pb.go b/pkg/loop/internal/pb/relayer_grpc.pb.go index 214842b92..aa6a804db 100644 --- a/pkg/loop/internal/pb/relayer_grpc.pb.go +++ b/pkg/loop/internal/pb/relayer_grpc.pb.go @@ -237,7 +237,7 @@ var Keystore_ServiceDesc = grpc.ServiceDesc{ } const ( - Relayer_NewChainWriter_FullMethodName = "/loop.Relayer/NewChainWriter" + Relayer_NewContractWriter_FullMethodName = "/loop.Relayer/NewContractWriter" Relayer_NewContractReader_FullMethodName = "/loop.Relayer/NewContractReader" Relayer_NewConfigProvider_FullMethodName = "/loop.Relayer/NewConfigProvider" Relayer_NewPluginProvider_FullMethodName = "/loop.Relayer/NewPluginProvider" @@ -251,7 +251,7 @@ const ( // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
type RelayerClient interface { - NewChainWriter(ctx context.Context, in *NewChainWriterRequest, opts ...grpc.CallOption) (*NewChainWriterReply, error) + NewContractWriter(ctx context.Context, in *NewContractWriterRequest, opts ...grpc.CallOption) (*NewContractWriterReply, error) NewContractReader(ctx context.Context, in *NewContractReaderRequest, opts ...grpc.CallOption) (*NewContractReaderReply, error) NewConfigProvider(ctx context.Context, in *NewConfigProviderRequest, opts ...grpc.CallOption) (*NewConfigProviderReply, error) NewPluginProvider(ctx context.Context, in *NewPluginProviderRequest, opts ...grpc.CallOption) (*NewPluginProviderReply, error) @@ -269,9 +269,9 @@ func NewRelayerClient(cc grpc.ClientConnInterface) RelayerClient { return &relayerClient{cc} } -func (c *relayerClient) NewChainWriter(ctx context.Context, in *NewChainWriterRequest, opts ...grpc.CallOption) (*NewChainWriterReply, error) { - out := new(NewChainWriterReply) - err := c.cc.Invoke(ctx, Relayer_NewChainWriter_FullMethodName, in, out, opts...) +func (c *relayerClient) NewContractWriter(ctx context.Context, in *NewContractWriterRequest, opts ...grpc.CallOption) (*NewContractWriterReply, error) { + out := new(NewContractWriterReply) + err := c.cc.Invoke(ctx, Relayer_NewContractWriter_FullMethodName, in, out, opts...) 
if err != nil { return nil, err } @@ -345,7 +345,7 @@ func (c *relayerClient) Transact(ctx context.Context, in *TransactionRequest, op // All implementations must embed UnimplementedRelayerServer // for forward compatibility type RelayerServer interface { - NewChainWriter(context.Context, *NewChainWriterRequest) (*NewChainWriterReply, error) + NewContractWriter(context.Context, *NewContractWriterRequest) (*NewContractWriterReply, error) NewContractReader(context.Context, *NewContractReaderRequest) (*NewContractReaderReply, error) NewConfigProvider(context.Context, *NewConfigProviderRequest) (*NewConfigProviderReply, error) NewPluginProvider(context.Context, *NewPluginProviderRequest) (*NewPluginProviderReply, error) @@ -360,8 +360,8 @@ type RelayerServer interface { type UnimplementedRelayerServer struct { } -func (UnimplementedRelayerServer) NewChainWriter(context.Context, *NewChainWriterRequest) (*NewChainWriterReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method NewChainWriter not implemented") +func (UnimplementedRelayerServer) NewContractWriter(context.Context, *NewContractWriterRequest) (*NewContractWriterReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method NewContractWriter not implemented") } func (UnimplementedRelayerServer) NewContractReader(context.Context, *NewContractReaderRequest) (*NewContractReaderReply, error) { return nil, status.Errorf(codes.Unimplemented, "method NewContractReader not implemented") @@ -397,20 +397,20 @@ func RegisterRelayerServer(s grpc.ServiceRegistrar, srv RelayerServer) { s.RegisterService(&Relayer_ServiceDesc, srv) } -func _Relayer_NewChainWriter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(NewChainWriterRequest) +func _Relayer_NewContractWriter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { 
+ in := new(NewContractWriterRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(RelayerServer).NewChainWriter(ctx, in) + return srv.(RelayerServer).NewContractWriter(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: Relayer_NewChainWriter_FullMethodName, + FullMethod: Relayer_NewContractWriter_FullMethodName, } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RelayerServer).NewChainWriter(ctx, req.(*NewChainWriterRequest)) + return srv.(RelayerServer).NewContractWriter(ctx, req.(*NewContractWriterRequest)) } return interceptor(ctx, in, info, handler) } @@ -549,8 +549,8 @@ var Relayer_ServiceDesc = grpc.ServiceDesc{ HandlerType: (*RelayerServer)(nil), Methods: []grpc.MethodDesc{ { - MethodName: "NewChainWriter", - Handler: _Relayer_NewChainWriter_Handler, + MethodName: "NewContractWriter", + Handler: _Relayer_NewContractWriter_Handler, }, { MethodName: "NewContractReader", diff --git a/pkg/loop/internal/pb/relayerset/relayerset.pb.go b/pkg/loop/internal/pb/relayerset/relayerset.pb.go index 05e07cabf..da4346d0b 100644 --- a/pkg/loop/internal/pb/relayerset/relayerset.pb.go +++ b/pkg/loop/internal/pb/relayerset/relayerset.pb.go @@ -675,17 +675,17 @@ func (x *NewContractReaderResponse) GetContractReaderId() uint32 { return 0 } -type NewChainWriterRequest struct { +type NewContractWriterRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - RelayerId *RelayerId `protobuf:"bytes,1,opt,name=relayerId,proto3" json:"relayerId,omitempty"` - ChainWriterConfig []byte `protobuf:"bytes,2,opt,name=chainWriterConfig,proto3" json:"chainWriterConfig,omitempty"` + RelayerId *RelayerId `protobuf:"bytes,1,opt,name=relayerId,proto3" json:"relayerId,omitempty"` + ContractWriterConfig []byte `protobuf:"bytes,2,opt,name=contractWriterConfig,proto3" json:"contractWriterConfig,omitempty"` } -func (x *NewChainWriterRequest) 
Reset() { - *x = NewChainWriterRequest{} +func (x *NewContractWriterRequest) Reset() { + *x = NewContractWriterRequest{} if protoimpl.UnsafeEnabled { mi := &file_relayerset_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -693,13 +693,13 @@ func (x *NewChainWriterRequest) Reset() { } } -func (x *NewChainWriterRequest) String() string { +func (x *NewContractWriterRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*NewChainWriterRequest) ProtoMessage() {} +func (*NewContractWriterRequest) ProtoMessage() {} -func (x *NewChainWriterRequest) ProtoReflect() protoreflect.Message { +func (x *NewContractWriterRequest) ProtoReflect() protoreflect.Message { mi := &file_relayerset_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -711,35 +711,35 @@ func (x *NewChainWriterRequest) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use NewChainWriterRequest.ProtoReflect.Descriptor instead. -func (*NewChainWriterRequest) Descriptor() ([]byte, []int) { +// Deprecated: Use NewContractWriterRequest.ProtoReflect.Descriptor instead. 
+func (*NewContractWriterRequest) Descriptor() ([]byte, []int) { return file_relayerset_proto_rawDescGZIP(), []int{12} } -func (x *NewChainWriterRequest) GetRelayerId() *RelayerId { +func (x *NewContractWriterRequest) GetRelayerId() *RelayerId { if x != nil { return x.RelayerId } return nil } -func (x *NewChainWriterRequest) GetChainWriterConfig() []byte { +func (x *NewContractWriterRequest) GetContractWriterConfig() []byte { if x != nil { - return x.ChainWriterConfig + return x.ContractWriterConfig } return nil } -type NewChainWriterResponse struct { +type NewContractWriterResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ChainWriterId uint32 `protobuf:"varint,1,opt,name=chainWriterId,proto3" json:"chainWriterId,omitempty"` + ContractWriterId uint32 `protobuf:"varint,1,opt,name=contractWriterId,proto3" json:"contractWriterId,omitempty"` } -func (x *NewChainWriterResponse) Reset() { - *x = NewChainWriterResponse{} +func (x *NewContractWriterResponse) Reset() { + *x = NewContractWriterResponse{} if protoimpl.UnsafeEnabled { mi := &file_relayerset_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -747,13 +747,13 @@ func (x *NewChainWriterResponse) Reset() { } } -func (x *NewChainWriterResponse) String() string { +func (x *NewContractWriterResponse) String() string { return protoimpl.X.MessageStringOf(x) } -func (*NewChainWriterResponse) ProtoMessage() {} +func (*NewContractWriterResponse) ProtoMessage() {} -func (x *NewChainWriterResponse) ProtoReflect() protoreflect.Message { +func (x *NewContractWriterResponse) ProtoReflect() protoreflect.Message { mi := &file_relayerset_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -765,14 +765,14 @@ func (x *NewChainWriterResponse) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use NewChainWriterResponse.ProtoReflect.Descriptor 
instead. -func (*NewChainWriterResponse) Descriptor() ([]byte, []int) { +// Deprecated: Use NewContractWriterResponse.ProtoReflect.Descriptor instead. +func (*NewContractWriterResponse) Descriptor() ([]byte, []int) { return file_relayerset_proto_rawDescGZIP(), []int{13} } -func (x *NewChainWriterResponse) GetChainWriterId() uint32 { +func (x *NewContractWriterResponse) GetContractWriterId() uint32 { if x != nil { - return x.ChainWriterId + return x.ContractWriterId } return 0 } @@ -1066,111 +1066,113 @@ var file_relayerset_proto_rawDesc = []byte{ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x49, 0x64, 0x22, 0x7f, 0x0a, 0x15, 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x57, - 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x09, + 0x72, 0x49, 0x64, 0x22, 0x88, 0x01, 0x0a, 0x18, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x38, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, + 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x52, + 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x12, 0x32, 0x0a, 0x14, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x14, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x47, + 0x0a, 0x19, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 
0x57, 0x72, 0x69, + 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x10, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x49, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, + 0x72, 0x69, 0x74, 0x65, 0x72, 0x49, 0x64, 0x22, 0x4d, 0x0a, 0x11, 0x4c, 0x61, 0x74, 0x65, 0x73, + 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x52, 0x09, 0x72, 0x65, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x11, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, - 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x11, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x22, 0x3e, 0x0a, 0x16, 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, - 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, - 0x0a, 0x0d, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x49, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, - 0x65, 0x72, 0x49, 0x64, 0x22, 0x4d, 0x0a, 0x11, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, - 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x09, 0x72, 0x65, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6c, - 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, - 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x52, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x49, 0x64, 0x22, 
0x5e, 0x0a, 0x12, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, - 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x68, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x22, 0xaa, 0x01, 0x0a, 0x1b, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x48, - 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x06, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, - 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x48, 0x65, 0x61, - 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x72, - 0x65, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x39, 0x0a, 0x0b, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x22, 0x29, 0x0a, 0x13, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x32, 0xe4, 0x07, 0x0a, 0x0a, - 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x53, 0x65, 0x74, 
0x12, 0x50, 0x0a, 0x03, 0x47, 0x65, - 0x74, 0x12, 0x22, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, - 0x73, 0x65, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x61, 0x79, - 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x04, - 0x4c, 0x69, 0x73, 0x74, 0x12, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, - 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x6c, 0x6c, 0x52, 0x65, - 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, - 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, - 0x4c, 0x69, 0x73, 0x74, 0x41, 0x6c, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x11, 0x4e, 0x65, 0x77, - 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x29, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, - 0x2e, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, - 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4e, 0x65, 0x77, 0x50, - 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x43, 0x6f, - 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x29, 0x2e, 0x6c, + 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x22, 0x5e, 0x0a, 0x12, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, + 
0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, + 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x68, 0x65, + 0x69, 0x67, 0x68, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x74, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xaa, 0x01, 0x0a, 0x1b, 0x52, 0x65, 0x6c, 0x61, 0x79, + 0x65, 0x72, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x06, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, + 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, + 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x06, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x39, 0x0a, 0x0b, 0x52, 0x65, 0x70, 0x6f, + 0x72, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x22, 0x29, 0x0a, 0x13, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4e, 0x61, + 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x32, 0xed, + 0x07, 0x0a, 0x0a, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x53, 0x65, 0x74, 0x12, 0x50, 0x0a, + 0x03, 0x47, 0x65, 0x74, 0x12, 0x22, 
0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, + 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, + 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, + 0x6c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x5b, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, + 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x6c, + 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x28, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, + 0x65, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x6c, 0x6c, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, + 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x11, + 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, + 0x72, 0x12, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, + 0x73, 0x65, 0x74, 0x2e, 0x4e, 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4e, - 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, - 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x63, 0x0a, 0x0e, 0x4e, 0x65, 0x77, 0x43, 
0x68, 0x61, 0x69, - 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x26, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, - 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, - 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, - 0x74, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x53, 0x74, - 0x61, 0x72, 0x74, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, - 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, - 0x12, 0x44, 0x0a, 0x0c, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, - 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, - 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x16, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, + 0x65, 0x77, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x11, 0x4e, 0x65, + 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, + 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, + 0x74, 0x2e, 0x4e, 
0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4e, 0x65, 0x77, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x11, 0x4e, 0x65, 0x77, 0x43, + 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x12, 0x29, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, + 0x4e, 0x65, 0x77, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, + 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4e, 0x65, 0x77, 0x43, 0x6f, + 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x57, 0x72, 0x69, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, + 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x61, 0x0a, 0x13, - 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, - 0x6f, 0x72, 0x74, 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, - 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, - 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, - 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x48, 
0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, - 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, - 0x51, 0x0a, 0x0b, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, - 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, - 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x24, 0x2e, 0x6c, 0x6f, 0x6f, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, + 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x1a, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, + 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, + 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0c, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x65, 0x61, + 0x64, 0x79, 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, + 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x16, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x61, 0x0a, 0x13, 0x52, 0x65, 0x6c, 0x61, + 0x79, 0x65, 0x72, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x12, + 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, + 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, + 0x6c, 0x61, 0x79, 0x65, 0x72, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x72, + 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x51, 0x0a, 0x0b, 0x52, + 
0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x12, 0x5e, 0x0a, 0x11, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4c, 0x61, 0x74, - 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x12, 0x22, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, + 0x61, 0x79, 0x65, 0x72, 0x49, 0x64, 0x1a, 0x24, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, + 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, + 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5e, + 0x0a, 0x11, 0x52, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, + 0x65, 0x61, 0x64, 0x12, 0x22, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, + 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, - 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x6c, 0x6f, - 0x6f, 0x70, 0x2e, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x2e, 0x4c, 0x61, - 0x74, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0x4e, 0x5a, 0x4c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, - 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x70, 0x62, 
0x2f, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, - 0x65, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x48, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x4e, + 0x5a, 0x4c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, + 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, + 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2f, 0x70, 0x62, 0x2f, 0x72, 0x65, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x73, 0x65, 0x74, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1199,8 +1201,8 @@ var file_relayerset_proto_goTypes = []interface{}{ (*NewPluginProviderResponse)(nil), // 9: loop.relayerset.NewPluginProviderResponse (*NewContractReaderRequest)(nil), // 10: loop.relayerset.NewContractReaderRequest (*NewContractReaderResponse)(nil), // 11: loop.relayerset.NewContractReaderResponse - (*NewChainWriterRequest)(nil), // 12: loop.relayerset.NewChainWriterRequest - (*NewChainWriterResponse)(nil), // 13: loop.relayerset.NewChainWriterResponse + (*NewContractWriterRequest)(nil), // 12: loop.relayerset.NewContractWriterRequest + (*NewContractWriterResponse)(nil), // 13: loop.relayerset.NewContractWriterResponse (*LatestHeadRequest)(nil), // 14: loop.relayerset.LatestHeadRequest (*LatestHeadResponse)(nil), // 15: loop.relayerset.LatestHeadResponse (*RelayerHealthReportResponse)(nil), // 16: loop.relayerset.RelayerHealthReportResponse @@ -1218,14 +1220,14 @@ var file_relayerset_proto_depIdxs = []int32{ 5, // 6: loop.relayerset.NewPluginProviderRequest.relayArgs:type_name -> loop.relayerset.RelayArgs 7, // 7: loop.relayerset.NewPluginProviderRequest.pluginArgs:type_name -> loop.relayerset.PluginArgs 0, // 8: loop.relayerset.NewContractReaderRequest.relayerId:type_name -> loop.relayerset.RelayerId - 0, 
// 9: loop.relayerset.NewChainWriterRequest.relayerId:type_name -> loop.relayerset.RelayerId + 0, // 9: loop.relayerset.NewContractWriterRequest.relayerId:type_name -> loop.relayerset.RelayerId 0, // 10: loop.relayerset.LatestHeadRequest.relayerId:type_name -> loop.relayerset.RelayerId 18, // 11: loop.relayerset.RelayerHealthReportResponse.report:type_name -> loop.relayerset.RelayerHealthReportResponse.ReportEntry 1, // 12: loop.relayerset.RelayerSet.Get:input_type -> loop.relayerset.GetRelayerRequest 3, // 13: loop.relayerset.RelayerSet.List:input_type -> loop.relayerset.ListAllRelayersRequest 8, // 14: loop.relayerset.RelayerSet.NewPluginProvider:input_type -> loop.relayerset.NewPluginProviderRequest 10, // 15: loop.relayerset.RelayerSet.NewContractReader:input_type -> loop.relayerset.NewContractReaderRequest - 12, // 16: loop.relayerset.RelayerSet.NewChainWriter:input_type -> loop.relayerset.NewChainWriterRequest + 12, // 16: loop.relayerset.RelayerSet.NewContractWriter:input_type -> loop.relayerset.NewContractWriterRequest 0, // 17: loop.relayerset.RelayerSet.StartRelayer:input_type -> loop.relayerset.RelayerId 0, // 18: loop.relayerset.RelayerSet.CloseRelayer:input_type -> loop.relayerset.RelayerId 0, // 19: loop.relayerset.RelayerSet.RelayerReady:input_type -> loop.relayerset.RelayerId @@ -1236,7 +1238,7 @@ var file_relayerset_proto_depIdxs = []int32{ 4, // 24: loop.relayerset.RelayerSet.List:output_type -> loop.relayerset.ListAllRelayersResponse 9, // 25: loop.relayerset.RelayerSet.NewPluginProvider:output_type -> loop.relayerset.NewPluginProviderResponse 11, // 26: loop.relayerset.RelayerSet.NewContractReader:output_type -> loop.relayerset.NewContractReaderResponse - 13, // 27: loop.relayerset.RelayerSet.NewChainWriter:output_type -> loop.relayerset.NewChainWriterResponse + 13, // 27: loop.relayerset.RelayerSet.NewContractWriter:output_type -> loop.relayerset.NewContractWriterResponse 19, // 28: loop.relayerset.RelayerSet.StartRelayer:output_type -> 
google.protobuf.Empty 19, // 29: loop.relayerset.RelayerSet.CloseRelayer:output_type -> google.protobuf.Empty 19, // 30: loop.relayerset.RelayerSet.RelayerReady:output_type -> google.protobuf.Empty @@ -1401,7 +1403,7 @@ func file_relayerset_proto_init() { } } file_relayerset_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NewChainWriterRequest); i { + switch v := v.(*NewContractWriterRequest); i { case 0: return &v.state case 1: @@ -1413,7 +1415,7 @@ func file_relayerset_proto_init() { } } file_relayerset_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NewChainWriterResponse); i { + switch v := v.(*NewContractWriterResponse); i { case 0: return &v.state case 1: diff --git a/pkg/loop/internal/pb/relayerset/relayerset.proto b/pkg/loop/internal/pb/relayerset/relayerset.proto index 5e1e4e441..0244c7127 100644 --- a/pkg/loop/internal/pb/relayerset/relayerset.proto +++ b/pkg/loop/internal/pb/relayerset/relayerset.proto @@ -67,13 +67,13 @@ message NewContractReaderResponse { uint32 contractReaderId = 1; } -message NewChainWriterRequest { +message NewContractWriterRequest { RelayerId relayerId = 1; - bytes chainWriterConfig = 2; + bytes contractWriterConfig = 2; } -message NewChainWriterResponse { - uint32 chainWriterId = 1; +message NewContractWriterResponse { + uint32 contractWriterId = 1; } message LatestHeadRequest { @@ -99,7 +99,7 @@ service RelayerSet { rpc List(ListAllRelayersRequest) returns (ListAllRelayersResponse) {} rpc NewPluginProvider(NewPluginProviderRequest) returns (NewPluginProviderResponse) {} rpc NewContractReader(NewContractReaderRequest) returns (NewContractReaderResponse) {} - rpc NewChainWriter(NewChainWriterRequest) returns (NewChainWriterResponse) {} + rpc NewContractWriter(NewContractWriterRequest) returns (NewContractWriterResponse) {} rpc StartRelayer(RelayerId) returns (google.protobuf.Empty) {} rpc CloseRelayer(RelayerId) returns (google.protobuf.Empty) {} diff 
--git a/pkg/loop/internal/pb/relayerset/relayerset_grpc.pb.go b/pkg/loop/internal/pb/relayerset/relayerset_grpc.pb.go index d922c51c0..21220a253 100644 --- a/pkg/loop/internal/pb/relayerset/relayerset_grpc.pb.go +++ b/pkg/loop/internal/pb/relayerset/relayerset_grpc.pb.go @@ -24,7 +24,7 @@ const ( RelayerSet_List_FullMethodName = "/loop.relayerset.RelayerSet/List" RelayerSet_NewPluginProvider_FullMethodName = "/loop.relayerset.RelayerSet/NewPluginProvider" RelayerSet_NewContractReader_FullMethodName = "/loop.relayerset.RelayerSet/NewContractReader" - RelayerSet_NewChainWriter_FullMethodName = "/loop.relayerset.RelayerSet/NewChainWriter" + RelayerSet_NewContractWriter_FullMethodName = "/loop.relayerset.RelayerSet/NewContractWriter" RelayerSet_StartRelayer_FullMethodName = "/loop.relayerset.RelayerSet/StartRelayer" RelayerSet_CloseRelayer_FullMethodName = "/loop.relayerset.RelayerSet/CloseRelayer" RelayerSet_RelayerReady_FullMethodName = "/loop.relayerset.RelayerSet/RelayerReady" @@ -41,7 +41,7 @@ type RelayerSetClient interface { List(ctx context.Context, in *ListAllRelayersRequest, opts ...grpc.CallOption) (*ListAllRelayersResponse, error) NewPluginProvider(ctx context.Context, in *NewPluginProviderRequest, opts ...grpc.CallOption) (*NewPluginProviderResponse, error) NewContractReader(ctx context.Context, in *NewContractReaderRequest, opts ...grpc.CallOption) (*NewContractReaderResponse, error) - NewChainWriter(ctx context.Context, in *NewChainWriterRequest, opts ...grpc.CallOption) (*NewChainWriterResponse, error) + NewContractWriter(ctx context.Context, in *NewContractWriterRequest, opts ...grpc.CallOption) (*NewContractWriterResponse, error) StartRelayer(ctx context.Context, in *RelayerId, opts ...grpc.CallOption) (*emptypb.Empty, error) CloseRelayer(ctx context.Context, in *RelayerId, opts ...grpc.CallOption) (*emptypb.Empty, error) RelayerReady(ctx context.Context, in *RelayerId, opts ...grpc.CallOption) (*emptypb.Empty, error) @@ -94,9 +94,9 @@ func (c 
*relayerSetClient) NewContractReader(ctx context.Context, in *NewContrac return out, nil } -func (c *relayerSetClient) NewChainWriter(ctx context.Context, in *NewChainWriterRequest, opts ...grpc.CallOption) (*NewChainWriterResponse, error) { - out := new(NewChainWriterResponse) - err := c.cc.Invoke(ctx, RelayerSet_NewChainWriter_FullMethodName, in, out, opts...) +func (c *relayerSetClient) NewContractWriter(ctx context.Context, in *NewContractWriterRequest, opts ...grpc.CallOption) (*NewContractWriterResponse, error) { + out := new(NewContractWriterResponse) + err := c.cc.Invoke(ctx, RelayerSet_NewContractWriter_FullMethodName, in, out, opts...) if err != nil { return nil, err } @@ -165,7 +165,7 @@ type RelayerSetServer interface { List(context.Context, *ListAllRelayersRequest) (*ListAllRelayersResponse, error) NewPluginProvider(context.Context, *NewPluginProviderRequest) (*NewPluginProviderResponse, error) NewContractReader(context.Context, *NewContractReaderRequest) (*NewContractReaderResponse, error) - NewChainWriter(context.Context, *NewChainWriterRequest) (*NewChainWriterResponse, error) + NewContractWriter(context.Context, *NewContractWriterRequest) (*NewContractWriterResponse, error) StartRelayer(context.Context, *RelayerId) (*emptypb.Empty, error) CloseRelayer(context.Context, *RelayerId) (*emptypb.Empty, error) RelayerReady(context.Context, *RelayerId) (*emptypb.Empty, error) @@ -191,8 +191,8 @@ func (UnimplementedRelayerSetServer) NewPluginProvider(context.Context, *NewPlug func (UnimplementedRelayerSetServer) NewContractReader(context.Context, *NewContractReaderRequest) (*NewContractReaderResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method NewContractReader not implemented") } -func (UnimplementedRelayerSetServer) NewChainWriter(context.Context, *NewChainWriterRequest) (*NewChainWriterResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method NewChainWriter not implemented") +func (UnimplementedRelayerSetServer) 
NewContractWriter(context.Context, *NewContractWriterRequest) (*NewContractWriterResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method NewContractWriter not implemented") } func (UnimplementedRelayerSetServer) StartRelayer(context.Context, *RelayerId) (*emptypb.Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method StartRelayer not implemented") @@ -297,20 +297,20 @@ func _RelayerSet_NewContractReader_Handler(srv interface{}, ctx context.Context, return interceptor(ctx, in, info, handler) } -func _RelayerSet_NewChainWriter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(NewChainWriterRequest) +func _RelayerSet_NewContractWriter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(NewContractWriterRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(RelayerSetServer).NewChainWriter(ctx, in) + return srv.(RelayerSetServer).NewContractWriter(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: RelayerSet_NewChainWriter_FullMethodName, + FullMethod: RelayerSet_NewContractWriter_FullMethodName, } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RelayerSetServer).NewChainWriter(ctx, req.(*NewChainWriterRequest)) + return srv.(RelayerSetServer).NewContractWriter(ctx, req.(*NewContractWriterRequest)) } return interceptor(ctx, in, info, handler) } @@ -447,8 +447,8 @@ var RelayerSet_ServiceDesc = grpc.ServiceDesc{ Handler: _RelayerSet_NewContractReader_Handler, }, { - MethodName: "NewChainWriter", - Handler: _RelayerSet_NewChainWriter_Handler, + MethodName: "NewContractWriter", + Handler: _RelayerSet_NewContractWriter_Handler, }, { MethodName: "StartRelayer", diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go 
b/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go index c22ed15f4..d47269ca0 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go @@ -95,6 +95,7 @@ func TestCodecClient(t *testing.T) { type fakeCodecInterfaceTester struct { interfaceTesterBase + interfacetests.TestSelectionSupport impl types.Codec } @@ -144,7 +145,10 @@ func (f *fakeCodec) Encode(_ context.Context, item any, itemType string) ([]byte return []byte{}, nil case interfacetests.TestItemWithConfigExtra: ts := item.(*interfacetests.TestStruct) - ts.Account = anyAccountBytes + ts.AccountStruct = interfacetests.AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } ts.BigField = big.NewInt(2) return encoder.Marshal(ts) case interfacetests.TestItemType, interfacetests.TestItemSliceType, interfacetests.TestItemArray2Type, interfacetests.TestItemArray1Type: diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader.go index 64e1dd191..5b49a7afe 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader.go @@ -6,6 +6,7 @@ import ( "encoding/json" "errors" "fmt" + "iter" "reflect" "github.com/fxamacker/cbor/v2" @@ -196,6 +197,44 @@ func (c *Client) GetLatestValue(ctx context.Context, readIdentifier string, conf return DecodeVersionedBytes(retVal, reply.RetVal) } +func (c *Client) GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, retVal any) (*types.Head, error) { + _, asValueType := retVal.(*values.Value) + + versionedParams, err := EncodeVersionedBytes(params, c.encodeWith) + if err != nil { + return nil, err + } + + pbConfidence, err := confidenceToProto(confidenceLevel) + if err != 
nil { + return nil, err + } + + reply, err := c.grpc.GetLatestValueWithHeadData( + ctx, + &pb.GetLatestValueRequest{ + ReadIdentifier: readIdentifier, + Confidence: pbConfidence, + Params: versionedParams, + AsValueType: asValueType, + }, + ) + if err != nil { + return nil, net.WrapRPCErr(err) + } + + var headData *types.Head + if reply.HeadData != nil { + headData = &types.Head{ + Height: reply.HeadData.Height, + Hash: reply.HeadData.Hash, + Timestamp: reply.HeadData.Timestamp, + } + } + + return headData, DecodeVersionedBytes(retVal, reply.RetVal) +} + func (c *Client) BatchGetLatestValues(ctx context.Context, request types.BatchGetLatestValuesRequest) (types.BatchGetLatestValuesResult, error) { pbRequest, err := convertBatchGetLatestValuesRequestToProto(request, c.encodeWith) if err != nil { @@ -242,6 +281,43 @@ func (c *Client) QueryKey(ctx context.Context, contract types.BoundContract, fil return convertSequencesFromProto(reply.Sequences, sequenceDataType) } +func (c *Client) QueryKeys(ctx context.Context, keyQueries []types.ContractKeyFilter, limitAndSort query.LimitAndSort) (iter.Seq2[string, types.Sequence], error) { + var filters []*pb.ContractKeyFilter + for _, keyQuery := range keyQueries { + _, asValueType := keyQuery.SequenceDataType.(*values.Value) + contract := convertBoundContractToProto(keyQuery.Contract) + + pbQueryFilter, err := convertQueryFilterToProto(keyQuery.KeyFilter, c.encodeWith) + if err != nil { + return nil, err + } + + filters = append(filters, &pb.ContractKeyFilter{ + Contract: contract, + Filter: pbQueryFilter, + AsValueType: asValueType, + }) + } + + pbLimitAndSort, err := convertLimitAndSortToProto(limitAndSort) + if err != nil { + return nil, err + } + + reply, err := c.grpc.QueryKeys( + ctx, + &pb.QueryKeysRequest{ + Filters: filters, + LimitAndSort: pbLimitAndSort, + }, + ) + if err != nil { + return nil, net.WrapRPCErr(err) + } + + return convertSequencesWithKeyFromProto(reply.Sequences, keyQueries) +} + func (c *Client) 
Bind(ctx context.Context, bindings []types.BoundContract) error { pbBindings := make([]*pb.BoundContract, len(bindings)) for i, b := range bindings { @@ -360,6 +436,53 @@ func (c *Server) GetLatestValue(ctx context.Context, request *pb.GetLatestValueR return &pb.GetLatestValueReply{RetVal: versionedBytes}, nil } +func (c *Server) GetLatestValueWithHeadData(ctx context.Context, request *pb.GetLatestValueRequest) (*pb.GetLatestValueWithHeadDataReply, error) { + params, err := getContractEncodedType(request.ReadIdentifier, c.impl, true) + if err != nil { + return nil, err + } + + if err = DecodeVersionedBytes(params, request.Params); err != nil { + return nil, err + } + + retVal, err := getContractEncodedType(request.ReadIdentifier, c.impl, false) + if err != nil { + return nil, err + } + + confidenceLevel, err := confidenceFromProto(request.Confidence) + if err != nil { + return nil, err + } + + headData, err := c.impl.GetLatestValueWithHeadData(ctx, request.ReadIdentifier, confidenceLevel, params, retVal) + if err != nil { + return nil, err + } + + encodeWith := EncodingVersion(request.Params.Version) + if request.AsValueType { + encodeWith = ValuesEncodingVersion + } + + versionedBytes, err := EncodeVersionedBytes(retVal, encodeWith) + if err != nil { + return nil, err + } + + var headDataProto *pb.Head + if headData != nil { + headDataProto = &pb.Head{ + Height: headData.Height, + Hash: headData.Hash, + Timestamp: headData.Timestamp, + } + } + + return &pb.GetLatestValueWithHeadDataReply{RetVal: versionedBytes, HeadData: headDataProto}, nil +} + func (c *Server) BatchGetLatestValues(ctx context.Context, pbRequest *pb.BatchGetLatestValuesRequest) (*pb.BatchGetLatestValuesReply, error) { request, err := convertBatchGetLatestValuesRequestFromProto(pbRequest, c.impl) if err != nil { @@ -377,7 +500,7 @@ func (c *Server) BatchGetLatestValues(ctx context.Context, pbRequest *pb.BatchGe func (c *Server) QueryKey(ctx context.Context, request *pb.QueryKeyRequest) 
(*pb.QueryKeyReply, error) { contract := convertBoundContractFromProto(request.Contract) - queryFilter, err := convertQueryFiltersFromProto(request, contract, c.impl) + queryFilter, err := convertQueryFiltersFromProto(request.Filter, contract, c.impl) if err != nil { return nil, err } @@ -410,6 +533,46 @@ func (c *Server) QueryKey(ctx context.Context, request *pb.QueryKeyRequest) (*pb return &pb.QueryKeyReply{Sequences: pbSequences}, nil } +func (c *Server) QueryKeys(ctx context.Context, request *pb.QueryKeysRequest) (*pb.QueryKeysReply, error) { + var filters []types.ContractKeyFilter + for _, keyQuery := range request.Filters { + contract := convertBoundContractFromProto(keyQuery.Contract) + + queryFilter, err := convertQueryFiltersFromProto(keyQuery.Filter, contract, c.impl) + if err != nil { + return nil, err + } + + sequenceDataType, err := getContractEncodedType(contract.ReadIdentifier(queryFilter.Key), c.impl, false) + if err != nil { + return nil, err + } + + filters = append(filters, types.ContractKeyFilter{ + Contract: contract, + KeyFilter: queryFilter, + SequenceDataType: sequenceDataType, + }) + } + + limitAndSort, err := convertLimitAndSortFromProto(request.GetLimitAndSort()) + if err != nil { + return nil, err + } + + sequences, err := c.impl.QueryKeys(ctx, filters, limitAndSort) + if err != nil { + return nil, err + } + + pbSequences, err := convertSequencesWithKeyToVersionedBytesProto(sequences, request.Filters, c.encodeWith) + if err != nil { + return nil, err + } + + return &pb.QueryKeysReply{Sequences: pbSequences}, nil +} + func (c *Server) Bind(ctx context.Context, bindings *pb.BindRequest) (*emptypb.Empty, error) { tBindings := make([]types.BoundContract, len(bindings.Bindings)) for i, b := range bindings.Bindings { @@ -618,11 +781,11 @@ func convertLimitAndSortToProto(limitAndSort query.LimitAndSort) (*pb.LimitAndSo var tp pb.SortType switch sort := sortBy.(type) { - case query.SortByBlock: + case query.SortByBlock, *query.SortByBlock: tp = 
pb.SortType_SortBlock - case query.SortByTimestamp: + case query.SortByTimestamp, *query.SortByTimestamp: tp = pb.SortType_SortTimestamp - case query.SortBySequence: + case query.SortBySequence, *query.SortBySequence: tp = pb.SortType_SortSequence default: return &pb.LimitAndSort{}, status.Errorf(codes.InvalidArgument, "Unknown sort by type: %T", sort) @@ -673,6 +836,42 @@ func convertSequencesToVersionedBytesProto(sequences []types.Sequence, version E return pbSequences, nil } +func convertSequencesWithKeyToVersionedBytesProto(sequences iter.Seq2[string, types.Sequence], filters []*pb.ContractKeyFilter, encodeWith EncodingVersion) ([]*pb.SequenceWithKey, error) { + keyToEncodingVersion := make(map[string]EncodingVersion) + for _, filter := range filters { + if filter.AsValueType { + keyToEncodingVersion[filter.Filter.Key] = ValuesEncodingVersion + } else { + keyToEncodingVersion[filter.Filter.Key] = encodeWith + } + } + + var pbSequences []*pb.SequenceWithKey + for key, sequence := range sequences { + version, ok := keyToEncodingVersion[key] + if !ok { + return nil, fmt.Errorf("missing encoding version for key %s", key) + } + + versionedSequenceDataType, err := EncodeVersionedBytes(sequence.Data, version) + if err != nil { + return nil, err + } + pbSequence := &pb.SequenceWithKey{ + Key: key, + SequenceCursor: sequence.Cursor, + Head: &pb.Head{ + Height: sequence.Height, + Hash: sequence.Hash, + Timestamp: sequence.Timestamp, + }, + Data: versionedSequenceDataType, + } + pbSequences = append(pbSequences, pbSequence) + } + return pbSequences, nil +} + func parseBatchGetLatestValuesReply(request types.BatchGetLatestValuesRequest, reply *pb.BatchGetLatestValuesReply) (types.BatchGetLatestValuesResult, error) { if reply == nil { return nil, fmt.Errorf("received nil reply from grpc BatchGetLatestValues") @@ -759,8 +958,7 @@ func convertBoundContractFromProto(contract *pb.BoundContract) types.BoundContra } } -func convertQueryFiltersFromProto(request 
*pb.QueryKeyRequest, contract types.BoundContract, impl types.ContractReader) (query.KeyFilter, error) { - pbQueryFilters := request.Filter +func convertQueryFiltersFromProto(pbQueryFilters *pb.QueryKeyFilter, contract types.BoundContract, impl types.ContractReader) (query.KeyFilter, error) { queryFilter := query.KeyFilter{Key: pbQueryFilters.Key} for _, pbQueryFilter := range pbQueryFilters.Expression { expression, err := convertExpressionFromProto(pbQueryFilter, contract, queryFilter.Key, impl) @@ -864,16 +1062,9 @@ func convertLimitAndSortFromProto(limitAndSort *pb.LimitAndSort) (query.LimitAnd func convertSequencesFromProto(pbSequences []*pb.Sequence, sequenceDataType any) ([]types.Sequence, error) { sequences := make([]types.Sequence, len(pbSequences)) - seqTypeOf := reflect.TypeOf(sequenceDataType) - - // get the non-pointer data type for the sequence data - nonPointerType := seqTypeOf - if seqTypeOf.Kind() == reflect.Pointer { - nonPointerType = seqTypeOf.Elem() - } - - if nonPointerType.Kind() == reflect.Pointer { - return nil, fmt.Errorf("%w: sequenceDataType does not support pointers to pointers", types.ErrInvalidType) + seqTypeOf, nonPointerType, err := getSequenceTypeInformation(sequenceDataType) + if err != nil { + return nil, err } for idx, pbSequence := range pbSequences { @@ -901,6 +1092,80 @@ func convertSequencesFromProto(pbSequences []*pb.Sequence, sequenceDataType any) return sequences, nil } +func getSequenceTypeInformation(sequenceDataType any) (reflect.Type, reflect.Type, error) { + seqTypeOf := reflect.TypeOf(sequenceDataType) + + // get the non-pointer data type for the sequence data + nonPointerType := seqTypeOf + if seqTypeOf.Kind() == reflect.Pointer { + nonPointerType = seqTypeOf.Elem() + } + + if nonPointerType.Kind() == reflect.Pointer { + return nil, nil, fmt.Errorf("%w: sequenceDataType does not support pointers to pointers", types.ErrInvalidType) + } + return seqTypeOf, nonPointerType, nil +} + +func 
convertSequencesWithKeyFromProto(pbSequences []*pb.SequenceWithKey, keyQueries []types.ContractKeyFilter) (iter.Seq2[string, types.Sequence], error) { + type sequenceWithKey struct { + Key string + Sequence types.Sequence + } + + sequencesWithKey := make([]sequenceWithKey, len(pbSequences)) + + keyToSeqTypeOf := make(map[string]reflect.Type) + keyToNonPointerType := make(map[string]reflect.Type) + + for _, keyQuery := range keyQueries { + seqTypeOf, nonPointerType, err := getSequenceTypeInformation(keyQuery.SequenceDataType) + if err != nil { + return nil, err + } + + keyToSeqTypeOf[keyQuery.Key] = seqTypeOf + keyToNonPointerType[keyQuery.Key] = nonPointerType + } + + for idx, pbSequence := range pbSequences { + seqTypeOf, nonPointerType := keyToSeqTypeOf[pbSequence.Key], keyToNonPointerType[pbSequence.Key] + + cpy := reflect.New(nonPointerType).Interface() + if err := DecodeVersionedBytes(cpy, pbSequence.Data); err != nil { + return nil, err + } + + // match provided data type either as pointer or non-pointer + if seqTypeOf.Kind() != reflect.Pointer { + cpy = reflect.Indirect(reflect.ValueOf(cpy)).Interface() + } + pbSeq := pbSequences[idx] + sequencesWithKey[idx] = sequenceWithKey{ + Key: pbSeq.Key, + Sequence: types.Sequence{ + Cursor: pbSeq.SequenceCursor, + Head: types.Head{ + Height: pbSeq.Head.Height, + Hash: pbSeq.Head.Hash, + Timestamp: pbSeq.Head.Timestamp, + }, + Data: cpy, + }, + } + } + + return func(yield func(string, types.Sequence) bool) { + for _, s := range sequencesWithKey { + if !yield(s.Key, s.Sequence) { + return + } + } + }, nil +} + func RegisterContractReaderService(s *grpc.Server, contractReader types.ContractReader) { - pb.RegisterServiceServer(s, &goplugin.ServiceServer{Srv: contractReader}) + service := goplugin.ServiceServer{Srv: contractReader} + pb.RegisterServiceServer(s, &service) + pb.RegisterContractReaderServer(s, NewServer(contractReader)) } diff --git 
a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go index 667131020..c050f9cf9 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "iter" "math/big" "reflect" "sort" @@ -29,7 +30,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" "github.com/smartcontractkit/chainlink-common/pkg/values" - . "github.com/smartcontractkit/chainlink-common/pkg/types/interfacetests" //nolint + . "github.com/smartcontractkit/chainlink-common/pkg/types/interfacetests" ) func TestVersionedBytesFunctions(t *testing.T) { @@ -84,6 +85,7 @@ func TestContractReaderInterfaceTests(t *testing.T) { contractreadertest.WithContractReaderLoopEncoding(version), ), true, + false, ) } }) @@ -297,15 +299,16 @@ func makeEncoder() cbor.EncMode { type fakeContractReaderInterfaceTester struct { interfaceTesterBase + TestSelectionSupport impl types.ContractReader - cw fakeChainWriter + cw fakeContractWriter } func (it *fakeContractReaderInterfaceTester) Setup(_ *testing.T) { fake, ok := it.impl.(*fakeContractReader) if ok { fake.vals = make(map[string][]valConfidencePair) - fake.triggers = make(map[string][]eventConfidencePair) + fake.triggers = newEventsRecorder() fake.stored = make(map[string][]TestStruct) } } @@ -314,7 +317,7 @@ func (it *fakeContractReaderInterfaceTester) GetContractReader(_ *testing.T) typ return it.impl } -func (it *fakeContractReaderInterfaceTester) GetChainWriter(_ *testing.T) types.ChainWriter { +func (it *fakeContractReaderInterfaceTester) GetContractWriter(_ *testing.T) types.ContractWriter { it.cw.cr = it.impl.(*fakeContractReader) return &it.cw } @@ -350,22 +353,95 @@ type eventConfidencePair struct { confidenceLevel primitives.ConfidenceLevel } +type 
dynamicTopicEventConfidencePair struct { + someDynamicTopicEvent SomeDynamicTopicEvent + confidenceLevel primitives.ConfidenceLevel +} +type event struct { + contractID string + event any + confidenceLevel primitives.ConfidenceLevel + eventType string +} + +type eventsRecorder struct { + mux sync.Mutex + events []event +} + +func newEventsRecorder() *eventsRecorder { + return &eventsRecorder{} +} + +func (e *eventsRecorder) RecordEvent(contractID string, evt any, confidenceLevel primitives.ConfidenceLevel, eventType string) error { + e.mux.Lock() + defer e.mux.Unlock() + + switch eventType { + case EventName: + _, ok := evt.(TestStruct) + if !ok { + return fmt.Errorf("unexpected event type %T", evt) + } + case DynamicTopicEventName: + _, ok := evt.(SomeDynamicTopicEvent) + if !ok { + return fmt.Errorf("unexpected event type %T", evt) + } + + } + + e.events = append(e.events, event{contractID: contractID, event: evt, confidenceLevel: confidenceLevel, eventType: eventType}) + + return nil +} + +func (e *eventsRecorder) setConfidenceLevelOnAllEvents(confidenceLevel primitives.ConfidenceLevel) { + e.mux.Lock() + defer e.mux.Unlock() + + for i := range e.events { + e.events[i].confidenceLevel = confidenceLevel + } +} + +func (e *eventsRecorder) getEvents(filters ...func(event) bool) []event { + e.mux.Lock() + defer e.mux.Unlock() + + events := make([]event, 0) + for _, event := range e.events { + match := true + for _, filter := range filters { + if !filter(event) { + match = false + break + } + } + if match { + events = append(events, event) + } + } + + return events +} + type fakeContractReader struct { types.UnimplementedContractReader fakeTypeProvider vals map[string][]valConfidencePair - triggers map[string][]eventConfidencePair + triggers *eventsRecorder stored map[string][]TestStruct batchStored BatchCallEntry lock sync.Mutex } -type fakeChainWriter struct { - types.ChainWriter +type fakeContractWriter struct { + types.ContractWriter cr *fakeContractReader } 
-func (f *fakeChainWriter) SubmitTransaction(_ context.Context, contractName, method string, args any, transactionID string, toAddress string, meta *types.TxMeta, value *big.Int) error { +func (f *fakeContractWriter) SubmitTransaction(_ context.Context, contractName, method string, args any, transactionID string, toAddress string, meta *types.TxMeta, value *big.Int) error { contractID := toAddress + "-" + contractName switch method { case MethodSettingStruct: @@ -381,12 +457,14 @@ func (f *fakeChainWriter) SubmitTransaction(_ context.Context, contractName, met } f.cr.SetUintLatestValue(contractID, v.Value, ExpectedGetLatestValueArgs{}) case MethodTriggeringEvent: - v, ok := args.(TestStruct) - if !ok { - return fmt.Errorf("unexpected type %T", args) + if err := f.cr.triggers.RecordEvent(contractID, args, primitives.Unconfirmed, EventName); err != nil { + return fmt.Errorf("failed to record event: %w", err) + } + case MethodTriggeringEventWithDynamicTopic: + if err := f.cr.triggers.RecordEvent(contractID, args, primitives.Unconfirmed, DynamicTopicEventName); err != nil { + return fmt.Errorf("failed to record event: %w", err) } - f.cr.SetTrigger(contractID, &v) - case "batchChainWrite": + case "batchContractWrite": v, ok := args.(BatchCallEntry) if !ok { return fmt.Errorf("unexpected type %T", args) @@ -399,11 +477,11 @@ func (f *fakeChainWriter) SubmitTransaction(_ context.Context, contractName, met return nil } -func (f *fakeChainWriter) GetTransactionStatus(ctx context.Context, transactionID string) (types.TransactionStatus, error) { +func (f *fakeContractWriter) GetTransactionStatus(ctx context.Context, transactionID string) (types.TransactionStatus, error) { return types.Finalized, nil } -func (f *fakeChainWriter) GetFeeComponents(ctx context.Context) (*types.ChainFeeComponents, error) { +func (f *fakeContractWriter) GetFeeComponents(ctx context.Context) (*types.ChainFeeComponents, error) { return &types.ChainFeeComponents{}, nil } @@ -484,21 +562,27 @@ func (f 
*fakeContractReader) GetLatestValue(_ context.Context, readIdentifier st rv := returnVal.(*TestStructWithExtraField) rv.TestStruct = *pv rv.ExtraField = AnyExtraValue - rv.Account = anyAccountBytes + rv.AccountStruct = AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } rv.BigField = big.NewInt(2) return nil } else if strings.HasSuffix(readIdentifier, EventName) { f.lock.Lock() defer f.lock.Unlock() - triggers := f.triggers[contractName] - if len(triggers) == 0 { + events := f.triggers.getEvents(func(e event) bool { + return e.contractID == contractName && e.eventType == EventName + }) + + if len(events) == 0 { return types.ErrNotFound } - for i := len(triggers) - 1; i >= 0; i-- { - if triggers[i].confidenceLevel == confidenceLevel { - *returnVal.(*TestStruct) = triggers[i].testStruct + for i := len(events) - 1; i >= 0; i-- { + if events[i].confidenceLevel == confidenceLevel { + *returnVal.(*TestStruct) = events[i].event.(TestStruct) return nil } } @@ -508,14 +592,33 @@ func (f *fakeContractReader) GetLatestValue(_ context.Context, readIdentifier st f.lock.Lock() defer f.lock.Unlock() param := params.(*FilterEventParams) - triggers := f.triggers[contractName] + triggers := f.triggers.getEvents(func(e event) bool { return e.contractID == contractName && e.eventType == EventName }) for i := len(triggers) - 1; i >= 0; i-- { - if *triggers[i].testStruct.Field == param.Field { - *returnVal.(*TestStruct) = triggers[i].testStruct + testStruct := triggers[i].event.(TestStruct) + if *testStruct.Field == param.Field { + *returnVal.(*TestStruct) = testStruct return nil } } return types.ErrNotFound + } else if strings.HasSuffix(readIdentifier, DynamicTopicEventName) { + f.lock.Lock() + defer f.lock.Unlock() + + triggers := f.triggers.getEvents(func(e event) bool { return e.contractID == contractName && e.eventType == DynamicTopicEventName }) + + if len(triggers) == 0 { + return types.ErrNotFound + } + + for i := len(triggers) - 1; i >= 0; i-- { + if 
triggers[i].confidenceLevel == confidenceLevel { + *returnVal.(*SomeDynamicTopicEvent) = triggers[i].event.(SomeDynamicTopicEvent) + return nil + } + } + + return fmt.Errorf("%w: no event with %s confidence was found ", types.ErrNotFound, confidenceLevel) } else if !strings.HasSuffix(readIdentifier, MethodTakingLatestParamsReturningTestStruct) { return errors.New("unknown method " + readIdentifier) } @@ -545,6 +648,15 @@ func (f *fakeContractReader) GetLatestValue(_ context.Context, readIdentifier st return nil } +func (f *fakeContractReader) GetLatestValueWithHeadData(_ context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (*types.Head, error) { + err := f.GetLatestValue(context.Background(), readIdentifier, confidenceLevel, params, returnVal) + if err != nil { + return nil, err + } + + return &types.Head{}, nil +} + func (f *fakeContractReader) BatchGetLatestValues(_ context.Context, request types.BatchGetLatestValuesRequest) (types.BatchGetLatestValuesResult, error) { result := make(types.BatchGetLatestValuesResult) for requestContract, requestContractBatch := range request { @@ -569,7 +681,10 @@ func (f *fakeContractReader) BatchGetLatestValues(_ context.Context, request typ *returnVal.(*[]uint64) = AnySliceToReadWithoutAnArgument } else if req.ReadName == MethodReturningSeenStruct { ts := *req.Params.(*TestStruct) - ts.Account = anyAccountBytes + ts.AccountStruct = AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } ts.BigField = big.NewInt(2) returnVal = &TestStructWithExtraField{ TestStruct: ts, @@ -598,112 +713,166 @@ func (f *fakeContractReader) BatchGetLatestValues(_ context.Context, request typ return result, nil } -func (f *fakeContractReader) QueryKey(_ context.Context, bc types.BoundContract, filter query.KeyFilter, limitAndSort query.LimitAndSort, sequenceType any) ([]types.Sequence, error) { - _, isValueType := sequenceType.(*values.Value) - if filter.Key == EventName { 
- f.lock.Lock() - defer f.lock.Unlock() - if len(f.triggers) == 0 { - return []types.Sequence{}, nil +func (f *fakeContractReader) QueryKey(ctx context.Context, bc types.BoundContract, filter query.KeyFilter, limitAndSort query.LimitAndSort, sequenceType any) ([]types.Sequence, error) { + seqsIter, err := f.QueryKeys(ctx, []types.ContractKeyFilter{types.ContractKeyFilter{ + KeyFilter: filter, + Contract: bc, + SequenceDataType: sequenceType, + }}, limitAndSort) + + if err != nil { + return nil, err + } + + if seqsIter != nil { + var seqs []types.Sequence + for _, seq := range seqsIter { + seqs = append(seqs, seq) } - var sequences []types.Sequence - for idx, trigger := range f.triggers[bc.String()] { - doAppend := true - for _, expr := range filter.Expressions { - if primitive, ok := expr.Primitive.(*primitives.Comparator); ok { - if len(primitive.ValueComparators) == 0 { - return nil, fmt.Errorf("value comparator for %s should not be empty", primitive.Name) - } - if primitive.Name == "Field" { - for _, valComp := range primitive.ValueComparators { - doAppend = doAppend && Compare(*trigger.testStruct.Field, *valComp.Value.(*int32), valComp.Operator) - } + return seqs, nil + } + + return nil, nil +} + +type sequenceWithEventType struct { + eventType string + sequence types.Sequence +} + +func (f *fakeContractReader) QueryKeys(_ context.Context, filters []types.ContractKeyFilter, limitAndSort query.LimitAndSort) (iter.Seq2[string, types.Sequence], error) { + f.lock.Lock() + defer f.lock.Unlock() + + supportedEventTypes := map[string]struct{}{EventName: {}, DynamicTopicEventName: {}} + + for _, filter := range filters { + if _, ok := supportedEventTypes[filter.Key]; !ok { + return nil, fmt.Errorf("unsupported event type %s", filter.Key) + } + } + + if len(filters) > 1 { + fmt.Printf("filters: %v\n", filters) + } + + isValueType := false + eventTypeToFilter := map[string]types.ContractKeyFilter{} + for _, filter := range filters { + eventTypeToFilter[filter.Key] = 
filter + _, isValueType = filter.SequenceDataType.(*values.Value) + } + + events := f.triggers.getEvents(func(e event) bool { + filter := eventTypeToFilter[e.eventType] + + if e.contractID != filter.Contract.String() { + return false + } + _, filterExistsForType := eventTypeToFilter[e.eventType] + + return filterExistsForType + }) + + var sequences []sequenceWithEventType + for idx, trigger := range events { + filter := eventTypeToFilter[trigger.eventType] + + doAppend := true + for _, expr := range filter.Expressions { + if primitive, ok := expr.Primitive.(*primitives.Comparator); ok { + if len(primitive.ValueComparators) == 0 { + return nil, fmt.Errorf("value comparator for %s should not be empty", primitive.Name) + } + if primitive.Name == "Field" { + for _, valComp := range primitive.ValueComparators { + doAppend = doAppend && Compare(*trigger.event.(TestStruct).Field, *valComp.Value.(*int32), valComp.Operator) } } } + } - var skipAppend bool - if limitAndSort.HasCursorLimit() { - cursor, err := strconv.Atoi(limitAndSort.Limit.Cursor) - if err != nil { - return nil, err - } + var skipAppend bool + if limitAndSort.HasCursorLimit() { + cursor, err := strconv.Atoi(limitAndSort.Limit.Cursor) + if err != nil { + return nil, err + } - // assume CursorFollowing order for now - if cursor >= idx { - skipAppend = true - } + // assume CursorFollowing order for now + if cursor >= idx { + skipAppend = true } + } - if (len(filter.Expressions) == 0 || doAppend) && !skipAppend { - if isValueType { - value, err := values.Wrap(trigger.testStruct) - if err != nil { - return nil, err - } - sequences = append(sequences, types.Sequence{Cursor: strconv.Itoa(idx), Data: &value}) - } else { - sequences = append(sequences, types.Sequence{Cursor: fmt.Sprintf("%d", idx), Data: trigger.testStruct}) + if (len(eventTypeToFilter[trigger.eventType].Expressions) == 0 || doAppend) && !skipAppend { + if isValueType { + value, err := values.Wrap(trigger.event) + if err != nil { + return nil, err } 
+ sequences = append(sequences, sequenceWithEventType{eventType: trigger.eventType, sequence: types.Sequence{Cursor: strconv.Itoa(idx), Data: &value}}) + } else { + sequences = append(sequences, sequenceWithEventType{eventType: trigger.eventType, sequence: types.Sequence{Cursor: fmt.Sprintf("%d", idx), Data: trigger.event}}) } + } - if limitAndSort.Limit.Count > 0 && len(sequences) >= int(limitAndSort.Limit.Count) { - break - } + if limitAndSort.Limit.Count > 0 && len(sequences) >= int(limitAndSort.Limit.Count) { + break } + } - if isValueType { - if !limitAndSort.HasSequenceSort() && !limitAndSort.HasCursorLimit() { - sort.Slice(sequences, func(i, j int) bool { - valI := *sequences[i].Data.(*values.Value) - valJ := *sequences[j].Data.(*values.Value) + if isValueType { + if !limitAndSort.HasSequenceSort() && !limitAndSort.HasCursorLimit() { + sort.Slice(sequences, func(i, j int) bool { + valI := *sequences[i].sequence.Data.(*values.Value) + valJ := *sequences[j].sequence.Data.(*values.Value) - mapI := valI.(*values.Map) - mapJ := valJ.(*values.Map) + mapI := valI.(*values.Map) + mapJ := valJ.(*values.Map) - if mapI.Underlying["Field"] == nil || mapJ.Underlying["Field"] == nil { - return false - } - var iVal int32 - err := mapI.Underlying["Field"].UnwrapTo(&iVal) - if err != nil { - panic(err) - } + if mapI.Underlying["Field"] == nil || mapJ.Underlying["Field"] == nil { + return false + } + var iVal int32 + err := mapI.Underlying["Field"].UnwrapTo(&iVal) + if err != nil { + panic(err) + } - var jVal int32 - err = mapJ.Underlying["Field"].UnwrapTo(&jVal) - if err != nil { - panic(err) - } + var jVal int32 + err = mapJ.Underlying["Field"].UnwrapTo(&jVal) + if err != nil { + panic(err) + } - return iVal > jVal - }) - } - } else { - if !limitAndSort.HasSequenceSort() && !limitAndSort.HasCursorLimit() { - sort.Slice(sequences, func(i, j int) bool { - if sequences[i].Data.(TestStruct).Field == nil || sequences[j].Data.(TestStruct).Field == nil { - return false - } - 
return *sequences[i].Data.(TestStruct).Field > *sequences[j].Data.(TestStruct).Field - }) + return iVal > jVal + }) + } + } else { + if !limitAndSort.HasSequenceSort() && !limitAndSort.HasCursorLimit() { + if len(eventTypeToFilter) == 1 { + if _, ok := eventTypeToFilter[EventName]; ok { + sort.Slice(sequences, func(i, j int) bool { + if sequences[i].sequence.Data.(TestStruct).Field == nil || sequences[j].sequence.Data.(TestStruct).Field == nil { + return false + } + return *sequences[i].sequence.Data.(TestStruct).Field > *sequences[j].sequence.Data.(TestStruct).Field + }) + } } } - - return sequences, nil } - return nil, nil -} -func (f *fakeContractReader) SetTrigger(contractID string, testStruct *TestStruct) { - f.lock.Lock() - defer f.lock.Unlock() - if _, ok := f.triggers[contractID]; !ok { - f.triggers[contractID] = []eventConfidencePair{} - } + return func(yield func(string, types.Sequence) bool) { + for _, s := range sequences { + if !yield(s.eventType, s.sequence) { + return + } + } + }, nil - f.triggers[contractID] = append(f.triggers[contractID], eventConfidencePair{testStruct: *testStruct, confidenceLevel: primitives.Unconfirmed}) } func (f *fakeContractReader) GenerateBlocksTillConfidenceLevel(_ *testing.T, _, _ string, confidenceLevel primitives.ConfidenceLevel) { @@ -716,11 +885,7 @@ func (f *fakeContractReader) GenerateBlocksTillConfidenceLevel(_ *testing.T, _, } } - for contractID, triggers := range f.triggers { - for i, trigger := range triggers { - f.triggers[contractID][i] = eventConfidencePair{testStruct: trigger.testStruct, confidenceLevel: confidenceLevel} - } - } + f.triggers.setConfidenceLevelOnAllEvents(confidenceLevel) } type errContractReader struct { @@ -998,6 +1163,7 @@ func runContractReaderByIDQueryKey(t *testing.T) { func(t *testing.T) { t.Parallel() fake := &fakeContractReader{} + fakeCW := &fakeContractWriter{cr: fake} tester := &fakeContractReaderInterfaceTester{impl: fake} tester.Setup(t) @@ -1015,14 +1181,14 @@ func 
runContractReaderByIDQueryKey(t *testing.T) { require.NoError(t, cr.Bind(ctx, toBind)) ts1AnyContract := CreateTestStruct(0, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1AnyContract, anyContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts1AnyContract, anyContract, types.Unconfirmed) ts2AnyContract := CreateTestStruct(1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2AnyContract, anyContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts2AnyContract, anyContract, types.Unconfirmed) ts1AnySecondContract := CreateTestStruct(0, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1AnySecondContract, anySecondContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts1AnySecondContract, anySecondContract, types.Unconfirmed) ts2AnySecondContract := CreateTestStruct(1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2AnySecondContract, anySecondContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts2AnySecondContract, anySecondContract, types.Unconfirmed) tsAnyContractType := &TestStruct{} require.Eventually(t, func() bool { @@ -1041,6 +1207,8 @@ func runContractReaderByIDQueryKey(t *testing.T) { func(t *testing.T) { t.Parallel() fake := &fakeContractReader{} + fakeCW := &fakeContractWriter{cr: fake} + tester := &fakeContractReaderInterfaceTester{impl: fake} tester.Setup(t) @@ -1061,22 +1229,22 @@ func runContractReaderByIDQueryKey(t *testing.T) { require.NoError(t, cr.Bind(ctx, toBind)) ts1AnyContract1 := CreateTestStruct(0, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1AnyContract1, anyContract1, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts1AnyContract1, anyContract1, types.Unconfirmed) ts2AnyContract1 := 
CreateTestStruct(1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2AnyContract1, anyContract1, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts2AnyContract1, anyContract1, types.Unconfirmed) ts1AnyContract2 := CreateTestStruct(2, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1AnyContract2, anyContract2, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts1AnyContract2, anyContract2, types.Unconfirmed) ts2AnyContract2 := CreateTestStruct(3, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2AnyContract2, anyContract2, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts2AnyContract2, anyContract2, types.Unconfirmed) ts1AnySecondContract1 := CreateTestStruct(4, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1AnySecondContract1, anySecondContract1, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts1AnySecondContract1, anySecondContract1, types.Unconfirmed) ts2AnySecondContract1 := CreateTestStruct(5, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2AnySecondContract1, anySecondContract1, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts2AnySecondContract1, anySecondContract1, types.Unconfirmed) ts1AnySecondContract2 := CreateTestStruct(6, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1AnySecondContract2, anySecondContract2, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, ts1AnySecondContract2, anySecondContract2, types.Unconfirmed) ts2AnySecondContract2 := CreateTestStruct(7, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2AnySecondContract2, anySecondContract2, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, fakeCW, MethodTriggeringEvent, 
ts2AnySecondContract2, anySecondContract2, types.Unconfirmed) tsAnyContractType := &TestStruct{} require.Eventually(t, func() bool { diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go index 00c8adae7..aa66b4edb 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go @@ -44,11 +44,16 @@ func (*cannotEncode) UnmarshalText() error { type interfaceTesterBase struct{} var anyAccountBytes = []byte{1, 2, 3} +var anyAccountString = string(anyAccountBytes) func (it *interfaceTesterBase) GetAccountBytes(_ int) []byte { return anyAccountBytes } +func (it *interfaceTesterBase) GetAccountString(_ int) string { + return anyAccountString +} + func (it *interfaceTesterBase) Name() string { return "relay client" } @@ -94,6 +99,11 @@ func (fakeTypeProvider) CreateContractType(readName string, isEncode bool) (any, return &FilterEventParams{}, nil } return &TestStruct{}, nil + case strings.HasSuffix(readName, DynamicTopicEventName), strings.HasSuffix(readName, EventWithFilterName): + if isEncode { + return &FilterEventParams{}, nil + } + return &SomeDynamicTopicEvent{}, nil case strings.HasSuffix(readName, EventNameField): if isEncode { var typ int32 diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/test/contract_reader_loop_tester.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/test/contract_reader_loop_tester.go index 427552d80..23671fbe5 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/test/contract_reader_loop_tester.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/test/contract_reader_loop_tester.go @@ -33,7 +33,7 @@ func TestAllEncodings(t *testing.T, test func(contractreader.EncodingVersion) fu type LoopTesterOpt func(*contractReaderLoopTester) -// WrapContractReaderTesterForLoop allows you to test a 
[types.ContractReader] and [types.ChainWriter] implementation behind a LOOP server +// WrapContractReaderTesterForLoop allows you to test a [types.ContractReader] and [types.ContractWriter] implementation behind a LOOP server func WrapContractReaderTesterForLoop(wrapped ChainComponentsInterfaceTester[*testing.T], opts ...LoopTesterOpt) ChainComponentsInterfaceTester[*testing.T] { tester := &contractReaderLoopTester{ ChainComponentsInterfaceTester: wrapped, @@ -56,6 +56,7 @@ func WithContractReaderLoopEncoding(version contractreader.EncodingVersion) Loop type contractReaderLoopTester struct { ChainComponentsInterfaceTester[*testing.T] lst loopServerTester + conn *grpc.ClientConn encodeWith contractreader.EncodingVersion } @@ -71,10 +72,11 @@ func (c *contractReaderLoopTester) Setup(t *testing.T) { } c.lst.Setup(t) + c.conn = c.lst.GetConn(t) } func (c *contractReaderLoopTester) GetContractReader(t *testing.T) types.ContractReader { - return contractreader.NewClient(nil, c.lst.GetConn(t), contractreader.WithClientEncoding(c.encodeWith)) + return contractreader.NewClient(nil, c.conn, contractreader.WithClientEncoding(c.encodeWith)) } func (c *contractReaderLoopTester) Name() string { diff --git a/pkg/loop/internal/relayer/pluginprovider/chainwriter/chain_writer.go b/pkg/loop/internal/relayer/pluginprovider/contractwriter/contract_writer.go similarity index 88% rename from pkg/loop/internal/relayer/pluginprovider/chainwriter/chain_writer.go rename to pkg/loop/internal/relayer/pluginprovider/contractwriter/contract_writer.go index f1f39b78d..f5fe32471 100644 --- a/pkg/loop/internal/relayer/pluginprovider/chainwriter/chain_writer.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractwriter/contract_writer.go @@ -1,4 +1,4 @@ -package chainwriter +package contractwriter import ( "context" @@ -14,20 +14,20 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/types" ) -var _ types.ChainWriter = (*Client)(nil) +var _ types.ContractWriter = (*Client)(nil) type 
ClientOpt func(*Client) type Client struct { *goplugin.ServiceClient - grpc pb.ChainWriterClient + grpc pb.ContractWriterClient encodeWith contractreader.EncodingVersion } func NewClient(b *net.BrokerExt, cc grpc.ClientConnInterface, opts ...ClientOpt) *Client { client := &Client{ ServiceClient: goplugin.NewServiceClient(b, cc), - grpc: pb.NewChainWriterClient(cc), + grpc: pb.NewContractWriterClient(cc), encodeWith: contractreader.DefaultEncodingVersion, } @@ -91,17 +91,17 @@ func (c *Client) GetFeeComponents(ctx context.Context) (*types.ChainFeeComponent // Server. -var _ pb.ChainWriterServer = (*Server)(nil) +var _ pb.ContractWriterServer = (*Server)(nil) type ServerOpt func(*Server) type Server struct { - pb.UnimplementedChainWriterServer - impl types.ChainWriter + pb.UnimplementedContractWriterServer + impl types.ContractWriter encodeWith contractreader.EncodingVersion } -func NewServer(impl types.ChainWriter, opts ...ServerOpt) pb.ChainWriterServer { +func NewServer(impl types.ContractWriter, opts ...ServerOpt) pb.ContractWriterServer { server := &Server{ impl: impl, encodeWith: contractreader.DefaultEncodingVersion, @@ -150,6 +150,6 @@ func (s *Server) GetFeeComponents(ctx context.Context, _ *emptypb.Empty) (*pb.Ge }, nil } -func RegisterChainWriterService(s *grpc.Server, chainWriter types.ChainWriter) { - pb.RegisterServiceServer(s, &goplugin.ServiceServer{Srv: chainWriter}) +func RegisterContractWriterService(s *grpc.Server, contractWriter types.ContractWriter) { + pb.RegisterServiceServer(s, &goplugin.ServiceServer{Srv: contractWriter}) } diff --git a/pkg/loop/internal/relayer/pluginprovider/chainwriter/converters.go b/pkg/loop/internal/relayer/pluginprovider/contractwriter/converters.go similarity index 97% rename from pkg/loop/internal/relayer/pluginprovider/chainwriter/converters.go rename to pkg/loop/internal/relayer/pluginprovider/contractwriter/converters.go index 732e0c199..1a21d6bb3 100644 --- 
a/pkg/loop/internal/relayer/pluginprovider/chainwriter/converters.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractwriter/converters.go @@ -1,4 +1,4 @@ -package chainwriter +package contractwriter import ( "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb" diff --git a/pkg/loop/internal/relayer/pluginprovider/chainwriter/converters_test.go b/pkg/loop/internal/relayer/pluginprovider/contractwriter/converters_test.go similarity index 66% rename from pkg/loop/internal/relayer/pluginprovider/chainwriter/converters_test.go rename to pkg/loop/internal/relayer/pluginprovider/contractwriter/converters_test.go index 6e41e42fa..43539a2f9 100644 --- a/pkg/loop/internal/relayer/pluginprovider/chainwriter/converters_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractwriter/converters_test.go @@ -1,4 +1,4 @@ -package chainwriter_test +package contractwriter_test import ( "math/big" @@ -7,36 +7,36 @@ import ( "github.com/stretchr/testify/require" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb" - "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/chainwriter" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractwriter" "github.com/smartcontractkit/chainlink-common/pkg/types" ) func TestTxMetaFromProto(t *testing.T) { t.Run("with nil meta", func(t *testing.T) { - meta := chainwriter.TxMetaFromProto(nil) + meta := contractwriter.TxMetaFromProto(nil) require.Nil(t, meta) }) t.Run("with nil workflow id", func(t *testing.T) { - meta := chainwriter.TxMetaFromProto(&pb.TransactionMeta{}) + meta := contractwriter.TxMetaFromProto(&pb.TransactionMeta{}) require.NotNil(t, meta) require.Nil(t, meta.WorkflowExecutionID) }) t.Run("with workflow id", func(t *testing.T) { - meta := chainwriter.TxMetaFromProto(&pb.TransactionMeta{WorkflowExecutionId: "workflow-id"}) + meta := contractwriter.TxMetaFromProto(&pb.TransactionMeta{WorkflowExecutionId: "workflow-id"}) 
require.NotNil(t, meta) require.Equal(t, "workflow-id", *meta.WorkflowExecutionID) }) t.Run("without gas limit", func(t *testing.T) { - meta := chainwriter.TxMetaFromProto(&pb.TransactionMeta{}) + meta := contractwriter.TxMetaFromProto(&pb.TransactionMeta{}) require.NotNil(t, meta) require.Nil(t, meta.GasLimit) }) t.Run("with gas limit", func(t *testing.T) { - meta := chainwriter.TxMetaFromProto(&pb.TransactionMeta{GasLimit: pb.NewBigIntFromInt(big.NewInt(10))}) + meta := contractwriter.TxMetaFromProto(&pb.TransactionMeta{GasLimit: pb.NewBigIntFromInt(big.NewInt(10))}) require.NotNil(t, meta) require.Equal(t, big.NewInt(10), meta.GasLimit) }) @@ -44,31 +44,31 @@ func TestTxMetaFromProto(t *testing.T) { func TestTxMetaToProto(t *testing.T) { t.Run("with nil meta", func(t *testing.T) { - proto := chainwriter.TxMetaToProto(nil) + proto := contractwriter.TxMetaToProto(nil) require.Nil(t, proto) }) t.Run("with empty workflow id", func(t *testing.T) { - proto := chainwriter.TxMetaToProto(&types.TxMeta{}) + proto := contractwriter.TxMetaToProto(&types.TxMeta{}) require.NotNil(t, proto) require.Empty(t, proto.WorkflowExecutionId) }) t.Run("with workflow id", func(t *testing.T) { workflowID := "workflow-id" - proto := chainwriter.TxMetaToProto(&types.TxMeta{WorkflowExecutionID: &workflowID}) + proto := contractwriter.TxMetaToProto(&types.TxMeta{WorkflowExecutionID: &workflowID}) require.NotNil(t, proto) require.Equal(t, workflowID, proto.WorkflowExecutionId) }) t.Run("without gas limit", func(t *testing.T) { - proto := chainwriter.TxMetaToProto(&types.TxMeta{}) + proto := contractwriter.TxMetaToProto(&types.TxMeta{}) require.NotNil(t, proto) require.Empty(t, proto.GasLimit) }) t.Run("with gas limit", func(t *testing.T) { - proto := chainwriter.TxMetaToProto(&types.TxMeta{GasLimit: big.NewInt(10)}) + proto := contractwriter.TxMetaToProto(&types.TxMeta{GasLimit: big.NewInt(10)}) require.NotNil(t, proto) require.Equal(t, big.NewInt(10), proto.GasLimit.Int()) }) diff --git 
a/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go b/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go index 9282a48fd..167bbe5a6 100644 --- a/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go +++ b/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go @@ -74,8 +74,8 @@ func (p *ProviderClient) OnchainConfigCodec() median.OnchainConfigCodec { return p.onchainConfigCodec } -func (m *ProviderClient) ContractReader() types.ContractReader { - return m.contractReader +func (p *ProviderClient) ContractReader() types.ContractReader { + return p.contractReader } func (p *ProviderClient) Codec() types.Codec { diff --git a/pkg/loop/internal/relayer/relayer.go b/pkg/loop/internal/relayer/relayer.go index 97cdc35e6..e15470c3b 100644 --- a/pkg/loop/internal/relayer/relayer.go +++ b/pkg/loop/internal/relayer/relayer.go @@ -17,8 +17,8 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/goplugin" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb" - "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/chainwriter" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractreader" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractwriter" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/ext/ccip" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/ext/median" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/ext/mercury" @@ -195,15 +195,15 @@ func newRelayerClient(b *net.BrokerExt, conn grpc.ClientConnInterface) *relayerC return &relayerClient{b, goplugin.NewServiceClient(b, conn), pb.NewRelayerClient(conn)} } -func (r *relayerClient) NewChainWriter(_ context.Context, chainWriterConfig []byte) 
(types.ChainWriter, error) { - cwc := r.NewClientConn("ChainWriter", func(ctx context.Context) (uint32, net.Resources, error) { - reply, err := r.relayer.NewChainWriter(ctx, &pb.NewChainWriterRequest{ChainWriterConfig: chainWriterConfig}) +func (r *relayerClient) NewContractWriter(_ context.Context, contractWriterConfig []byte) (types.ContractWriter, error) { + cwc := r.NewClientConn("ContractWriter", func(ctx context.Context) (uint32, net.Resources, error) { + reply, err := r.relayer.NewContractWriter(ctx, &pb.NewContractWriterRequest{ContractWriterConfig: contractWriterConfig}) if err != nil { return 0, nil, err } - return reply.ChainWriterID, nil, nil + return reply.ContractWriterID, nil, nil }) - return chainwriter.NewClient(r.WithName("ChainWriterClient"), cwc), nil + return contractwriter.NewClient(r.WithName("ContractWriterClient"), cwc), nil } func (r *relayerClient) NewContractReader(_ context.Context, contractReaderConfig []byte) (types.ContractReader, error) { @@ -377,8 +377,8 @@ func newChainRelayerServer(impl looptypes.Relayer, b *net.BrokerExt) *relayerSer return &relayerServer{impl: impl, BrokerExt: b.WithName("ChainRelayerServer")} } -func (r *relayerServer) NewChainWriter(ctx context.Context, request *pb.NewChainWriterRequest) (*pb.NewChainWriterReply, error) { - cw, err := r.impl.NewChainWriter(ctx, request.GetChainWriterConfig()) +func (r *relayerServer) NewContractWriter(ctx context.Context, request *pb.NewContractWriterRequest) (*pb.NewContractWriterReply, error) { + cw, err := r.impl.NewContractWriter(ctx, request.GetContractWriterConfig()) if err != nil { return nil, err } @@ -387,15 +387,15 @@ func (r *relayerServer) NewChainWriter(ctx context.Context, request *pb.NewChain return nil, err } - const name = "ChainWriter" + const name = "ContractWriter" id, _, err := r.ServeNew(name, func(s *grpc.Server) { - chainwriter.RegisterChainWriterService(s, cw) + contractwriter.RegisterContractWriterService(s, cw) }, net.Resource{Closer: cw, Name: 
name}) if err != nil { return nil, err } - return &pb.NewChainWriterReply{ChainWriterID: id}, nil + return &pb.NewContractWriterReply{ContractWriterID: id}, nil } func (r *relayerServer) NewContractReader(ctx context.Context, request *pb.NewContractReaderRequest) (*pb.NewContractReaderReply, error) { diff --git a/pkg/loop/internal/relayer/test/relayer.go b/pkg/loop/internal/relayer/test/relayer.go index f226284a1..dd04ab902 100644 --- a/pkg/loop/internal/relayer/test/relayer.go +++ b/pkg/loop/internal/relayer/test/relayer.go @@ -135,7 +135,7 @@ func (s staticPluginRelayer) Name() string { panic("unimplemented") } func (s staticPluginRelayer) HealthReport() map[string]error { panic("unimplemented") } -func (s staticPluginRelayer) NewChainWriter(_ context.Context, chainWriterConfig []byte) (types.ChainWriter, error) { +func (s staticPluginRelayer) NewContractWriter(_ context.Context, _ []byte) (types.ContractWriter, error) { return nil, errors.New("not implemented") } diff --git a/pkg/loop/internal/relayerset/client.go b/pkg/loop/internal/relayerset/client.go index 5dc4524b3..b9909f024 100644 --- a/pkg/loop/internal/relayerset/client.go +++ b/pkg/loop/internal/relayerset/client.go @@ -152,14 +152,14 @@ func (k *Client) NewContractReader(ctx context.Context, relayID types.RelayID, c return resp.ContractReaderId, nil } -func (k *Client) NewChainWriter(ctx context.Context, relayID types.RelayID, chainWriterConfig []byte) (uint32, error) { - req := &relayerset.NewChainWriterRequest{ - RelayerId: &relayerset.RelayerId{ChainId: relayID.ChainID, Network: relayID.Network}, - ChainWriterConfig: chainWriterConfig, +func (k *Client) NewContractWriter(ctx context.Context, relayID types.RelayID, contractWriterConfig []byte) (uint32, error) { + req := &relayerset.NewContractWriterRequest{ + RelayerId: &relayerset.RelayerId{ChainId: relayID.ChainID, Network: relayID.Network}, + ContractWriterConfig: contractWriterConfig, } - resp, err := k.relayerSetClient.NewChainWriter(ctx, req) 
+ resp, err := k.relayerSetClient.NewContractWriter(ctx, req) if err != nil { - return 0, fmt.Errorf("error getting new chain writer: %w", err) + return 0, fmt.Errorf("error getting new contract writer: %w", err) } - return resp.ChainWriterId, nil + return resp.ContractWriterId, nil } diff --git a/pkg/loop/internal/relayerset/relayer.go b/pkg/loop/internal/relayerset/relayer.go index fe0cd4351..e42d09e7f 100644 --- a/pkg/loop/internal/relayerset/relayer.go +++ b/pkg/loop/internal/relayerset/relayer.go @@ -7,8 +7,8 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer" - "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/chainwriter" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractreader" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractwriter" "github.com/smartcontractkit/chainlink-common/pkg/types" "github.com/smartcontractkit/chainlink-common/pkg/types/core" ) @@ -49,15 +49,15 @@ func (r *relayerClient) NewContractReader(_ context.Context, contractReaderConfi return contractreader.NewClient(r.relayerSetClient.BrokerExt.WithName("ContractReaderClientInRelayerSet"), cc), nil } -func (r *relayerClient) NewChainWriter(_ context.Context, chainWriterConfig []byte) (types.ChainWriter, error) { - cwc := r.relayerSetClient.NewClientConn("ChainWriter", func(ctx context.Context) (uint32, net.Resources, error) { - chainWriterID, err := r.relayerSetClient.NewChainWriter(ctx, r.relayerID, chainWriterConfig) +func (r *relayerClient) NewContractWriter(_ context.Context, contractWriterConfig []byte) (types.ContractWriter, error) { + cwc := r.relayerSetClient.NewClientConn("ContractWriter", func(ctx context.Context) (uint32, net.Resources, error) { + contractWriterID, err := 
r.relayerSetClient.NewContractWriter(ctx, r.relayerID, contractWriterConfig) if err != nil { return 0, nil, err } - return chainWriterID, nil, nil + return contractWriterID, nil, nil }) - return chainwriter.NewClient(r.relayerSetClient.BrokerExt.WithName("ChainWriterClient"), cwc), nil + return contractwriter.NewClient(r.relayerSetClient.BrokerExt.WithName("ContractWriterClient"), cwc), nil } func (r *relayerClient) Start(ctx context.Context) error { diff --git a/pkg/loop/internal/relayerset/server.go b/pkg/loop/internal/relayerset/server.go index 8e809f1f8..a3fa55ec9 100644 --- a/pkg/loop/internal/relayerset/server.go +++ b/pkg/loop/internal/relayerset/server.go @@ -13,8 +13,8 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb/relayerset" - "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/chainwriter" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractreader" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayer/pluginprovider/contractwriter" "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/relayerset/inprocessprovider" "github.com/smartcontractkit/chainlink-common/pkg/types" "github.com/smartcontractkit/chainlink-common/pkg/types/core" @@ -139,15 +139,15 @@ func (s *Server) NewPluginProvider(ctx context.Context, req *relayerset.NewPlugi return &relayerset.NewPluginProviderResponse{PluginProviderId: providerID}, nil } -// RelayerSet is supposed to serve relayers, which then hold a ContractReader and ChainWriter. Serving NewContractReader -// and NewChainWriter from RelayerSet is a way to save us from instantiating an extra server for the Relayer. Without +// RelayerSet is supposed to serve relayers, which then hold a ContractReader and ContractWriter. 
Serving NewContractReader +// and NewContractWriter from RelayerSet is a way to save us from instantiating an extra server for the Relayer. Without // this approach, the calls we would make normally are // - RelayerSet.Get -> Relayer // - Relayer.NewContractReader -> ContractReader // // We could translate this to the GRPC world by having each call to RelayerSet.Get wrap the returned relayer in a server // and register that to the GRPC server. However this is actually pretty inefficient since a relayer object on its own -// is not useful. Users will always want to use the relayer to instantiate a contractreader or chainwriter. So we can avoid +// is not useful. Users will always want to use the relayer to instantiate a contractreader or contractwriter. So we can avoid // the intermediate server for the relayer by just storing a reference to the relayerSet client and the relayer we want // to fetch. I.e. the calls described above instead would become: // - RelayerSet.Get -> (RelayerSetClient, RelayerID). Effectively this call just acts as check that Relayer exists @@ -182,47 +182,47 @@ func (s *Server) NewContractReader(ctx context.Context, req *relayerset.NewContr return &relayerset.NewContractReaderResponse{ContractReaderId: id}, nil } -// RelayerSet is supposed to serve relayers, which then hold a ContractReader and ChainWriter. Serving NewChainWriter -// and NewChainWriter from RelayerSet is a way to save us from instantiating an extra server for the Relayer. Without +// RelayerSet is supposed to serve relayers, which then hold a ContractReader and ContractWriter. Serving NewContractWriter +// and NewContractWriter from RelayerSet is a way to save us from instantiating an extra server for the Relayer. 
Without // this approach, the calls we would make normally are // - RelayerSet.Get -> Relayer -// - Relayer.NewChainWriter -> ChainWriter +// - Relayer.NewContractWriter -> ContractWriter // // We could translate this to the GRPC world by having each call to RelayerSet.Get wrap the returned relayer in a server // and register that to the GRPC server. However this is actually pretty inefficient since a relayer object on its own -// is not useful. Users will always want to use the relayer to instantiate a contractreader or chainwriter. So we can avoid +// is not useful. Users will always want to use the relayer to instantiate a contractreader or contractwriter. So we can avoid // the intermediate server for the relayer by just storing a reference to the relayerSet client and the relayer we want // to fetch. I.e. the calls described above instead would become: // - RelayerSet.Get -> (RelayerSetClient, RelayerID). Effectively this call just acts as check that Relayer exists // -// RelayerClient.NewChainWriter -> This is a call to RelayerSet.NewChainWriter with (relayerID, []contractReaderConfig); -// The implementation will then fetch the relayer and call NewChainWriter on it -func (s *Server) NewChainWriter(ctx context.Context, req *relayerset.NewChainWriterRequest) (*relayerset.NewChainWriterResponse, error) { +// RelayerClient.NewContractWriter -> This is a call to RelayerSet.NewContractWriter with (relayerID, []contractWriterConfig); +// The implementation will then fetch the relayer and call NewContractWriter on it +func (s *Server) NewContractWriter(ctx context.Context, req *relayerset.NewContractWriterRequest) (*relayerset.NewContractWriterResponse, error) { relayer, err := s.getRelayer(ctx, req.RelayerId) if err != nil { return nil, err } - chainWriter, err := relayer.NewChainWriter(ctx, req.ChainWriterConfig) + contractWriter, err := relayer.NewContractWriter(ctx, req.ContractWriterConfig) if err != nil { return nil, status.Errorf(codes.Internal, "error 
creating contract reader: %v", err) } - // Start ChainWriter service - if err = chainWriter.Start(ctx); err != nil { + // Start ContractWriter service + if err = contractWriter.Start(ctx); err != nil { return nil, err } - // Start gRPC service for the ChainWriter service above - const name = "ChainWriterInRelayerSet" + // Start gRPC service for the ContractWriter service above + const name = "ContractWriterInRelayerSet" id, _, err := s.broker.ServeNew(name, func(s *grpc.Server) { - chainwriter.RegisterChainWriterService(s, chainWriter) - }, net.Resource{Closer: chainWriter, Name: name}) + contractwriter.RegisterContractWriterService(s, contractWriter) + }, net.Resource{Closer: contractWriter, Name: name}) if err != nil { return nil, err } - return &relayerset.NewChainWriterResponse{ChainWriterId: id}, nil + return &relayerset.NewContractWriterResponse{ContractWriterId: id}, nil } // getProviderConnection wraps a non-LOOPP provider in an in process provider server. This can be removed once all providers are LOOPP providers. 
diff --git a/pkg/loop/internal/test/test_plugin.go b/pkg/loop/internal/test/test_plugin.go index fe175c224..1aec20cb6 100644 --- a/pkg/loop/internal/test/test_plugin.go +++ b/pkg/loop/internal/test/test_plugin.go @@ -71,8 +71,9 @@ func PluginTest[TB testing.TB, I any](tb TB, name string, p plugin.Plugin, testF require.NotNil(tb, config) c := plugin.NewClient(&plugin.ClientConfig{ - Reattach: config, - Plugins: map[string]plugin.Plugin{name: p}, + Reattach: config, + Plugins: map[string]plugin.Plugin{name: p}, + SkipHostEnv: true, }) tb.Cleanup(c.Kill) clientProtocol, err := c.Client() diff --git a/pkg/loop/internal/types/types.go b/pkg/loop/internal/types/types.go index e97225313..30f8a73fa 100644 --- a/pkg/loop/internal/types/types.go +++ b/pkg/loop/internal/types/types.go @@ -3,6 +3,8 @@ package internal import ( "context" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/keystore" + "github.com/smartcontractkit/chainlink-common/pkg/services" "github.com/smartcontractkit/chainlink-common/pkg/types" "github.com/smartcontractkit/chainlink-common/pkg/types/core" ) @@ -43,9 +45,9 @@ type OCR3CapabilityProvider interface { type Relayer interface { types.ChainService - // NewChainWriter returns a new ChainWriter. + // NewContractWriter returns a new ContractWriter. // The format of config depends on the implementation. - NewChainWriter(ctx context.Context, chainWriterConfig []byte) (types.ChainWriter, error) + NewContractWriter(ctx context.Context, contractWriterConfig []byte) (types.ContractWriter, error) // NewContractReader returns a new ContractReader. // The format of contractReaderConfig depends on the implementation. 
@@ -54,3 +56,9 @@ type Relayer interface { NewPluginProvider(context.Context, types.RelayArgs, types.PluginArgs) (types.PluginProvider, error) NewLLOProvider(context.Context, types.RelayArgs, types.PluginArgs) (types.LLOProvider, error) } + +// Keystore This interface contains all the keystore GRPC functionality, keystore.Keystore is meant to be exposed to consumers and the keystore.Management interface in exposed only to the core node +type Keystore interface { + services.Service + keystore.GRPCService +} diff --git a/pkg/loop/keystore_service.go b/pkg/loop/keystore_service.go new file mode 100644 index 000000000..1cd93dad7 --- /dev/null +++ b/pkg/loop/keystore_service.go @@ -0,0 +1,32 @@ +package loop + +import ( + "context" + "fmt" + "os/exec" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/goplugin" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/keystore" +) + +// KeystoreService is a [types.Service] that maintains an internal [keystore.Keystore]. 
+type KeystoreService struct { + goplugin.PluginService[*GRPCPluginKeystore, keystore.GRPCService] +} + +func NewKeystoreService(lggr logger.Logger, grpcOpts GRPCOpts, cmd func() *exec.Cmd, config []byte) *KeystoreService { + newService := func(ctx context.Context, instance any) (keystore.GRPCService, error) { + plug, ok := instance.(*keystore.Client) + if !ok { + return nil, fmt.Errorf("expected PluginKeystore but got %T", instance) + } + return plug, nil + } + stopCh := make(chan struct{}) + lggr = logger.Named(lggr, "KeystoreService") + var rs KeystoreService + broker := BrokerConfig{StopCh: stopCh, Logger: lggr, GRPCOpts: grpcOpts} + rs.Init(PluginKeystoreName, &GRPCPluginKeystore{BrokerConfig: broker}, newService, lggr, cmd, stopCh) + return &rs +} diff --git a/pkg/loop/mocks/relayer.go b/pkg/loop/mocks/relayer.go index 097546a0d..10ef5e3d6 100644 --- a/pkg/loop/mocks/relayer.go +++ b/pkg/loop/mocks/relayer.go @@ -347,65 +347,6 @@ func (_c *Relayer_Name_Call) RunAndReturn(run func() string) *Relayer_Name_Call return _c } -// NewChainWriter provides a mock function with given fields: ctx, chainWriterConfig -func (_m *Relayer) NewChainWriter(ctx context.Context, chainWriterConfig []byte) (types.ChainWriter, error) { - ret := _m.Called(ctx, chainWriterConfig) - - if len(ret) == 0 { - panic("no return value specified for NewChainWriter") - } - - var r0 types.ChainWriter - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []byte) (types.ChainWriter, error)); ok { - return rf(ctx, chainWriterConfig) - } - if rf, ok := ret.Get(0).(func(context.Context, []byte) types.ChainWriter); ok { - r0 = rf(ctx, chainWriterConfig) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(types.ChainWriter) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { - r1 = rf(ctx, chainWriterConfig) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Relayer_NewChainWriter_Call is a *mock.Call that shadows Run/Return methods with type 
explicit version for method 'NewChainWriter' -type Relayer_NewChainWriter_Call struct { - *mock.Call -} - -// NewChainWriter is a helper method to define mock.On call -// - ctx context.Context -// - chainWriterConfig []byte -func (_e *Relayer_Expecter) NewChainWriter(ctx interface{}, chainWriterConfig interface{}) *Relayer_NewChainWriter_Call { - return &Relayer_NewChainWriter_Call{Call: _e.mock.On("NewChainWriter", ctx, chainWriterConfig)} -} - -func (_c *Relayer_NewChainWriter_Call) Run(run func(ctx context.Context, chainWriterConfig []byte)) *Relayer_NewChainWriter_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]byte)) - }) - return _c -} - -func (_c *Relayer_NewChainWriter_Call) Return(_a0 types.ChainWriter, _a1 error) *Relayer_NewChainWriter_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *Relayer_NewChainWriter_Call) RunAndReturn(run func(context.Context, []byte) (types.ChainWriter, error)) *Relayer_NewChainWriter_Call { - _c.Call.Return(run) - return _c -} - // NewConfigProvider provides a mock function with given fields: _a0, _a1 func (_m *Relayer) NewConfigProvider(_a0 context.Context, _a1 types.RelayArgs) (types.ConfigProvider, error) { ret := _m.Called(_a0, _a1) @@ -524,6 +465,65 @@ func (_c *Relayer_NewContractReader_Call) RunAndReturn(run func(context.Context, return _c } +// NewContractWriter provides a mock function with given fields: ctx, contractWriterConfig +func (_m *Relayer) NewContractWriter(ctx context.Context, contractWriterConfig []byte) (types.ContractWriter, error) { + ret := _m.Called(ctx, contractWriterConfig) + + if len(ret) == 0 { + panic("no return value specified for NewContractWriter") + } + + var r0 types.ContractWriter + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []byte) (types.ContractWriter, error)); ok { + return rf(ctx, contractWriterConfig) + } + if rf, ok := ret.Get(0).(func(context.Context, []byte) types.ContractWriter); ok { + r0 = rf(ctx, 
contractWriterConfig) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(types.ContractWriter) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { + r1 = rf(ctx, contractWriterConfig) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Relayer_NewContractWriter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewContractWriter' +type Relayer_NewContractWriter_Call struct { + *mock.Call +} + +// NewContractWriter is a helper method to define mock.On call +// - ctx context.Context +// - contractWriterConfig []byte +func (_e *Relayer_Expecter) NewContractWriter(ctx interface{}, contractWriterConfig interface{}) *Relayer_NewContractWriter_Call { + return &Relayer_NewContractWriter_Call{Call: _e.mock.On("NewContractWriter", ctx, contractWriterConfig)} +} + +func (_c *Relayer_NewContractWriter_Call) Run(run func(ctx context.Context, contractWriterConfig []byte)) *Relayer_NewContractWriter_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].([]byte)) + }) + return _c +} + +func (_c *Relayer_NewContractWriter_Call) Return(_a0 types.ContractWriter, _a1 error) *Relayer_NewContractWriter_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Relayer_NewContractWriter_Call) RunAndReturn(run func(context.Context, []byte) (types.ContractWriter, error)) *Relayer_NewContractWriter_Call { + _c.Call.Return(run) + return _c +} + // NewLLOProvider provides a mock function with given fields: _a0, _a1, _a2 func (_m *Relayer) NewLLOProvider(_a0 context.Context, _a1 types.RelayArgs, _a2 types.PluginArgs) (types.LLOProvider, error) { ret := _m.Called(_a0, _a1, _a2) diff --git a/pkg/loop/plugin_keystore.go b/pkg/loop/plugin_keystore.go new file mode 100644 index 000000000..a5b3b68bf --- /dev/null +++ b/pkg/loop/plugin_keystore.go @@ -0,0 +1,53 @@ +package loop + +import ( + "context" + + "github.com/hashicorp/go-plugin" + "google.golang.org/grpc" + + keystorepb 
"github.com/smartcontractkit/chainlink-common/pkg/loop/internal/keystore" + "github.com/smartcontractkit/chainlink-common/pkg/types/keystore" +) + +// PluginKeystoreName is the name for keystore.Keystore +const PluginKeystoreName = "keystore" + +func PluginKeystoreHandshakeConfig() plugin.HandshakeConfig { + return plugin.HandshakeConfig{ + MagicCookieKey: "CL_PLUGIN_KEYSTORE_MAGIC_COOKIE", + MagicCookieValue: "fe81b132-0d3d-4c16-9f13-c2f7bfd3c361", + } +} + +type GRPCPluginKeystore struct { + plugin.NetRPCUnsupportedPlugin + + BrokerConfig + + PluginServer keystorepb.GRPCService + + pluginClient *keystorepb.Client +} + +func (p *GRPCPluginKeystore) GRPCServer(broker *plugin.GRPCBroker, server *grpc.Server) error { + return keystorepb.RegisterKeystoreServer(server, broker, p.BrokerConfig, p.PluginServer) +} + +func (p *GRPCPluginKeystore) GRPCClient(_ context.Context, broker *plugin.GRPCBroker, conn *grpc.ClientConn) (interface{}, error) { + if p.pluginClient == nil { + p.pluginClient = keystorepb.NewKeystoreClient(broker, p.BrokerConfig, conn) + } else { + p.pluginClient.Refresh(broker, conn) + } + + return keystore.Keystore(p.pluginClient), nil +} + +func (p *GRPCPluginKeystore) ClientConfig() *plugin.ClientConfig { + c := &plugin.ClientConfig{ + HandshakeConfig: PluginKeystoreHandshakeConfig(), + Plugins: map[string]plugin.Plugin{PluginKeystoreName: p}, + } + return ManagedGRPCClientConfig(c, p.BrokerConfig) +} diff --git a/pkg/loop/relayer_service.go b/pkg/loop/relayer_service.go index c076d61d0..7420854fa 100644 --- a/pkg/loop/relayer_service.go +++ b/pkg/loop/relayer_service.go @@ -48,11 +48,11 @@ func (r *RelayerService) NewContractReader(ctx context.Context, contractReaderCo return r.Service.NewContractReader(ctx, contractReaderConfig) } -func (r *RelayerService) NewChainWriter(ctx context.Context, chainWriterConfig []byte) (types.ChainWriter, error) { +func (r *RelayerService) NewContractWriter(ctx context.Context, contractWriterConfig []byte) 
(types.ContractWriter, error) { if err := r.WaitCtx(ctx); err != nil { return nil, err } - return r.Service.NewChainWriter(ctx, chainWriterConfig) + return r.Service.NewContractWriter(ctx, contractWriterConfig) } func (r *RelayerService) NewConfigProvider(ctx context.Context, args types.RelayArgs) (types.ConfigProvider, error) { diff --git a/pkg/loop/reportingplugins/loopp_service_test.go b/pkg/loop/reportingplugins/loopp_service_test.go index e17c0a231..e4ba1bd31 100644 --- a/pkg/loop/reportingplugins/loopp_service_test.go +++ b/pkg/loop/reportingplugins/loopp_service_test.go @@ -50,39 +50,42 @@ func TestLOOPPService(t *testing.T) { {Plugin: reportingplugins.PluginServiceName}, } for _, ts := range tests { - looppSvc := reportingplugins.NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { - return NewHelperProcessCommand(ts.Plugin) - }, - core.ReportingPluginServiceConfig{}, - nettest.MockConn{}, - pipelinetest.PipelineRunner, - telemetrytest.Telemetry, - errorlogtest.ErrorLog, - keyvaluestoretest.KeyValueStore{}, - relayersettest.RelayerSet{}) - hook := looppSvc.XXXTestHook() - servicetest.Run(t, looppSvc) - - t.Run("control", func(t *testing.T) { - reportingplugintest.RunFactory(t, looppSvc) - }) - - t.Run("Kill", func(t *testing.T) { - hook.Kill() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - reportingplugintest.RunFactory(t, looppSvc) - }) - - t.Run("Reset", func(t *testing.T) { - hook.Reset() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - reportingplugintest.RunFactory(t, looppSvc) + t.Run(ts.Plugin, func(t *testing.T) { + t.Parallel() + looppSvc := reportingplugins.NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { + return NewHelperProcessCommand(ts.Plugin) + }, + core.ReportingPluginServiceConfig{}, + nettest.MockConn{}, + pipelinetest.PipelineRunner, + telemetrytest.Telemetry, + errorlogtest.ErrorLog, + keyvaluestoretest.KeyValueStore{}, + 
relayersettest.RelayerSet{}) + hook := looppSvc.XXXTestHook() + servicetest.Run(t, looppSvc) + + t.Run("control", func(t *testing.T) { + reportingplugintest.RunFactory(t, looppSvc) + }) + + t.Run("Kill", func(t *testing.T) { + hook.Kill() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + reportingplugintest.RunFactory(t, looppSvc) + }) + + t.Run("Reset", func(t *testing.T) { + hook.Reset() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + reportingplugintest.RunFactory(t, looppSvc) + }) }) } } diff --git a/pkg/loop/reportingplugins/ocr3/loopp_service_test.go b/pkg/loop/reportingplugins/ocr3/loopp_service_test.go index 5b17c263f..b15531cc5 100644 --- a/pkg/loop/reportingplugins/ocr3/loopp_service_test.go +++ b/pkg/loop/reportingplugins/ocr3/loopp_service_test.go @@ -54,40 +54,43 @@ func TestLOOPPService(t *testing.T) { }, } for _, ts := range tests { - looppSvc := NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { - return NewHelperProcessCommand(ts.Plugin) - }, - core.ReportingPluginServiceConfig{}, - nettest.MockConn{}, - pipelinetest.PipelineRunner, - telemetrytest.Telemetry, - errorlogtest.ErrorLog, - core.CapabilitiesRegistry(nil), - keyvaluestoretest.KeyValueStore{}, - relayersettest.RelayerSet{}) - hook := looppSvc.XXXTestHook() - servicetest.Run(t, looppSvc) - - t.Run("control", func(t *testing.T) { - ocr3test.OCR3ReportingPluginFactory(t, looppSvc) - }) - - t.Run("Kill", func(t *testing.T) { - hook.Kill() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - ocr3test.OCR3ReportingPluginFactory(t, looppSvc) - }) - - t.Run("Reset", func(t *testing.T) { - hook.Reset() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + t.Run(ts.Plugin, func(t *testing.T) { + t.Parallel() + looppSvc := NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { + return 
NewHelperProcessCommand(ts.Plugin) + }, + core.ReportingPluginServiceConfig{}, + nettest.MockConn{}, + pipelinetest.PipelineRunner, + telemetrytest.Telemetry, + errorlogtest.ErrorLog, + core.CapabilitiesRegistry(nil), + keyvaluestoretest.KeyValueStore{}, + relayersettest.RelayerSet{}) + hook := looppSvc.XXXTestHook() + servicetest.Run(t, looppSvc) + + t.Run("control", func(t *testing.T) { + ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + }) + + t.Run("Kill", func(t *testing.T) { + hook.Kill() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + }) + + t.Run("Reset", func(t *testing.T) { + hook.Reset() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + }) }) } } diff --git a/pkg/loop/server.go b/pkg/loop/server.go index 3741e4b1d..6cf4f0c5b 100644 --- a/pkg/loop/server.go +++ b/pkg/loop/server.go @@ -58,7 +58,7 @@ func newServer(loggerName string) (*Server, error) { lggr, err := NewLogger() if err != nil { - return nil, fmt.Errorf("error creating logger: %s", err) + return nil, fmt.Errorf("error creating logger: %w", err) } lggr = logger.Named(lggr, loggerName) s.Logger = logger.Sugared(lggr) @@ -90,15 +90,26 @@ func (s *Server) start() error { if tracingConfig.Enabled { attributes = tracingConfig.Attributes() } + beholderCfg := beholder.Config{ - InsecureConnection: envCfg.TelemetryInsecureConnection, - CACertFile: envCfg.TelemetryCACertFile, - OtelExporterGRPCEndpoint: envCfg.TelemetryEndpoint, - ResourceAttributes: append(attributes, envCfg.TelemetryAttributes.AsStringAttributes()...), - TraceSampleRatio: envCfg.TelemetryTraceSampleRatio, + InsecureConnection: envCfg.TelemetryInsecureConnection, + CACertFile: envCfg.TelemetryCACertFile, + OtelExporterGRPCEndpoint: envCfg.TelemetryEndpoint, + ResourceAttributes: append(attributes, envCfg.TelemetryAttributes.AsStringAttributes()...), + TraceSampleRatio: 
envCfg.TelemetryTraceSampleRatio, + AuthHeaders: envCfg.TelemetryAuthHeaders, + AuthPublicKeyHex: envCfg.TelemetryAuthPubKeyHex, + EmitterBatchProcessor: envCfg.TelemetryEmitterBatchProcessor, + EmitterExportTimeout: envCfg.TelemetryEmitterExportTimeout, + EmitterExportInterval: envCfg.TelemetryEmitterExportInterval, + EmitterExportMaxBatchSize: envCfg.TelemetryEmitterExportMaxBatchSize, + EmitterMaxQueueSize: envCfg.TelemetryEmitterMaxQueueSize, } if tracingConfig.Enabled { + if beholderCfg.AuthHeaders != nil { + tracingConfig.AuthHeaders = beholderCfg.AuthHeaders + } exporter, err := tracingConfig.NewSpanExporter() if err != nil { return fmt.Errorf("failed to setup tracing exporter: %w", err) diff --git a/pkg/loop/standard_capabilities_test.go b/pkg/loop/standard_capabilities_test.go index b9d63c01e..e437cd82e 100644 --- a/pkg/loop/standard_capabilities_test.go +++ b/pkg/loop/standard_capabilities_test.go @@ -61,6 +61,7 @@ func TestRunningStandardCapabilitiesPluginOutOfProcess(t *testing.T) { func newOutOfProcessStandardCapabilitiesService(t *testing.T, staticChecks bool, stopCh <-chan struct{}) loop.StandardCapabilities { scl := loop.StandardCapabilitiesLoop{Logger: logger.Test(t), BrokerConfig: loop.BrokerConfig{Logger: logger.Test(t), StopCh: stopCh}} cc := scl.ClientConfig() + cc.SkipHostEnv = true cc.Cmd = NewHelperProcessCommand(loop.PluginStandardCapabilitiesName, staticChecks, 0) c := plugin.NewClient(cc) t.Cleanup(c.Kill) diff --git a/pkg/loop/telem.go b/pkg/loop/telem.go index 4d80d496b..c66949b23 100644 --- a/pkg/loop/telem.go +++ b/pkg/loop/telem.go @@ -53,6 +53,9 @@ type TracingConfig struct { // OnDialError is called when the dialer fails, providing an opportunity to log. OnDialError func(error) + + // Auth + AuthHeaders map[string]string } // NewGRPCOpts initializes open telemetry and returns GRPCOpts with telemetry interceptors. 
@@ -150,7 +153,10 @@ func (config TracingConfig) NewSpanExporter() (sdktrace.SpanExporter, error) { return nil, err } - traceExporter, err := otlptracegrpc.New(ctx, otlptracegrpc.WithGRPCConn(conn)) + traceExporter, err := otlptracegrpc.New(ctx, + otlptracegrpc.WithGRPCConn(conn), + otlptracegrpc.WithHeaders(config.AuthHeaders), + ) if err != nil { return nil, err } diff --git a/pkg/loop/testutils/utils.go b/pkg/loop/testutils/utils.go index 8df3cce6d..555255f5b 100644 --- a/pkg/loop/testutils/utils.go +++ b/pkg/loop/testutils/utils.go @@ -11,7 +11,7 @@ import ( // the duplication of the function is required so that the test of the LOOP servers themselves // can dog food the same testers without creating a circular dependency. -// WrapContractReaderTesterForLoop allows you to test a [types.ContractReader] and [types.ChainWriter] implementation behind a LOOP server +// WrapContractReaderTesterForLoop allows you to test a [types.ContractReader] and [types.ContractWriter] implementation behind a LOOP server func WrapContractReaderTesterForLoop(wrapped interfacetests.ChainComponentsInterfaceTester[*testing.T]) interfacetests.ChainComponentsInterfaceTester[*testing.T] { return test.WrapContractReaderTesterForLoop(wrapped) } diff --git a/pkg/metrics/metrics_labeler.go b/pkg/metrics/metrics_labeler.go new file mode 100644 index 000000000..ade67d780 --- /dev/null +++ b/pkg/metrics/metrics_labeler.go @@ -0,0 +1,33 @@ +package metrics + +type Labeler struct { + Labels map[string]string +} + +func NewLabeler() Labeler { + return Labeler{Labels: make(map[string]string)} +} + +// With adds multiple key-value pairs to the Labeler to eventually be consumed by a Beholder metrics resource +func (c Labeler) With(keyValues ...string) Labeler { + newCustomMetricsLabeler := NewLabeler() + + if len(keyValues)%2 != 0 { + // If an odd number of key-value arguments is passed, return the original CustomMessageLabeler unchanged + return c + } + + // Copy existing labels from the current 
agent + for k, v := range c.Labels { + newCustomMetricsLabeler.Labels[k] = v + } + + // Add new key-value pairs + for i := 0; i < len(keyValues); i += 2 { + key := keyValues[i] + value := keyValues[i+1] + newCustomMetricsLabeler.Labels[key] = value + } + + return newCustomMetricsLabeler +} diff --git a/pkg/metrics/metrics_labeler_test.go b/pkg/metrics/metrics_labeler_test.go new file mode 100644 index 000000000..25d5e51d7 --- /dev/null +++ b/pkg/metrics/metrics_labeler_test.go @@ -0,0 +1,16 @@ +package metrics + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// tests CustomMessageAgent does not share state across new instances created by `With` +func Test_CustomMessageAgent(t *testing.T) { + cma := NewLabeler() + cma1 := cma.With("key1", "value1") + cma2 := cma1.With("key2", "value2") + + assert.NotEqual(t, cma1.Labels, cma2.Labels) +} diff --git a/pkg/services/health.go b/pkg/services/health.go index 59b4cb60f..7108e53b6 100644 --- a/pkg/services/health.go +++ b/pkg/services/health.go @@ -16,12 +16,13 @@ import ( // HealthReporter should be implemented by any type requiring health checks. type HealthReporter interface { // Ready should return nil if ready, or an error message otherwise. From the k8s docs: - // > ready means it’s initialized and clearCond means that it can accept traffic in kubernetes + // > ready means it’s initialized and healthy means that it can accept traffic in kubernetes // See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/ Ready() error // HealthReport returns a full health report of the callee including its dependencies. - // key is the dep name, value is nil if clearCond, or error message otherwise. + // Keys are based on Name(), with nil values when healthy or errors otherwise. // Use CopyHealth to collect reports from sub-services. + // This should run very fast, so avoid doing computation and instead prefer reporting pre-calculated state. 
HealthReport() map[string]error // Name returns the fully qualified name of the component. Usually the logger name. Name() string @@ -86,7 +87,12 @@ var ( ) ) +// Deprecated: Use NewHealthChecker func NewChecker(ver, sha string) *HealthChecker { + return NewHealthChecker(ver, sha) +} + +func NewHealthChecker(ver, sha string) *HealthChecker { if ver == "" || sha == "" { if bi, ok := debug.ReadBuildInfo(); ok { if ver == "" { @@ -251,3 +257,14 @@ func (c *HealthChecker) IsHealthy() (healthy bool, errors map[string]error) { return } + +// ContainsError - returns true if report contains targetErr +func ContainsError(report map[string]error, targetErr error) bool { + for _, err := range report { + if errors.Is(err, targetErr) { + return true + } + } + + return false +} diff --git a/pkg/services/health_test.go b/pkg/services/health_test.go new file mode 100644 index 000000000..325d2cf20 --- /dev/null +++ b/pkg/services/health_test.go @@ -0,0 +1,58 @@ +package services + +import ( + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestContainsError(t *testing.T) { + anError := errors.New("an error") + anotherError := errors.New("another error") + testCases := []struct { + Name string + Report map[string]error + Target error + ExpectedResult bool + }{ + { + Name: "nil map", + Report: nil, + Target: anError, + ExpectedResult: false, + }, + { + Name: "report contains service, but it's healthy", + Report: map[string]error{"service": nil}, + Target: anError, + ExpectedResult: false, + }, + { + Name: "service is not healthy, but it's not caused by target error", + Report: map[string]error{"service": anotherError}, + Target: anError, + ExpectedResult: false, + }, + { + Name: "service is not healthy and contains wrapper target", + Report: map[string]error{"service": fmt.Errorf("wrapped error: %w", anError)}, + Target: anError, + ExpectedResult: true, + }, + { + Name: "service is not healthy due to multiple errors including target", + Report: 
map[string]error{"service": errors.Join(anError, anotherError)}, + Target: anError, + ExpectedResult: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + actualResult := ContainsError(tc.Report, tc.Target) + assert.Equal(t, tc.ExpectedResult, actualResult) + }) + } +} diff --git a/pkg/services/multi.go b/pkg/services/multi.go index 9d5620bd4..5bdfe9633 100644 --- a/pkg/services/multi.go +++ b/pkg/services/multi.go @@ -7,7 +7,7 @@ import ( "sync" ) -// StartClose is a subset of the ServiceCtx interface. +// StartClose is a subset of the Service interface. type StartClose interface { Start(context.Context) error Close() error diff --git a/pkg/services/service.go b/pkg/services/service.go index 893f470be..85726e924 100644 --- a/pkg/services/service.go +++ b/pkg/services/service.go @@ -8,7 +8,6 @@ import ( "github.com/google/uuid" "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "golang.org/x/exp/maps" @@ -44,14 +43,28 @@ type Engine struct { StopChan logger.SugaredLogger + tracer trace.Tracer + wg sync.WaitGroup - emitHealthErr func(error) - conds map[string]error - condsMu sync.RWMutex + serviceMethods interface { + emitHealthErr(error) + ifStarted(func() error) error + ifNotStopped(func() error) error + } + conds map[string]error + condsMu sync.RWMutex } // Go runs fn in a tracked goroutine that will block closing the service. +// +// If this operation runs continuously in the background, then do not trace it. +// If this operation will terminate, consider tracing via Tracer: +// +// v.e.Go(func(ctx context.Context) { +// ctx, span := v.e.Tracer().Start(ctx, "MyOperationName") +// defer span.End() +// }) func (e *Engine) Go(fn func(context.Context)) { e.wg.Add(1) go func() { @@ -65,6 +78,13 @@ func (e *Engine) Go(fn func(context.Context)) { // GoTick is like Go but calls fn for each tick. 
// // v.e.GoTick(services.NewTicker(time.Minute), v.method) +// +// Consider tracing each tick via Tracer: +// +// v.e.GoTick(services.NewTicker(time.Minute), func(ctx context.Context) { +// ctx, span := v.e.Tracer().Start(ctx, "MyOperationName") +// defer span.End() +// }) func (e *Engine) GoTick(ticker *timeutil.Ticker, fn func(context.Context)) { e.Go(func(ctx context.Context) { defer ticker.Stop() @@ -79,15 +99,26 @@ func (e *Engine) GoTick(ticker *timeutil.Ticker, fn func(context.Context)) { }) } +// Tracer returns the otel tracer with service attributes included. +func (e *Engine) Tracer() trace.Tracer { + return e.tracer +} + // EmitHealthErr records an error to be reported via the next call to Healthy(). -func (e *Engine) EmitHealthErr(err error) { e.emitHealthErr(err) } +func (e *Engine) EmitHealthErr(err error) { e.serviceMethods.emitHealthErr(err) } + +// IfStarted calls fn only if the service is started. +func (e *Engine) IfStarted(fn func() error) error { return e.serviceMethods.ifStarted(fn) } + +// IfNotStopped calls fn only if the service is not stopped. +func (e *Engine) IfNotStopped(fn func() error) error { return e.serviceMethods.ifNotStopped(fn) } // SetHealthCond records a condition key and an error, which causes an unhealthy report, until ClearHealthCond(condition) is called. // condition keys are for internal use only, and do not show up in the health report. func (e *Engine) SetHealthCond(condition string, err error) { e.condsMu.Lock() defer e.condsMu.Unlock() - e.conds[condition] = fmt.Errorf("%s: %e", condition, err) + e.conds[condition] = fmt.Errorf("%s: %w", condition, err) } // ClearHealthCond removes a condition and error recorded by SetHealthCond. 
@@ -157,20 +188,18 @@ func (c Config) NewService(lggr logger.Logger) Service { return c.new(logger.Sugared(lggr)) } -const scopeName = "github.com/smartcontractkit/chainlink-common/pkg/services" - func (c Config) new(lggr logger.SugaredLogger) *service { lggr = lggr.Named(c.Name) s := &service{ - tracer: otel.GetTracerProvider().Tracer(scopeName), - cfg: c, + cfg: c, eng: Engine{ StopChan: make(StopChan), SugaredLogger: lggr, + tracer: otel.GetTracerProvider().Tracer(lggr.Name()), conds: make(map[string]error), }, } - s.eng.emitHealthErr = s.StateMachine.SvcErrBuffer.Append + s.eng.serviceMethods = s // give Engine access to some service methods if c.NewSubServices != nil { s.subs = c.NewSubServices(lggr) } @@ -179,10 +208,9 @@ func (c Config) new(lggr logger.SugaredLogger) *service { type service struct { StateMachine - tracer trace.Tracer - cfg Config - eng Engine - subs []Service + cfg Config + eng Engine + subs []Service } // Ready implements [HealthReporter.Ready] and overrides and extends [utils.StartStopOnce.Ready()] to include [Config.SubServices] @@ -218,10 +246,7 @@ func (s *service) Name() string { return s.eng.SugaredLogger.Name() } func (s *service) Start(ctx context.Context) error { return s.StartOnce(s.cfg.Name, func() error { var span trace.Span - ctx, span = s.tracer.Start(ctx, "Start", trace.WithAttributes( - attribute.String("service.name", s.cfg.Name), - attribute.String("service.instance", s.Name()), // full name from logger - )) + ctx, span = s.eng.tracer.Start(ctx, "Start") defer span.End() s.eng.Info("Starting") @@ -265,3 +290,19 @@ func (s *service) Close() error { return }) } + +func (s *service) emitHealthErr(err error) { s.StateMachine.SvcErrBuffer.Append(err) } + +func (s *service) ifStarted(fn func() error) (err error) { + if !s.IfStarted(func() { err = fn() }) { + return fmt.Errorf("service is %s, not started", s.State()) + } + return +} + +func (s *service) ifNotStopped(fn func() error) (err error) { + if !s.IfNotStopped(func() { err 
= fn() }) { + return errors.New("service is stopped") + } + return +} diff --git a/pkg/services/service_example_configured_test.go b/pkg/services/service_example_configured_test.go index 350440195..1765827af 100644 --- a/pkg/services/service_example_configured_test.go +++ b/pkg/services/service_example_configured_test.go @@ -5,6 +5,8 @@ import ( "fmt" "time" + "go.opentelemetry.io/otel/attribute" + . "github.com/smartcontractkit/chainlink-common/pkg/internal/example" // nolint "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/services" @@ -32,6 +34,13 @@ func (c *configured) close() error { // do processes all outstanding work func (c *configured) do(ctx context.Context) { + ctx, span := c.eng.Tracer().Start(ctx, "DoWork") + defer span.End() + var count, errs int + defer func() { + span.SetAttributes(attribute.Int("count", count)) + span.SetAttributes(attribute.Int("errs", errs)) + }() for { select { case <-ctx.Done(): @@ -40,8 +49,10 @@ func (c *configured) do(ctx context.Context) { if !ok { return } + count++ name, err := work() if err != nil { + errs++ c.eng.SetHealthCond(name, err) } else { c.eng.ClearHealthCond(name) diff --git a/pkg/services/servicetest/run.go b/pkg/services/servicetest/run.go index 8a1046e3c..70ab607f1 100644 --- a/pkg/services/servicetest/run.go +++ b/pkg/services/servicetest/run.go @@ -28,7 +28,10 @@ type TestingT interface { func Run[R Runnable](tb TestingT, r R) R { tb.Helper() require.NoError(tb, r.Start(tests.Context(tb)), "service failed to start") - tb.Cleanup(func() { assert.NoError(tb, r.Close(), "error closing service") }) + tb.Cleanup(func() { + tb.Helper() + assert.NoError(tb, r.Close(), "error closing service") + }) return r } diff --git a/pkg/services/stop.go b/pkg/services/stop.go index 900ec6c84..d7f95f658 100644 --- a/pkg/services/stop.go +++ b/pkg/services/stop.go @@ -1,6 +1,9 @@ package services -import "context" +import ( + "context" + "time" +) // A StopChan 
signals when some work should stop. // Use StopChanR if you already have a read only <-chan. @@ -16,6 +19,11 @@ func (s StopChan) Ctx(ctx context.Context) (context.Context, context.CancelFunc) return StopRChan((<-chan struct{})(s)).Ctx(ctx) } +// CtxWithTimeout cancels a [context.Context] when StopChan is closed. +func (s StopChan) CtxWithTimeout(timeout time.Duration) (context.Context, context.CancelFunc) { + return s.CtxCancel(context.WithTimeout(context.Background(), timeout)) +} + // CtxCancel cancels a [context.Context] when StopChan is closed. // Returns ctx and cancel unmodified, for convenience. func (s StopChan) CtxCancel(ctx context.Context, cancel context.CancelFunc) (context.Context, context.CancelFunc) { @@ -36,6 +44,11 @@ func (s StopRChan) Ctx(ctx context.Context) (context.Context, context.CancelFunc return s.CtxCancel(context.WithCancel(ctx)) } +// CtxWithTimeout cancels a [context.Context] when StopChan is closed. +func (s StopRChan) CtxWithTimeout(timeout time.Duration) (context.Context, context.CancelFunc) { + return s.CtxCancel(context.WithTimeout(context.Background(), timeout)) +} + // CtxCancel cancels a [context.Context] when StopChan is closed. // Returns ctx and cancel unmodified, for convenience. func (s StopRChan) CtxCancel(ctx context.Context, cancel context.CancelFunc) (context.Context, context.CancelFunc) { diff --git a/pkg/services/ticker.go b/pkg/services/ticker.go index 5ce92f911..de47d1fd9 100644 --- a/pkg/services/ticker.go +++ b/pkg/services/ticker.go @@ -12,6 +12,7 @@ const DefaultJitter timeutil.JitterPct = 0.1 // NewTicker returns a new timeutil.Ticker configured to: // - fire the first tick immediately // - apply DefaultJitter to each period +// Ticker.Stop should be called to prevent goroutine leaks. 
func NewTicker(period time.Duration) *timeutil.Ticker { return TickerConfig{JitterPct: DefaultJitter}.NewTicker(period) } @@ -23,6 +24,8 @@ type TickerConfig struct { JitterPct timeutil.JitterPct } +// NewTicker returns a new timeutil.Ticker for the given configuration. +// Ticker.Stop should be called to prevent goroutine leaks. func (c TickerConfig) NewTicker(period time.Duration) *timeutil.Ticker { first := true return timeutil.NewTicker(func() time.Duration { diff --git a/pkg/sqlutil/pg/dialects.go b/pkg/sqlutil/pg/dialects.go new file mode 100644 index 000000000..260b646ba --- /dev/null +++ b/pkg/sqlutil/pg/dialects.go @@ -0,0 +1,19 @@ +package pg + +import ( + // need to make sure pgx driver is registered before opening connection + _ "github.com/jackc/pgx/v4/stdlib" +) + +// DialectName is a compiler enforced type used that maps to database dialect names +type DialectName string + +const ( + // Postgres represents the postgres dialect. + Postgres DialectName = "pgx" + // TransactionWrappedPostgres is useful for tests. + // When the connection is opened, it starts a transaction and all + // operations performed on the DB will be within that transaction. 
+ TransactionWrappedPostgres DialectName = "txdb" + InMemoryPostgres DialectName = "duckdb" +) diff --git a/pkg/sqlutil/pg/pg.go b/pkg/sqlutil/pg/pg.go new file mode 100644 index 000000000..25ed30d64 --- /dev/null +++ b/pkg/sqlutil/pg/pg.go @@ -0,0 +1,35 @@ +package pg + +import ( + "os" + "testing" + + "github.com/google/uuid" + "github.com/jmoiron/sqlx" + "github.com/scylladb/go-reflectx" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func NewTestDB(t testing.TB, dbURL string) *sqlx.DB { + err := RegisterTxDb(dbURL) + if err != nil { + t.Fatalf("failed to register txdb dialect: %s", err.Error()) + return nil + } + db, err := sqlx.Open(string(TransactionWrappedPostgres), uuid.New().String()) + require.NoError(t, err) + t.Cleanup(func() { assert.NoError(t, db.Close()) }) + db.MapperFunc(reflectx.CamelToSnakeASCII) + + return db +} + +func TestURL(t testing.TB) string { + dbURL, ok := os.LookupEnv("CL_DATABASE_URL") + if ok { + return dbURL + } + t.Log("CL_DATABASE_URL not set--falling back to testing txdb backed by an in-memory db") + return string(InMemoryPostgres) +} diff --git a/pkg/sqlutil/pg/txdb.go b/pkg/sqlutil/pg/txdb.go new file mode 100644 index 000000000..4e1347036 --- /dev/null +++ b/pkg/sqlutil/pg/txdb.go @@ -0,0 +1,548 @@ +package pg + +import ( + "context" + "database/sql" + "database/sql/driver" + "fmt" + "io" + "net/url" + "strings" + "sync" + "testing" + + "github.com/jmoiron/sqlx" + "go.uber.org/multierr" + + "github.com/smartcontractkit/chainlink-common/pkg/utils" +) + +// txdb is a simplified version of https://github.com/DATA-DOG/go-txdb +// +// The original lib has various problems and is hard to understand because it +// tries to be more general. The version in this file is more tightly focused +// to our needs and should be easier to reason about and less likely to have +// subtle bugs/races. +// +// It doesn't currently support savepoints but could be made to if necessary. 
+// +// Transaction BEGIN/ROLLBACK effectively becomes a no-op, this should have no +// negative impact on normal test operation. +// +// If you MUST test BEGIN/ROLLBACK behaviour, you will have to configure your +// store to use the raw DialectPostgres dialect and setup a one-use database. +// See heavyweight.FullTestDB() as a convenience function to help you do this, +// but please use sparingly because as it's name implies, it is expensive. +func RegisterTxDb(dbURL string) error { + registerMutex.Lock() + defer registerMutex.Unlock() + drivers := sql.Drivers() + + for _, driver := range drivers { + if driver == string(TransactionWrappedPostgres) { + // TxDB driver already registered + return nil + } + } + + if dbURL != string(InMemoryPostgres) { + if testing.Short() { + // -short tests don't need a DB + return nil + } + parsed, err := url.Parse(dbURL) + if err != nil { + return fmt.Errorf("failed to parse %s as a database URL: %w", dbURL, err) + } + if parsed.Path == "" { + return fmt.Errorf("database url `%s` must point to your test database. Note that the test database MUST end in `_test` to differentiate from a possible production DB. HINT: Try postgresql://postgres@localhost:5432/chainlink_test?sslmode=disable", parsed.String()) + } + if !strings.HasSuffix(parsed.Path, "_test") { + return fmt.Errorf("cannot run tests against database named `%s`. Note that the test database MUST end in `_test` to differentiate from a possible production DB. HINT: Try postgresql://postgres@localhost:5432/chainlink_test?sslmode=disable", parsed.Path[1:]) + } + } + name := string(TransactionWrappedPostgres) + sql.Register(name, &txDriver{ + dbURL: dbURL, + conns: make(map[string]*conn), + }) + sqlx.BindDriver(name, sqlx.DOLLAR) + return nil +} + +// Calling sql.Register() will panic if it's called more than once. +// We cannot atomically check whether the driver is already registered and +// register it if necessary. 
So a mutex protecting the call to sql.Drivers() +// is used to avoid the race condition. +var registerMutex sync.Mutex + +var _ driver.Conn = &conn{} + +var _ driver.Validator = &conn{} +var _ driver.SessionResetter = &conn{} + +// txDriver is a sql driver which runs on a single transaction. +// When `Close` is called, transaction is rolled back. +type txDriver struct { + sync.Mutex + db *sql.DB + conns map[string]*conn + + dbURL string +} + +func (d *txDriver) Open(dsn string) (driver.Conn, error) { + d.Lock() + defer d.Unlock() + // Open real db connection if its the first call + if d.db == nil { + dialect := string(Postgres) + if d.dbURL == string(InMemoryPostgres) { + dialect = d.dbURL + } + db, err := sql.Open(dialect, d.dbURL) + if err != nil { + return nil, err + } + d.db = db + } + c, exists := d.conns[dsn] + if !exists || !c.tryOpen() { + tx, err := d.db.Begin() + if err != nil { + return nil, err + } + c = &conn{abort: make(chan struct{}), tx: tx, opened: 1, dsn: dsn} + c.removeSelf = func() error { + return d.deleteConn(c) + } + d.conns[dsn] = c + } + return c, nil +} + +// deleteConn is called by a connection when it is closed via the `close` method. +// It also auto-closes the DB when the last checked out connection is closed. +func (d *txDriver) deleteConn(c *conn) error { + // must lock here to avoid racing with Open + d.Lock() + defer d.Unlock() + + if d.conns[c.dsn] != c { + return nil // already been replaced + } + + delete(d.conns, c.dsn) + close(c.abort) // abort any pending queries + + if len(d.conns) == 0 && d.db != nil { + if d.db != nil { + if err := d.db.Close(); err != nil { + return err + } + d.db = nil + } + } + return nil +} + +type conn struct { + sync.Mutex + abort chan struct{} + dsn string + tx *sql.Tx // tx may be shared by many conns, definitive one lives in the map keyed by DSN on the txDriver. 
Do not modify from conn + closed bool + opened int + removeSelf func() error +} + +func (c *conn) Begin() (driver.Tx, error) { + c.Lock() + defer c.Unlock() + if c.closed { + return nil, fmt.Errorf("conn is closed") + } + // Begin is a noop because the transaction was already opened + return tx{c.tx}, nil +} + +// Implement the "ConnBeginTx" interface +func (c *conn) BeginTx(_ context.Context, opts driver.TxOptions) (driver.Tx, error) { + // Context is ignored, because single transaction is shared by all callers, thus caller should not be able to + // control it with local context + return c.Begin() +} + +// Prepare returns a prepared statement, bound to this connection. +func (c *conn) Prepare(query string) (driver.Stmt, error) { + ctx, cancel := utils.ContextFromChan(c.abort) + defer cancel() + return c.PrepareContext(ctx, query) +} + +// Implement the "ConnPrepareContext" interface +func (c *conn) PrepareContext(_ context.Context, query string) (driver.Stmt, error) { + c.Lock() + defer c.Unlock() + if c.closed { + return nil, fmt.Errorf("conn is closed") + } + + // It is not safe to give the passed in context to the tx directly + // because the tx is shared by many conns and cancelling the context will + // destroy the tx which can affect other conns. Instead, we pass the context + // passed to NewTestDb when the database was set up so the operation can at + // least be aborted immediately if the whole test is interrupted. + ctx, cancel := utils.ContextFromChan(c.abort) + defer cancel() + + st, err := c.tx.PrepareContext(ctx, query) + if err != nil { + return nil, err + } + return &stmt{c.abort, st, c}, nil +} + +// IsValid is called prior to placing the connection into the +// connection pool by database/sql. The connection will be discarded if false is returned. 
+func (c *conn) IsValid() bool { + c.Lock() + defer c.Unlock() + return !c.closed +} + +func (c *conn) ResetSession(_ context.Context) error { + // Ensure bad connections are reported: From database/sql/driver: + // If a connection is never returned to the connection pool but immediately reused, then + // ResetSession is called prior to reuse but IsValid is not called. + c.Lock() + defer c.Unlock() + if c.closed { + return driver.ErrBadConn + } + + return nil +} + +// pgx returns nil +func (c *conn) CheckNamedValue(nv *driver.NamedValue) error { + return nil +} + +// Implement the "QueryerContext" interface +func (c *conn) QueryContext(_ context.Context, query string, args []driver.NamedValue) (driver.Rows, error) { + c.Lock() + defer c.Unlock() + if c.closed { + return nil, fmt.Errorf("conn is closed") + } + + ctx, cancel := utils.ContextFromChan(c.abort) + defer cancel() + + rs, err := c.tx.QueryContext(ctx, query, mapNamedArgs(args)...) + if err != nil { + return nil, err + } + defer rs.Close() + + return buildRows(rs) +} + +// Implement the "ExecContext" interface +func (c *conn) ExecContext(_ context.Context, query string, args []driver.NamedValue) (driver.Result, error) { + c.Lock() + defer c.Unlock() + if c.closed { + return nil, fmt.Errorf("conn is closed") + } + ctx, cancel := utils.ContextFromChan(c.abort) + defer cancel() + + return c.tx.ExecContext(ctx, query, mapNamedArgs(args)...) +} + +// tryOpen attempts to increment the open count, but returns false if closed. +func (c *conn) tryOpen() bool { + c.Lock() + defer c.Unlock() + if c.closed { + return false + } + c.opened++ + return true +} + +// Close invalidates and potentially stops any current +// prepared statements and transactions, marking this +// connection as no longer in use. +// +// Because the sql package maintains a free pool of +// connections and only calls Close when there's a surplus of +// idle connections, it shouldn't be necessary for drivers to +// do their own connection caching. 
+//
+// Drivers must ensure all network calls made by Close
+// do not block indefinitely (e.g. apply a timeout).
+func (c *conn) Close() (err error) {
+	newlyClosed, err := c.close()
+	if err != nil {
+		return err
+	}
+	if !newlyClosed {
+		return nil
+	}
+
+	// Wait to remove self to avoid nesting locks.
+	if err = c.removeSelf(); err != nil {
+		return err
+	}
+	return
+}
+
+func (c *conn) close() (bool, error) {
+	c.Lock()
+	defer c.Unlock()
+	if c.closed {
+		// Double close; it should be safe to make this a noop
+		// PGX allows double close
+		// See: https://github.com/jackc/pgx/blob/a457da8bffa4f90ad672fa093ee87f20cf06687b/conn.go#L249
+		return false, nil
+	}
+
+	c.opened--
+	if c.opened > 0 {
+		return false, nil
+	}
+	if c.tx != nil {
+		if err := c.tx.Rollback(); err != nil {
+			return false, err
+		}
+		c.tx = nil
+	}
+	c.closed = true
+	return true, nil
+}
+
+type tx struct {
+	tx *sql.Tx
+}
+
+func (tx tx) Commit() error {
+	// Commit is a noop because the transaction will be rolled back at the end
+	return nil
+}
+
+func (tx tx) Rollback() error {
+	// Rollback is a noop because the transaction will be rolled back at the end
+	return nil
+}
+
+type stmt struct {
+	abort <-chan struct{}
+	st    *sql.Stmt
+	conn  *conn
+}
+
+func (s stmt) Exec(args []driver.Value) (driver.Result, error) {
+	s.conn.Lock()
+	defer s.conn.Unlock()
+	if s.conn.closed {
+		return nil, fmt.Errorf("conn is closed")
+	}
+	return s.st.Exec(mapArgs(args)...)
+}
+
+// Implement the "StmtExecContext" interface
+func (s *stmt) ExecContext(_ context.Context, args []driver.NamedValue) (driver.Result, error) {
+	s.conn.Lock()
+	defer s.conn.Unlock()
+	if s.conn.closed {
+		return nil, fmt.Errorf("conn is closed")
+	}
+
+	ctx, cancel := utils.ContextFromChan(s.abort)
+	defer cancel()
+
+	return s.st.ExecContext(ctx, mapNamedArgs(args)...)
+}
+
+func mapArgs(args []driver.Value) (res []interface{}) {
+	res = make([]interface{}, len(args))
+	for i := range args {
+		res[i] = args[i]
+	}
+	return
+}
+
+func (s stmt) NumInput() int {
+	return -1
+}
+
+func (s stmt) Query(args []driver.Value) (driver.Rows, error) {
+	s.conn.Lock()
+	defer s.conn.Unlock()
+	if s.conn.closed {
+		return nil, fmt.Errorf("conn is closed")
+	}
+	rows, err := s.st.Query(mapArgs(args)...)
+	if err != nil {
+		return nil, err
+	}
+	defer func() {
+		err = multierr.Combine(err, rows.Close())
+	}()
+	return buildRows(rows)
+}
+
+// Implement the "StmtQueryContext" interface
+func (s *stmt) QueryContext(_ context.Context, args []driver.NamedValue) (driver.Rows, error) {
+	s.conn.Lock()
+	defer s.conn.Unlock()
+	if s.conn.closed {
+		return nil, fmt.Errorf("conn is closed")
+	}
+
+	ctx, cancel := utils.ContextFromChan(s.abort)
+	defer cancel()
+
+	rows, err := s.st.QueryContext(ctx, mapNamedArgs(args)...)
+	if err != nil {
+		return nil, err
+	}
+	return buildRows(rows)
+}
+
+func (s stmt) Close() error {
+	s.conn.Lock()
+	defer s.conn.Unlock()
+	return s.st.Close()
+}
+
+func buildRows(r *sql.Rows) (driver.Rows, error) {
+	set := &rowSets{}
+	rs := &rows{}
+	if err := rs.read(r); err != nil {
+		return set, err
+	}
+	set.sets = append(set.sets, rs)
+	for r.NextResultSet() {
+		rss := &rows{}
+		if err := rss.read(r); err != nil {
+			return set, err
+		}
+		set.sets = append(set.sets, rss)
+	}
+	return set, nil
+}
+
+// Implement the "RowsNextResultSet" interface
+func (rs *rowSets) HasNextResultSet() bool {
+	return rs.pos+1 < len(rs.sets)
+}
+
+// Implement the "RowsNextResultSet" interface
+func (rs *rowSets) NextResultSet() error {
+	if !rs.HasNextResultSet() {
+		return io.EOF
+	}
+
+	rs.pos++
+	return nil
+}
+
+type rows struct {
+	rows     [][]driver.Value
+	pos      int
+	cols     []string
+	colTypes []*sql.ColumnType
+}
+
+func (r *rows) Columns() []string {
+	return r.cols
+}
+
+func (r *rows) ColumnTypeDatabaseTypeName(index int) string {
+	return 
r.colTypes[index].DatabaseTypeName() +} + +func (r *rows) Next(dest []driver.Value) error { + r.pos++ + if r.pos > len(r.rows) { + return io.EOF + } + + for i, val := range r.rows[r.pos-1] { + dest[i] = *(val.(*interface{})) + } + + return nil +} + +func (r *rows) Close() error { + return nil +} + +func (r *rows) read(rs *sql.Rows) error { + var err error + r.cols, err = rs.Columns() + if err != nil { + return err + } + + r.colTypes, err = rs.ColumnTypes() + if err != nil { + return err + } + + for rs.Next() { + values := make([]interface{}, len(r.cols)) + for i := range values { + values[i] = new(interface{}) + } + if err := rs.Scan(values...); err != nil { + return err + } + row := make([]driver.Value, len(r.cols)) + for i, v := range values { + row[i] = driver.Value(v) + } + r.rows = append(r.rows, row) + } + return rs.Err() +} + +type rowSets struct { + sets []*rows + pos int +} + +func (rs *rowSets) Columns() []string { + return rs.sets[rs.pos].cols +} + +func (rs *rowSets) ColumnTypeDatabaseTypeName(index int) string { + return rs.sets[rs.pos].ColumnTypeDatabaseTypeName(index) +} + +func (rs *rowSets) Close() error { + return nil +} + +// advances to next row +func (rs *rowSets) Next(dest []driver.Value) error { + return rs.sets[rs.pos].Next(dest) +} + +func mapNamedArgs(args []driver.NamedValue) (res []interface{}) { + res = make([]interface{}, len(args)) + for i := range args { + name := args[i].Name + if name != "" { + res[i] = sql.Named(name, args[i].Value) + } else { + res[i] = args[i].Value + } + } + return +} diff --git a/pkg/sqlutil/pg/txdb_test.go b/pkg/sqlutil/pg/txdb_test.go new file mode 100644 index 000000000..853421581 --- /dev/null +++ b/pkg/sqlutil/pg/txdb_test.go @@ -0,0 +1,71 @@ +package pg + +import ( + "context" + "database/sql" + "sync" + "testing" + "time" + + "github.com/google/uuid" + "github.com/jmoiron/sqlx" + _ "github.com/marcboeker/go-duckdb" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + 
"github.com/smartcontractkit/chainlink-common/pkg/utils/tests" +) + +func TestTxDBDriver(t *testing.T) { + t.Parallel() + + t.Run("Make sure sql.Register() can be called concurrently without racing", func(t *testing.T) { + wg := sync.WaitGroup{} + for i := 0; i < 10; i++ { + wg.Add(1) + go func() { + err := RegisterTxDb(string(InMemoryPostgres)) + require.NoError(t, err) + wg.Done() + }() + } + wg.Wait() + drivers := sql.Drivers() + assert.Contains(t, drivers, "txdb") + }) + + db := NewTestDB(t, TestURL(t)) + dropTable := func() error { + _, err := db.Exec(`DROP TABLE IF EXISTS txdb_test`) + return err + } + // clean up, if previous tests failed + err := dropTable() + assert.NoError(t, err) + _, err = db.Exec(`CREATE TABLE txdb_test (id TEXT NOT NULL)`) + assert.NoError(t, err) + t.Cleanup(func() { + _ = dropTable() + }) + _, err = db.Exec(`INSERT INTO txdb_test VALUES ($1)`, uuid.New().String()) + assert.NoError(t, err) + ensureValuesPresent := func(t *testing.T, db *sqlx.DB) { + var ids []string + err = db.Select(&ids, `SELECT id from txdb_test`) + assert.NoError(t, err) + assert.Len(t, ids, 1) + } + + ensureValuesPresent(t, db) + t.Run("Cancel of tx's context does not trigger rollback of driver's tx", func(t *testing.T) { + ctx, cancel := context.WithCancel(tests.Context(t)) + _, err := db.BeginTx(ctx, nil) + assert.NoError(t, err) + cancel() + // BeginTx spawns separate goroutine that rollbacks the tx and tries to close underlying connection, unless + // db driver says that connection is still active. 
+ // This approach is not ideal, but there is no better way to wait for independent goroutine to complete + time.Sleep(time.Second * 10) + ensureValuesPresent(t, db) + }) +} diff --git a/pkg/sqlutil/sqltest/data_source.go b/pkg/sqlutil/sqltest/data_source.go new file mode 100644 index 000000000..7b398d895 --- /dev/null +++ b/pkg/sqlutil/sqltest/data_source.go @@ -0,0 +1,76 @@ +package sqltest + +import ( + "context" + "database/sql" + "testing" + + "github.com/jmoiron/sqlx" + _ "github.com/marcboeker/go-duckdb" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/sqlutil" +) + +// NewInMemoryDataSource returns a new in-memory DataSource +func NewInMemoryDataSource(t *testing.T) sqlutil.DataSource { + db, err := sqlx.Open("duckdb", t.Name()) + require.NoError(t, err) + t.Cleanup(func() { require.NoError(t, db.Close()) }) + return db +} + +// NewNoOpDataSource returns an empty DataSource type which will satisfy the interface +func NewNoOpDataSource() sqlutil.DataSource { + return &noOpDataSource{} +} + +type noOpDataSource struct{} + +func (ds *noOpDataSource) BindNamed(s string, _ interface{}) (string, []interface{}, error) { + return "", nil, nil +} + +func (ds *noOpDataSource) DriverName() string { + return "" +} + +func (ds *noOpDataSource) ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { + return nil, nil +} + +func (ds *noOpDataSource) GetContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error { + return nil +} + +func (ds *noOpDataSource) NamedExecContext(ctx context.Context, query string, arg interface{}) (sql.Result, error) { + return nil, nil +} + +func (ds *noOpDataSource) PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) { + return nil, nil +} + +func (_m *noOpDataSource) PrepareNamedContext(ctx context.Context, query string) (*sqlx.NamedStmt, error) { + return nil, nil +} + +func (ds *noOpDataSource) QueryContext(ctx 
context.Context, query string, args ...interface{}) (*sql.Rows, error) { + return nil, nil +} + +func (ds *noOpDataSource) QueryRowxContext(ctx context.Context, query string, args ...interface{}) *sqlx.Row { + return nil +} + +func (ds *noOpDataSource) QueryxContext(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) { + return nil, nil +} + +func (ds *noOpDataSource) Rebind(s string) string { + return "" +} + +func (ds *noOpDataSource) SelectContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error { + return nil +} diff --git a/pkg/types/codec.go b/pkg/types/codec.go index 93ae8ce59..8b17209b9 100644 --- a/pkg/types/codec.go +++ b/pkg/types/codec.go @@ -29,6 +29,66 @@ type Decoder interface { GetMaxDecodingSize(ctx context.Context, n int, itemType string) (int, error) } +/* +Codec is an interface that provides encoding and decoding functionality for a specific type identified by a name. +Because there are many types that a [ContractReader] or [ContractWriter] can either accept or return, all encoding +instructions provided by the codec are based on the type name. + +Starting from the lowest level, take for instance a [big.Int] encoder where we want the output to be big endian binary +encoded. + + typeCodec, _ := binary.BigEndian().BigInt(32, true) + +This allows us to encode and decode [big.Int] values with big endian encoding using the [encodings.TypeCodec] interface. + + encodedBytes := []byte{} + + originalValue := big.NewInt(42) + encodedBytes, _ = typeCodec.Encode(originalValue, encodedBytes) // new encoded bytes are appended to existing + + value, _, _ := typeCodec.Decode(encodedBytes, value) + +The additional [encodings.TypeCodec] methods such as 'GetType() reflect.Type' allow composition. This is useful for +creating a struct codec such as the one defined in encodings/struct.go. 
+ + tlCodec, _ := encodings.NewStructCodec([]encodings.NamedTypeCodec{{Name: "Value", Codec: typeCodec}}) + +This provides a [encodings.TopLevelCodec] which is a [encodings.TypeCodec] with a total size of all encoded elements. +Going up another level, we create a [Codec] from a map of [encodings.TypeCodec] instances using +[encodings.CodecFromTypeCodec]. + + codec := encodings.CodecFromTypeCodec{"SomeStruct": tlCodec} + + type SomeStruct struct { + Value *big.Int + } + + encodedStructBytes, _ := codec.Encode(ctx, SomeStruct{Value: big.NewInt(42)}, "SomeStruct") + + var someStruct SomeStruct + _ = codec.Decode(encodedStructBytes, &someStruct, "SomeStruct") + +Therefore 'itemType' passed to [Encode] and [Decode] references the key in the map of [encodings.TypeCodec] instances. +Also worth noting that a `TopLevelCodec` can also be added to a `CodecFromTypeCodec` map. This allows for the +[encodings.SizeAtTopLevel] method to be referenced when [encodings.GetMaxEncodingSize] is called on the [Codec]. + +Also, when the type is unknown to the caller, the decoded type for an 'itemName' can be retrieved from the codec to be +used for decoding. The `CreateType` method returns an instance of the expected type using reflection under the hood and +the overall composition of `TypeCodec` instances. This allows proper types to be conveyed to the caller through the +GRPC interface where data may be JSON encoded, passed through GRPC, and JSON decoded on the other side. + + decodedStruct, _ := codec.CreateType("SomeStruct", false) + _ = codec.Decode(encodedStructBytes, &decodedStruct, "SomeStruct") + +The `encodings` package provides a `Builder` interface that allows for the creation of any encoding type. This is useful +for creating custom encodings such as the EVM ABI encoding. An encoder implements the `Builder` interface and plugs +directly into `TypeCodec`. 
+ +From the perspective of a `ContractReader` instance, the `itemType` at the top level is the `readIdentifier` which +can be imagined as `contractName + methodName` given that a contract method call returns some configured value that +would need its own codec. Each implementation of `ContractReader` maps the names to codecs differently on the inside, +but from the level of the interface, the `itemType` is the `readIdentifier`. +*/ type Codec interface { Encoder Decoder diff --git a/pkg/types/contract_reader.go b/pkg/types/contract_reader.go index 4ba2ec9a7..5d317de77 100644 --- a/pkg/types/contract_reader.go +++ b/pkg/types/contract_reader.go @@ -2,6 +2,7 @@ package types import ( "context" + "iter" "github.com/smartcontractkit/chainlink-common/pkg/services" "github.com/smartcontractkit/chainlink-common/pkg/types/query" @@ -15,6 +16,7 @@ const ( ErrContractReaderConfigMissing = UnimplementedError("ContractReader entry missing from RelayConfig") ErrInternal = InternalError("internal error") ErrNotFound = NotFoundError("not found") + ErrFinalityViolated = InternalError("finality violated") ) // ContractReader defines essential read operations a chain should implement for reading contract values and events. @@ -46,8 +48,8 @@ type ContractReader interface { // Passing in a *values.Value as the returnVal will encode the return value as an appropriate value.Value instance. GetLatestValue(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) error - // GetLatestValueWithHeadData should be used in the same way as GetLatestValue, but also returns the head data. - GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (Head, error) + // GetLatestValueWithHeadData should be used in the same way as GetLatestValue, but also returns the head data if available. 
+ GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (*Head, error) // BatchGetLatestValues batches get latest value calls based on request, which is grouped by contract names that each have a slice of BatchRead. // BatchGetLatestValuesRequest params and returnVal follow same rules as GetLatestValue params and returnVal arguments, with difference in how response is returned. @@ -65,9 +67,26 @@ type ContractReader interface { // QueryKey provides fetching chain agnostic events (Sequence) with general querying capability. QueryKey(ctx context.Context, contract BoundContract, filter query.KeyFilter, limitAndSort query.LimitAndSort, sequenceDataType any) ([]Sequence, error) + // QueryKeys provides fetching chain agnostic events (Sequence) of different types with general querying capability. + // The iterator returns a pair of key and sequence. + QueryKeys(ctx context.Context, filters []ContractKeyFilter, limitAndSort query.LimitAndSort) (iter.Seq2[string, Sequence], error) + + // HealthReport returns a full health report of the callee including its dependencies. + // Keys are based on Name(), with nil values when healthy or errors otherwise. + // Use CopyHealth to collect reports from sub-services. + // This should run very fast, so avoid doing computation and instead prefer reporting pre-calculated state. + // On finality violation report must contain at least one ErrFinalityViolation. + HealthReport() map[string]error + mustEmbedUnimplementedContractReader() } +type ContractKeyFilter struct { + query.KeyFilter + Contract BoundContract + SequenceDataType any +} + // BatchGetLatestValuesRequest string is contract name. 
type BatchGetLatestValuesRequest map[BoundContract]ContractBatch type ContractBatch []BatchRead @@ -133,8 +152,8 @@ func (UnimplementedContractReader) GetLatestValue(ctx context.Context, readIdent return UnimplementedError("ContractReader.GetLatestValue unimplemented") } -func (UnimplementedContractReader) GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (Head, error) { - return Head{}, UnimplementedError("ContractReader.GetLatestValueWithHeadData unimplemented") +func (UnimplementedContractReader) GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (*Head, error) { + return nil, UnimplementedError("ContractReader.GetLatestValueWithHeadData unimplemented") } func (UnimplementedContractReader) BatchGetLatestValues(ctx context.Context, request BatchGetLatestValuesRequest) (BatchGetLatestValuesResult, error) { @@ -153,6 +172,10 @@ func (UnimplementedContractReader) QueryKey(ctx context.Context, boundContract B return nil, UnimplementedError("ContractReader.QueryKey unimplemented") } +func (UnimplementedContractReader) QueryKeys(ctx context.Context, keyQueries []ContractKeyFilter, limitAndSort query.LimitAndSort) (iter.Seq2[string, Sequence], error) { + return nil, UnimplementedError("ContractReader.QueryKeys unimplemented") +} + func (UnimplementedContractReader) Start(context.Context) error { return UnimplementedError("ContractReader.Start unimplemented") } diff --git a/pkg/types/chain_writer.go b/pkg/types/contract_writer.go similarity index 91% rename from pkg/types/chain_writer.go rename to pkg/types/contract_writer.go index 28b243546..f556fcb66 100644 --- a/pkg/types/chain_writer.go +++ b/pkg/types/contract_writer.go @@ -11,10 +11,10 @@ const ( ErrSettingTransactionGasLimitNotSupported = InvalidArgumentError("setting transaction gas limit is not supported") ) -type ChainWriter interface { 
+type ContractWriter interface { services.Service - // SubmitTransaction packs and broadcasts a transaction to the underlying chain. + // SubmitTransaction packs and broadcasts a transaction to the underlying chain contract. // // - `args` should be any object which maps a set of method param into the contract and method specific method params. // - `transactionID` will be used by the underlying TXM as an idempotency key, and unique reference to track transaction attempts. @@ -31,8 +31,8 @@ type ChainWriter interface { type TxMeta struct { // Used for Keystone Workflows WorkflowExecutionID *string - // An optional maximum gas limit for the transaction. If not set the ChainWriter implementation will be responsible for - // setting a gas limit for the transaction. If it is set and the ChainWriter implementation does not support setting + // An optional maximum gas limit for the transaction. If not set the ContractWriter implementation will be responsible for + // setting a gas limit for the transaction. 
If it is set and the ContractWriter implementation does not support setting // this value per transaction it will return ErrSettingTransactionGasLimitNotSupported GasLimit *big.Int } diff --git a/pkg/types/core/capabilities_registry.go b/pkg/types/core/capabilities_registry.go index 0f139b066..e520cd8bc 100644 --- a/pkg/types/core/capabilities_registry.go +++ b/pkg/types/core/capabilities_registry.go @@ -17,4 +17,5 @@ type CapabilitiesRegistry interface { GetTarget(ctx context.Context, ID string) (capabilities.TargetCapability, error) List(ctx context.Context) ([]capabilities.BaseCapability, error) Add(ctx context.Context, c capabilities.BaseCapability) error + Remove(ctx context.Context, ID string) error } diff --git a/pkg/types/core/mocks/capabilities_registry.go b/pkg/types/core/mocks/capabilities_registry.go index 105b414af..aa0a3bce7 100644 --- a/pkg/types/core/mocks/capabilities_registry.go +++ b/pkg/types/core/mocks/capabilities_registry.go @@ -537,6 +537,53 @@ func (_c *CapabilitiesRegistry_LocalNode_Call) RunAndReturn(run func(context.Con return _c } +// Remove provides a mock function with given fields: ctx, ID +func (_m *CapabilitiesRegistry) Remove(ctx context.Context, ID string) error { + ret := _m.Called(ctx, ID) + + if len(ret) == 0 { + panic("no return value specified for Remove") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = rf(ctx, ID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// CapabilitiesRegistry_Remove_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Remove' +type CapabilitiesRegistry_Remove_Call struct { + *mock.Call +} + +// Remove is a helper method to define mock.On call +// - ctx context.Context +// - ID string +func (_e *CapabilitiesRegistry_Expecter) Remove(ctx interface{}, ID interface{}) *CapabilitiesRegistry_Remove_Call { + return &CapabilitiesRegistry_Remove_Call{Call: _e.mock.On("Remove", ctx, ID)} +} + +func (_c 
*CapabilitiesRegistry_Remove_Call) Run(run func(ctx context.Context, ID string)) *CapabilitiesRegistry_Remove_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *CapabilitiesRegistry_Remove_Call) Return(_a0 error) *CapabilitiesRegistry_Remove_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *CapabilitiesRegistry_Remove_Call) RunAndReturn(run func(context.Context, string) error) *CapabilitiesRegistry_Remove_Call { + _c.Call.Return(run) + return _c +} + // NewCapabilitiesRegistry creates a new instance of CapabilitiesRegistry. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewCapabilitiesRegistry(t interface { diff --git a/pkg/types/core/mocks/relayer.go b/pkg/types/core/mocks/relayer.go index f8dfc59a8..1b14433ae 100644 --- a/pkg/types/core/mocks/relayer.go +++ b/pkg/types/core/mocks/relayer.go @@ -217,29 +217,29 @@ func (_c *Relayer_Name_Call) RunAndReturn(run func() string) *Relayer_Name_Call return _c } -// NewChainWriter provides a mock function with given fields: _a0, chainWriterConfig -func (_m *Relayer) NewChainWriter(_a0 context.Context, chainWriterConfig []byte) (types.ChainWriter, error) { - ret := _m.Called(_a0, chainWriterConfig) +// NewContractReader provides a mock function with given fields: _a0, contractReaderConfig +func (_m *Relayer) NewContractReader(_a0 context.Context, contractReaderConfig []byte) (types.ContractReader, error) { + ret := _m.Called(_a0, contractReaderConfig) if len(ret) == 0 { - panic("no return value specified for NewChainWriter") + panic("no return value specified for NewContractReader") } - var r0 types.ChainWriter + var r0 types.ContractReader var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []byte) (types.ChainWriter, error)); ok { - return rf(_a0, chainWriterConfig) + if rf, ok := 
ret.Get(0).(func(context.Context, []byte) (types.ContractReader, error)); ok { + return rf(_a0, contractReaderConfig) } - if rf, ok := ret.Get(0).(func(context.Context, []byte) types.ChainWriter); ok { - r0 = rf(_a0, chainWriterConfig) + if rf, ok := ret.Get(0).(func(context.Context, []byte) types.ContractReader); ok { + r0 = rf(_a0, contractReaderConfig) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(types.ChainWriter) + r0 = ret.Get(0).(types.ContractReader) } } if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { - r1 = rf(_a0, chainWriterConfig) + r1 = rf(_a0, contractReaderConfig) } else { r1 = ret.Error(1) } @@ -247,58 +247,58 @@ func (_m *Relayer) NewChainWriter(_a0 context.Context, chainWriterConfig []byte) return r0, r1 } -// Relayer_NewChainWriter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewChainWriter' -type Relayer_NewChainWriter_Call struct { +// Relayer_NewContractReader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewContractReader' +type Relayer_NewContractReader_Call struct { *mock.Call } -// NewChainWriter is a helper method to define mock.On call +// NewContractReader is a helper method to define mock.On call // - _a0 context.Context -// - chainWriterConfig []byte -func (_e *Relayer_Expecter) NewChainWriter(_a0 interface{}, chainWriterConfig interface{}) *Relayer_NewChainWriter_Call { - return &Relayer_NewChainWriter_Call{Call: _e.mock.On("NewChainWriter", _a0, chainWriterConfig)} +// - contractReaderConfig []byte +func (_e *Relayer_Expecter) NewContractReader(_a0 interface{}, contractReaderConfig interface{}) *Relayer_NewContractReader_Call { + return &Relayer_NewContractReader_Call{Call: _e.mock.On("NewContractReader", _a0, contractReaderConfig)} } -func (_c *Relayer_NewChainWriter_Call) Run(run func(_a0 context.Context, chainWriterConfig []byte)) *Relayer_NewChainWriter_Call { +func (_c *Relayer_NewContractReader_Call) Run(run 
func(_a0 context.Context, contractReaderConfig []byte)) *Relayer_NewContractReader_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].([]byte)) }) return _c } -func (_c *Relayer_NewChainWriter_Call) Return(_a0 types.ChainWriter, _a1 error) *Relayer_NewChainWriter_Call { +func (_c *Relayer_NewContractReader_Call) Return(_a0 types.ContractReader, _a1 error) *Relayer_NewContractReader_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Relayer_NewChainWriter_Call) RunAndReturn(run func(context.Context, []byte) (types.ChainWriter, error)) *Relayer_NewChainWriter_Call { +func (_c *Relayer_NewContractReader_Call) RunAndReturn(run func(context.Context, []byte) (types.ContractReader, error)) *Relayer_NewContractReader_Call { _c.Call.Return(run) return _c } -// NewContractReader provides a mock function with given fields: _a0, contractReaderConfig -func (_m *Relayer) NewContractReader(_a0 context.Context, contractReaderConfig []byte) (types.ContractReader, error) { - ret := _m.Called(_a0, contractReaderConfig) +// NewContractWriter provides a mock function with given fields: _a0, contractWriterConfig +func (_m *Relayer) NewContractWriter(_a0 context.Context, contractWriterConfig []byte) (types.ContractWriter, error) { + ret := _m.Called(_a0, contractWriterConfig) if len(ret) == 0 { - panic("no return value specified for NewContractReader") + panic("no return value specified for NewContractWriter") } - var r0 types.ContractReader + var r0 types.ContractWriter var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []byte) (types.ContractReader, error)); ok { - return rf(_a0, contractReaderConfig) + if rf, ok := ret.Get(0).(func(context.Context, []byte) (types.ContractWriter, error)); ok { + return rf(_a0, contractWriterConfig) } - if rf, ok := ret.Get(0).(func(context.Context, []byte) types.ContractReader); ok { - r0 = rf(_a0, contractReaderConfig) + if rf, ok := ret.Get(0).(func(context.Context, []byte) types.ContractWriter); ok { 
+ r0 = rf(_a0, contractWriterConfig) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(types.ContractReader) + r0 = ret.Get(0).(types.ContractWriter) } } if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { - r1 = rf(_a0, contractReaderConfig) + r1 = rf(_a0, contractWriterConfig) } else { r1 = ret.Error(1) } @@ -306,31 +306,31 @@ func (_m *Relayer) NewContractReader(_a0 context.Context, contractReaderConfig [ return r0, r1 } -// Relayer_NewContractReader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewContractReader' -type Relayer_NewContractReader_Call struct { +// Relayer_NewContractWriter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NewContractWriter' +type Relayer_NewContractWriter_Call struct { *mock.Call } -// NewContractReader is a helper method to define mock.On call +// NewContractWriter is a helper method to define mock.On call // - _a0 context.Context -// - contractReaderConfig []byte -func (_e *Relayer_Expecter) NewContractReader(_a0 interface{}, contractReaderConfig interface{}) *Relayer_NewContractReader_Call { - return &Relayer_NewContractReader_Call{Call: _e.mock.On("NewContractReader", _a0, contractReaderConfig)} +// - contractWriterConfig []byte +func (_e *Relayer_Expecter) NewContractWriter(_a0 interface{}, contractWriterConfig interface{}) *Relayer_NewContractWriter_Call { + return &Relayer_NewContractWriter_Call{Call: _e.mock.On("NewContractWriter", _a0, contractWriterConfig)} } -func (_c *Relayer_NewContractReader_Call) Run(run func(_a0 context.Context, contractReaderConfig []byte)) *Relayer_NewContractReader_Call { +func (_c *Relayer_NewContractWriter_Call) Run(run func(_a0 context.Context, contractWriterConfig []byte)) *Relayer_NewContractWriter_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].([]byte)) }) return _c } -func (_c *Relayer_NewContractReader_Call) Return(_a0 types.ContractReader, _a1 
error) *Relayer_NewContractReader_Call { +func (_c *Relayer_NewContractWriter_Call) Return(_a0 types.ContractWriter, _a1 error) *Relayer_NewContractWriter_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Relayer_NewContractReader_Call) RunAndReturn(run func(context.Context, []byte) (types.ContractReader, error)) *Relayer_NewContractReader_Call { +func (_c *Relayer_NewContractWriter_Call) RunAndReturn(run func(context.Context, []byte) (types.ContractWriter, error)) *Relayer_NewContractWriter_Call { _c.Call.Return(run) return _c } diff --git a/pkg/types/core/relayerset.go b/pkg/types/core/relayerset.go index bb1b84d45..3cc92ff39 100644 --- a/pkg/types/core/relayerset.go +++ b/pkg/types/core/relayerset.go @@ -31,6 +31,6 @@ type Relayer interface { services.Service NewPluginProvider(context.Context, RelayArgs, PluginArgs) (types.PluginProvider, error) NewContractReader(_ context.Context, contractReaderConfig []byte) (types.ContractReader, error) - NewChainWriter(_ context.Context, chainWriterConfig []byte) (types.ChainWriter, error) + NewContractWriter(_ context.Context, contractWriterConfig []byte) (types.ContractWriter, error) LatestHead(context.Context) (types.Head, error) } diff --git a/pkg/types/example_codec_test.go b/pkg/types/example_codec_test.go new file mode 100644 index 000000000..94dec8155 --- /dev/null +++ b/pkg/types/example_codec_test.go @@ -0,0 +1,46 @@ +package types_test + +import ( + "context" + "fmt" + "math/big" + + "github.com/smartcontractkit/chainlink-common/pkg/codec/encodings" + "github.com/smartcontractkit/chainlink-common/pkg/codec/encodings/binary" +) + +// ExampleCodec provides a minimal example of constructing and using a codec. 
+func ExampleCodec() { + ctx := context.Background() + typeCodec, _ := binary.BigEndian().BigInt(32, true) + + // start with empty encoded bytes + encodedBytes := []byte{} + originalValue := big.NewInt(42) + + encodedBytes, _ = typeCodec.Encode(originalValue, encodedBytes) // new encoded bytes are appended to existing + value, _, _ := typeCodec.Decode(encodedBytes) + + // originalValue is the same as value + fmt.Printf("%+v == %+v\n", originalValue, value) + + // TopLevelCodec is a TypeCodec that has a total size of all encoded elements + tlCodec, _ := encodings.NewStructCodec([]encodings.NamedTypeCodec{{Name: "Value", Codec: typeCodec}}) + codec := encodings.CodecFromTypeCodec{"SomeStruct": tlCodec} + + type SomeStruct struct { + Value *big.Int + } + + originalStruct := SomeStruct{Value: big.NewInt(42)} + encodedStructBytes, _ := codec.Encode(ctx, originalStruct, "SomeStruct") + + var someStruct SomeStruct + _ = codec.Decode(ctx, encodedStructBytes, &someStruct, "SomeStruct") + + decodedStruct, _ := codec.CreateType("SomeStruct", false) + _ = codec.Decode(ctx, encodedStructBytes, &decodedStruct, "SomeStruct") + + // encoded struct is equal to decoded struct using defined type and/or CreateType + fmt.Printf("%+v == %+v == %+v\n", originalStruct, someStruct, decodedStruct) +} diff --git a/pkg/types/interfacetests/chain_components_interface_tests.go b/pkg/types/interfacetests/chain_components_interface_tests.go index 06e622ee4..c65f9b6ff 100644 --- a/pkg/types/interfacetests/chain_components_interface_tests.go +++ b/pkg/types/interfacetests/chain_components_interface_tests.go @@ -16,18 +16,71 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/values" ) +// GetLatestValue method +const ( + ContractReaderGetLatestValueAsValuesDotValue = "Gets the latest value as a values.Value" + ContractReaderGetLatestValueNoArgumentsAndPrimitiveReturnAsValuesDotValue = "Get latest value without arguments and with primitive return as a values.Value" + 
ContractReaderGetLatestValueNoArgumentsAndSliceReturnAsValueDotValue = "Get latest value without arguments and with slice return as a values.Value" + ContractReaderGetLatestValue = "Gets the latest value" + ContractReaderGetLatestValueWithHeadData = "Gets the latest value with head data" + ContractReaderGetLatestValueWithPrimitiveReturn = "Get latest value without arguments and with primitive return" + ContractReaderGetLatestValueBasedOnConfidenceLevel = "Get latest value based on confidence level" + ContractReaderGetLatestValueFromMultipleContractsNamesSameFunction = "Get latest value allows multiple contract names to have the same function " + ContractReaderGetLatestValueWithModifiersUsingOwnMapstrctureOverrides = "Get latest value wraps config with modifiers using its own mapstructure overrides" + ContractReaderGetLatestValueNoArgumentsAndSliceReturn = "Get latest value without arguments and with slice return" +) + +// GetLatestValue event +const ( + ContractReaderGetLatestValueGetsLatestForEvent = "Get latest value gets latest event" + ContractReaderGetLatestValueBasedOnConfidenceLevelForEvent = "Get latest event based on provided confidence level" + ContractReaderGetLatestValueReturnsNotFoundWhenNotTriggeredForEvent = "Get latest value returns not found if event was never triggered" + ContractReaderGetLatestValueWithFilteringForEvent = "Get latest value gets latest event with filtering" +) + +// BatchGet +const ( + ContractReaderBatchGetLatestValue = "BatchGetLatestValues works" + ContractReaderBatchGetLatestValueNoArgumentsPrimitiveReturn = "BatchGetLatestValues works without arguments and with primitive return" + ContractReaderBatchGetLatestValueMultipleContractNamesSameFunction = "BatchGetLatestValues allows multiple contract names to have the same function Name" + ContractReaderBatchGetLatestValueNoArgumentsWithSliceReturn = "BatchGetLatestValue without arguments and with slice return" + ContractReaderBatchGetLatestValueWithModifiersOwnMapstructureOverride 
= "BatchGetLatestValues wraps config with modifiers using its own mapstructure overrides" + ContractReaderBatchGetLatestValueDifferentParamsResultsRetainOrder = "BatchGetLatestValues supports same read with different params and results retain order from request" + ContractReaderBatchGetLatestValueDifferentParamsResultsRetainOrderMultipleContracts = "BatchGetLatestValues supports same read with different params and results retain order from request even with multiple contracts" + ContractReaderBatchGetLatestValueSetsErrorsProperly = "BatchGetLatestValues sets errors properly" +) + +// Query key +const ( + ContractReaderQueryKeyNotFound = "QueryKey returns not found if sequence never happened" + ContractReaderQueryKeyReturnsData = "QueryKey returns sequence data properly" + ContractReaderQueryKeyReturnsDataAsValuesDotValue = "QueryKey returns sequence data properly as values.Value" + ContractReaderQueryKeyCanFilterWithValueComparator = "QueryKey can filter data with value comparator" + ContractReaderQueryKeyCanLimitResultsWithCursor = "QueryKey can limit results with cursor" +) + +// Query keys +const ( + ContractReaderQueryKeysReturnsDataTwoEventTypes = "QueryKeys returns sequence data properly for two event types" + ContractReaderQueryKeysNotFound = "QueryKeys returns not found if sequence never happened" + ContractReaderQueryKeysReturnsData = "QueryKeys returns sequence data properly" + ContractReaderQueryKeysReturnsDataAsValuesDotValue = "QueryKeys returns sequence data properly as values.Value" + ContractReaderQueryKeysCanFilterWithValueComparator = "QueryKeys can filter data with value comparator" + ContractReaderQueryKeysCanLimitResultsWithCursor = "QueryKeys can limit results with cursor" +) + type ChainComponentsInterfaceTester[T TestingT[T]] interface { BasicTester[T] GetContractReader(t T) types.ContractReader - GetChainWriter(t T) types.ChainWriter + GetContractWriter(t T) types.ContractWriter GetBindings(t T) []types.BoundContract // DirtyContracts 
signals to the underlying tester than the test contracts are dirty, i.e. the state has been changed such that // new, fresh contracts should be deployed. This usually happens after a value is written to the contract via - // the ChainWriter. + // the ContractWriter. DirtyContracts() MaxWaitTimeForEvents() time.Duration // GenerateBlocksTillConfidenceLevel is only used by the internal common tests, all other tests can/should - // rely on the ChainWriter waiting for actual blocks to be mined. + // rely on the ContractWriter waiting for actual blocks to be mined. GenerateBlocksTillConfidenceLevel(t T, contractName, readName string, confidenceLevel primitives.ConfidenceLevel) } @@ -42,6 +95,8 @@ const ( MethodSettingStruct = "addTestStruct" MethodSettingUint64 = "setAlterablePrimitiveValue" MethodTriggeringEvent = "triggerEvent" + MethodTriggeringEventWithDynamicTopic = "triggerEventWithDynamicTopic" + DynamicTopicEventName = "TriggeredEventWithDynamicTopic" EventName = "SomeEvent" EventNameField = EventName + ".Field" ProtoTest = "ProtoTest" @@ -56,32 +111,370 @@ var AnySliceToReadWithoutAnArgument = []uint64{3, 4} const AnyExtraValue = 3 -func RunContractReaderInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], mockRun bool) { - t.Run("GetLatestValue for "+tester.Name(), func(t T) { runContractReaderGetLatestValueInterfaceTests(t, tester, mockRun) }) - t.Run("BatchGetLatestValues for "+tester.Name(), func(t T) { runContractReaderBatchGetLatestValuesInterfaceTests(t, tester, mockRun) }) - t.Run("QueryKey for "+tester.Name(), func(t T) { runQueryKeyInterfaceTests(t, tester) }) +func RunContractReaderInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], mockRun bool, parallel bool) { + t.Run(tester.Name(), func(t T) { + t.Run("GetLatestValue", func(t T) { runContractReaderGetLatestValueInterfaceTests(t, tester, mockRun, parallel) }) + t.Run("BatchGetLatestValues", func(t T) { 
runContractReaderBatchGetLatestValuesInterfaceTests(t, tester, mockRun, parallel) }) + t.Run("QueryKey", func(t T) { runQueryKeyInterfaceTests(t, tester, parallel) }) + t.Run("QueryKeys", func(t T) { runQueryKeysInterfaceTests(t, tester, parallel) }) + }) +} + +type SomeDynamicTopicEvent struct { + Field string } -func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], mockRun bool) { - tests := []testcase[T]{ +type sequenceWithKey struct { + types.Sequence + Key string +} + +func runQueryKeysInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], parallel bool) { + tests := []Testcase[T]{ { - name: "Gets the latest value as a values.Value", - test: func(t T) { + Name: ContractReaderQueryKeysReturnsDataTwoEventTypes, + Test: func(t T) { ctx := tests.Context(t) - firstItem := CreateTestStruct(0, tester) + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) - contracts := tester.GetBindings(t) - _ = SubmitTransactionToCW(t, tester, MethodSettingStruct, firstItem, contracts[0], types.Unconfirmed) + bindings := tester.GetBindings(t) - secondItem := CreateTestStruct(1, tester) + require.NoError(t, cr.Bind(ctx, bindings)) + boundContract := BindingsByName(bindings, AnyContractName)[0] + + expectedSequenceData := createMixedEventTypeSequence(t, tester, cw, boundContract) + + ts := &TestStruct{} + require.Eventually(t, func() bool { + contractFilter := types.ContractKeyFilter{ + Contract: boundContract, + KeyFilter: query.KeyFilter{Key: EventName}, + SequenceDataType: ts, + } + + ds := SomeDynamicTopicEvent{} + secondContractFilter := types.ContractKeyFilter{ + Contract: boundContract, + KeyFilter: query.KeyFilter{Key: DynamicTopicEventName}, + SequenceDataType: &ds, + } + + sequencesIter, err := cr.QueryKeys(ctx, []types.ContractKeyFilter{secondContractFilter, contractFilter}, query.LimitAndSort{}) + if err != nil { + return false + } + + sequences := make([]sequenceWithKey, 0) 
+ for k, s := range sequencesIter { + sequences = append(sequences, sequenceWithKey{Sequence: s, Key: k}) + } + + return sequenceDataEqual(expectedSequenceData, sequences) + }, tester.MaxWaitTimeForEvents(), time.Millisecond*10) + }, + }, + + { + Name: ContractReaderQueryKeysNotFound, + Test: func(t T) { + ctx := tests.Context(t) + cr := tester.GetContractReader(t) + bindings := tester.GetBindings(t) + bound := BindingsByName(bindings, AnyContractName)[0] + + require.NoError(t, cr.Bind(ctx, bindings)) + + contractFilter := types.ContractKeyFilter{ + Contract: bound, + KeyFilter: query.KeyFilter{Key: EventName}, + SequenceDataType: &TestStruct{}, + } + + logsIter, err := cr.QueryKeys(ctx, []types.ContractKeyFilter{contractFilter}, query.LimitAndSort{}) + require.NoError(t, err) + var logs []types.Sequence + for _, log := range logsIter { + logs = append(logs, log) + } + assert.Len(t, logs, 0) + }, + }, + + { + Name: ContractReaderQueryKeysReturnsDataAsValuesDotValue, + Test: func(t T) { + ctx := tests.Context(t) + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + bindings := tester.GetBindings(t) + + require.NoError(t, cr.Bind(ctx, bindings)) + bound := BindingsByName(bindings, AnyContractName)[0] - _ = SubmitTransactionToCW(t, tester, MethodSettingStruct, secondItem, contracts[0], types.Unconfirmed) + expectedSequenceData := createMixedEventTypeSequence(t, tester, cw, bound) + var value values.Value + require.Eventually(t, func() bool { + contractFilter := types.ContractKeyFilter{ + Contract: bound, + KeyFilter: query.KeyFilter{Key: EventName}, + SequenceDataType: &value, + } + + secondContractFilter := types.ContractKeyFilter{ + Contract: bound, + KeyFilter: query.KeyFilter{Key: DynamicTopicEventName}, + SequenceDataType: &value, + } + + sequencesIter, err := cr.QueryKeys(ctx, []types.ContractKeyFilter{contractFilter, secondContractFilter}, query.LimitAndSort{}) + if err != nil { + return false + } + + sequences := make([]sequenceWithKey, 0) 
+ for k, s := range sequencesIter { + sequences = append(sequences, sequenceWithKey{Sequence: s, Key: k}) + } + + if len(expectedSequenceData) != len(sequences) { + return false + } + + for i, sequence := range sequences { + switch sequence.Key { + case EventName: + val := *sequences[i].Data.(*values.Value) + ts := TestStruct{} + err = val.UnwrapTo(&ts) + require.NoError(t, err) + assert.Equal(t, expectedSequenceData[i], &ts) + case DynamicTopicEventName: + val := *sequences[i].Data.(*values.Value) + ds := SomeDynamicTopicEvent{} + err = val.UnwrapTo(&ds) + require.NoError(t, err) + assert.Equal(t, expectedSequenceData[i], &ds) + default: + return false + } + } + + return true + }, tester.MaxWaitTimeForEvents(), time.Millisecond*10) + }, + }, + + { + Name: ContractReaderQueryKeysCanFilterWithValueComparator, + Test: func(t T) { + ctx := tests.Context(t) + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + + bindings := tester.GetBindings(t) + + require.NoError(t, cr.Bind(ctx, bindings)) + boundContract := BindingsByName(bindings, AnyContractName)[0] + + expectedSequenceData := createMixedEventTypeSequence(t, tester, cw, boundContract) + fmt.Println("expectedSequenceData", expectedSequenceData) + + ts := &TestStruct{} + require.Eventually(t, func() bool { + contractFilter := types.ContractKeyFilter{ + Contract: boundContract, + KeyFilter: query.KeyFilter{Key: EventName, + Expressions: []query.Expression{ + query.Comparator("Field", + primitives.ValueComparator{ + Value: 2, + Operator: primitives.Gte, + }, + primitives.ValueComparator{ + Value: 3, + Operator: primitives.Lte, + }), + }}, + SequenceDataType: ts, + } + + ds := SomeDynamicTopicEvent{} + secondContractFilter := types.ContractKeyFilter{ + Contract: boundContract, + KeyFilter: query.KeyFilter{Key: DynamicTopicEventName}, + SequenceDataType: &ds, + } + + sequencesIter, err := cr.QueryKeys(ctx, []types.ContractKeyFilter{contractFilter, secondContractFilter}, query.LimitAndSort{}) + if err 
!= nil { + return false + } + + sequences := make([]sequenceWithKey, 0) + for k, s := range sequencesIter { + sequences = append(sequences, sequenceWithKey{Sequence: s, Key: k}) + } + + expectedSequenceData = expectedSequenceData[2:] + return sequenceDataEqual(expectedSequenceData, sequences) + }, tester.MaxWaitTimeForEvents(), time.Millisecond*500) + }, + }, + { + Name: ContractReaderQueryKeysCanLimitResultsWithCursor, + Test: func(t T) { + ctx := tests.Context(t) cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) - bound := BindingsByName(bindings, AnyContractName)[0] // minimum of one bound contract expected, otherwise panics require.NoError(t, cr.Bind(ctx, bindings)) + boundContract := BindingsByName(bindings, AnyContractName)[0] + + var expectedSequenceData []any + + ts1 := CreateTestStruct[T](0, tester) + expectedSequenceData = append(expectedSequenceData, &ts1) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, boundContract, types.Unconfirmed) + ts2 := CreateTestStruct[T](1, tester) + expectedSequenceData = append(expectedSequenceData, &ts2) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts2, boundContract, types.Unconfirmed) + + ds1 := SomeDynamicTopicEvent{Field: "1"} + expectedSequenceData = append(expectedSequenceData, &ds1) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEventWithDynamicTopic, ds1, boundContract, types.Unconfirmed) + + ts3 := CreateTestStruct[T](2, tester) + expectedSequenceData = append(expectedSequenceData, &ts3) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts3, boundContract, types.Unconfirmed) + + ds2 := SomeDynamicTopicEvent{Field: "2"} + expectedSequenceData = append(expectedSequenceData, &ds2) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEventWithDynamicTopic, ds2, boundContract, types.Unconfirmed) + + ts4 := CreateTestStruct[T](3, tester) + expectedSequenceData = append(expectedSequenceData, 
&ts4) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts4, boundContract, types.Finalized) + + require.Eventually(t, func() bool { + var allSequences []sequenceWithKey + contractFilter := types.ContractKeyFilter{ + Contract: boundContract, + KeyFilter: query.KeyFilter{Key: EventName, Expressions: []query.Expression{ + query.Confidence(primitives.Finalized), + }}, + SequenceDataType: &TestStruct{}, + } + + ds := SomeDynamicTopicEvent{} + secondContractFilter := types.ContractKeyFilter{ + Contract: boundContract, + KeyFilter: query.KeyFilter{Key: DynamicTopicEventName, Expressions: []query.Expression{ + query.Confidence(primitives.Finalized), + }}, + SequenceDataType: &ds, + } + + limit := query.LimitAndSort{ + SortBy: []query.SortBy{query.NewSortBySequence(query.Asc)}, + Limit: query.CountLimit(3), + } + + for idx := 0; idx < len(expectedSequenceData)/2; idx++ { + // sequences from queryKey without limit and sort should be in descending order + sequencesIter, err := cr.QueryKeys(ctx, []types.ContractKeyFilter{secondContractFilter, contractFilter}, limit) + require.NoError(t, err) + + sequences := make([]sequenceWithKey, 0) + for k, s := range sequencesIter { + sequences = append(sequences, sequenceWithKey{Sequence: s, Key: k}) + } + + if len(sequences) == 0 { + continue + } + + limit.Limit = query.CursorLimit(sequences[len(sequences)-1].Cursor, query.CursorFollowing, 3) + allSequences = append(allSequences, sequences...) 
+ } + + return sequenceDataEqual(expectedSequenceData, allSequences) + }, tester.MaxWaitTimeForEvents(), 500*time.Millisecond) + }, + }, + } + + if parallel { + RunTestsInParallel(t, tester, tests) + } else { + RunTests(t, tester, tests) + } +} + +func createMixedEventTypeSequence[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], cw types.ContractWriter, boundContract types.BoundContract) []any { + var expectedSequenceData []any + + ts1 := CreateTestStruct[T](0, tester) + expectedSequenceData = append(expectedSequenceData, &ts1) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, boundContract, types.Unconfirmed) + ts2 := CreateTestStruct[T](1, tester) + expectedSequenceData = append(expectedSequenceData, &ts2) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts2, boundContract, types.Unconfirmed) + + ds1 := SomeDynamicTopicEvent{Field: "1"} + expectedSequenceData = append(expectedSequenceData, &ds1) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEventWithDynamicTopic, ds1, boundContract, types.Unconfirmed) + + ts3 := CreateTestStruct[T](2, tester) + expectedSequenceData = append(expectedSequenceData, &ts3) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts3, boundContract, types.Unconfirmed) + + ds2 := SomeDynamicTopicEvent{Field: "2"} + expectedSequenceData = append(expectedSequenceData, &ds2) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEventWithDynamicTopic, ds2, boundContract, types.Unconfirmed) + + ts4 := CreateTestStruct[T](3, tester) + expectedSequenceData = append(expectedSequenceData, &ts4) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts4, boundContract, types.Unconfirmed) + return expectedSequenceData +} + +func sequenceDataEqual(expectedSequenceData []any, sequences []sequenceWithKey) bool { + if len(expectedSequenceData) != len(sequences) { + return false + } + + for i, sequence := range sequences { + if !reflect.DeepEqual(sequence.Data, 
expectedSequenceData[i]) { + return false + } + } + + return true +} + +func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], mockRun bool, parallel bool) { + tests := []Testcase[T]{ + { + Name: ContractReaderGetLatestValueAsValuesDotValue, + Test: func(t T) { + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + contracts := tester.GetBindings(t) + ctx := tests.Context(t) + firstItem := CreateTestStruct(0, tester) + + _ = SubmitTransactionToCW(t, tester, cw, MethodSettingStruct, firstItem, contracts[0], types.Unconfirmed) + + secondItem := CreateTestStruct(1, tester) + + _ = SubmitTransactionToCW(t, tester, cw, MethodSettingStruct, secondItem, contracts[0], types.Unconfirmed) + + bound := BindingsByName(contracts, AnyContractName)[0] // minimum of one bound contract expected, otherwise panics + + require.NoError(t, cr.Bind(ctx, contracts)) params := &LatestParams{I: 1} var value values.Value @@ -106,11 +499,11 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, { - name: "Get latest value without arguments and with primitive return as a values.Value", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueNoArgumentsAndPrimitiveReturnAsValuesDotValue, + Test: func(t T) { cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) @@ -127,11 +520,11 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value without arguments and with slice return as a values.Value", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueNoArgumentsAndSliceReturnAsValueDotValue, + Test: func(t T) { cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, 
AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) @@ -147,23 +540,23 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Gets the latest value", - test: func(t T) { + Name: ContractReaderGetLatestValue, + Test: func(t T) { + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + contracts := tester.GetBindings(t) ctx := tests.Context(t) firstItem := CreateTestStruct(0, tester) - contracts := tester.GetBindings(t) - _ = SubmitTransactionToCW(t, tester, MethodSettingStruct, firstItem, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodSettingStruct, firstItem, contracts[0], types.Unconfirmed) secondItem := CreateTestStruct(1, tester) - _ = SubmitTransactionToCW(t, tester, MethodSettingStruct, secondItem, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodSettingStruct, secondItem, contracts[0], types.Unconfirmed) - cr := tester.GetContractReader(t) - bindings := tester.GetBindings(t) - bound := BindingsByName(bindings, AnyContractName)[0] // minimum of one bound contract expected, otherwise panics + bound := BindingsByName(contracts, AnyContractName)[0] // minimum of one bound contract expected, otherwise panics - require.NoError(t, cr.Bind(ctx, bindings)) + require.NoError(t, cr.Bind(ctx, contracts)) actual := &TestStruct{} params := &LatestParams{I: 1} @@ -177,11 +570,12 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value without arguments and with primitive return", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueWithPrimitiveReturn, + Test: func(t T) { cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) + bound := BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) @@ -193,11 +587,12 @@ func runContractReaderGetLatestValueInterfaceTests[T 
TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value based on confidence level", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueBasedOnConfidenceLevel, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) require.NoError(t, cr.Bind(ctx, bindings)) @@ -210,22 +605,18 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch ReturnVal: &returnVal1, } - contracts := tester.GetBindings(t) - - txID := SubmitTransactionToCW(t, tester, MethodSettingUint64, PrimitiveArgs{Value: 10}, contracts[0], types.Unconfirmed) + txID := SubmitTransactionToCW(t, tester, cw, MethodSettingUint64, PrimitiveArgs{Value: 10}, bindings[0], types.Unconfirmed) var prim1 uint64 bound := BindingsByName(bindings, callArgs.ContractName)[0] - require.Error(t, cr.GetLatestValue(ctx, bound.ReadIdentifier(callArgs.ReadName), primitives.Finalized, callArgs.Params, &prim1)) - - err := WaitForTransactionStatus(t, tester, txID, types.Finalized, mockRun) + err := WaitForTransactionStatus(t, tester, cw, txID, types.Finalized, mockRun) require.NoError(t, err) require.NoError(t, cr.GetLatestValue(ctx, bound.ReadIdentifier(MethodReturningAlterableUint64), primitives.Finalized, nil, &prim1)) assert.Equal(t, uint64(10), prim1) - _ = SubmitTransactionToCW(t, tester, MethodSettingUint64, PrimitiveArgs{Value: 20}, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodSettingUint64, PrimitiveArgs{Value: 20}, bindings[0], types.Unconfirmed) var prim2 uint64 require.NoError(t, cr.GetLatestValue(ctx, bound.ReadIdentifier(callArgs.ReadName), callArgs.ConfidenceLevel, callArgs.Params, &prim2)) @@ -233,11 +624,11 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value allows multiple contract names to have the same function name", - test: func(t T) { - ctx := 
tests.Context(t) + Name: ContractReaderGetLatestValueFromMultipleContractsNamesSameFunction, + Test: func(t T) { cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, AnySecondContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) @@ -249,11 +640,11 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value without arguments and with slice return", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueNoArgumentsAndSliceReturn, + Test: func(t T) { cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) @@ -265,17 +656,18 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value wraps config with modifiers using its own mapstructure overrides", - test: func(t T) { + Name: ContractReaderGetLatestValueWithModifiersUsingOwnMapstrctureOverrides, + Test: func(t T) { + cr := tester.GetContractReader(t) + bindings := tester.GetBindings(t) + ctx := tests.Context(t) testStruct := CreateTestStruct(0, tester) testStruct.BigField = nil - testStruct.Account = nil + testStruct.AccountStruct.Account = nil - bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) actual := &TestStructWithExtraField{} @@ -290,21 +682,21 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value gets latest event", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueGetsLatestForEvent, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := 
BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) - contracts := tester.GetBindings(t) ts := CreateTestStruct[T](0, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts, bindings[0], types.Unconfirmed) ts = CreateTestStruct[T](1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts, bindings[0], types.Unconfirmed) result := &TestStruct{} require.Eventually(t, func() bool { @@ -314,29 +706,26 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest event based on provided confidence level", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueBasedOnConfidenceLevelForEvent, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) ts1 := CreateTestStruct[T](2, tester) - txID := SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1, bindings[0], types.Unconfirmed) + txID := SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, bindings[0], types.Unconfirmed) result := &TestStruct{} - require.Eventually(t, func() bool { - err := cr.GetLatestValue(ctx, bound.ReadIdentifier(EventName), primitives.Finalized, nil, &result) - return err != nil && assert.ErrorContains(t, err, types.ErrNotFound.Error()) - }, tester.MaxWaitTimeForEvents(), time.Millisecond*10) - err := WaitForTransactionStatus(t, tester, txID, types.Finalized, mockRun) + err := WaitForTransactionStatus(t, tester, cw, txID, types.Finalized, mockRun) require.NoError(t, err) ts2 := CreateTestStruct[T](3, tester) - _ = SubmitTransactionToCW(t, 
tester, MethodTriggeringEvent, ts2, bindings[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts2, bindings[0], types.Unconfirmed) require.Eventually(t, func() bool { err := cr.GetLatestValue(ctx, bound.ReadIdentifier(EventName), primitives.Finalized, nil, &result) @@ -350,11 +739,11 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value returns not found if event was never triggered", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueReturnsNotFoundWhenNotTriggeredForEvent, + Test: func(t T) { cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) @@ -365,20 +754,21 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, { - name: "Get latest value gets latest event with filtering", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderGetLatestValueWithFilteringForEvent, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) + bound := BindingsByName(bindings, AnyContractName)[0] require.NoError(t, cr.Bind(ctx, bindings)) ts0 := CreateTestStruct(0, tester) - contracts := tester.GetBindings(t) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts0, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts0, bindings[0], types.Unconfirmed) ts1 := CreateTestStruct(1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1, contracts[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, bindings[0], types.Unconfirmed) filterParams := &FilterEventParams{Field: *ts0.Field} assert.Never(t, func() bool { @@ -396,22 +786,28 @@ func 
runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch }, }, } - runTests(t, tester, tests) + if parallel { + RunTestsInParallel(t, tester, tests) + } else { + RunTests(t, tester, tests) + } } -func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], mockRun bool) { - testCases := []testcase[T]{ +func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], mockRun bool, parallel bool) { + testCases := []Testcase[T]{ { - name: "BatchGetLatestValues works", - test: func(t T) { + Name: ContractReaderBatchGetLatestValue, + Test: func(t T) { + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + bindings := tester.GetBindings(t) // setup test data firstItem := CreateTestStruct(1, tester) - bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] batchCallEntry := make(BatchCallEntry) batchCallEntry[bound] = ContractBatchEntry{{Name: MethodTakingLatestParamsReturningTestStruct, ReturnValue: &firstItem}} - batchChainWrite(t, tester, batchCallEntry, mockRun) + batchContractWrite(t, tester, cw, bindings, batchCallEntry, mockRun) // setup call data params, actual := &LatestParams{I: 1}, &TestStruct{} @@ -425,7 +821,6 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes } ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValueRequest) @@ -439,12 +834,13 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValues works without arguments and with primitive return", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueNoArgumentsPrimitiveReturn, + Test: func(t T) { + cr := tester.GetContractReader(t) + bindings := tester.GetBindings(t) // setup call data var primitiveReturnValue 
uint64 batchGetLatestValuesRequest := make(types.BatchGetLatestValuesRequest) - bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] batchGetLatestValuesRequest[bound] = []types.BatchRead{ @@ -456,7 +852,6 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes } ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValuesRequest) @@ -470,11 +865,13 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValues allows multiple contract names to have the same function Name", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueMultipleContractNamesSameFunction, + Test: func(t T) { + cr := tester.GetContractReader(t) + bindings := tester.GetBindings(t) + var primitiveReturnValueAnyContract, primitiveReturnValueAnySecondContract uint64 batchGetLatestValuesRequest := make(types.BatchGetLatestValuesRequest) - bindings := tester.GetBindings(t) bound1 := BindingsByName(bindings, AnyContractName)[0] bound2 := BindingsByName(bindings, AnySecondContractName)[0] @@ -482,7 +879,6 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes batchGetLatestValuesRequest[bound2] = []types.BatchRead{{ReadName: MethodReturningUint64, Params: nil, ReturnVal: &primitiveReturnValueAnySecondContract}} ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValuesRequest) @@ -500,18 +896,18 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValue without arguments and with slice return", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueNoArgumentsWithSliceReturn, + Test: func(t T) { + cr := tester.GetContractReader(t) + bindings := tester.GetBindings(t) 
// setup call data var sliceReturnValue []uint64 batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) - bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] batchGetLatestValueRequest[bound] = []types.BatchRead{{ReadName: MethodReturningUint64Slice, Params: nil, ReturnVal: &sliceReturnValue}} ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValueRequest) require.NoError(t, err) @@ -524,21 +920,21 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValues wraps config with modifiers using its own mapstructure overrides", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueWithModifiersOwnMapstructureOverride, + Test: func(t T) { + cr := tester.GetContractReader(t) + bindings := tester.GetBindings(t) // setup call data testStruct := CreateTestStruct(0, tester) testStruct.BigField = nil - testStruct.Account = nil + testStruct.AccountStruct.Account = nil actual := &TestStructWithExtraField{} batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) - bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] batchGetLatestValueRequest[bound] = []types.BatchRead{{ReadName: MethodReturningSeenStruct, Params: testStruct, ReturnVal: actual}} ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValueRequest) require.NoError(t, err) @@ -556,11 +952,13 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValues supports same read with different params and results retain order from request", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueDifferentParamsResultsRetainOrder, + Test: func(t T) { + cr := 
tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + bindings := tester.GetBindings(t) batchCallEntry := make(BatchCallEntry) batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) - bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] for i := 0; i < 10; i++ { @@ -573,10 +971,9 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes types.BatchRead{ReadName: MethodTakingLatestParamsReturningTestStruct, Params: &LatestParams{I: 1 + i}, ReturnVal: &TestStruct{}}, ) } - batchChainWrite(t, tester, batchCallEntry, mockRun) + batchContractWrite(t, tester, cw, bindings, batchCallEntry, mockRun) ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValueRequest) @@ -592,11 +989,13 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValues supports same read with different params and results retain order from request even with multiple contracts", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueDifferentParamsResultsRetainOrderMultipleContracts, + Test: func(t T) { + cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) + bindings := tester.GetBindings(t) batchCallEntry := make(BatchCallEntry) batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) - bindings := tester.GetBindings(t) bound1 := BindingsByName(bindings, AnyContractName)[0] bound2 := BindingsByName(bindings, AnySecondContractName)[0] @@ -609,10 +1008,9 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes batchGetLatestValueRequest[bound1] = append(batchGetLatestValueRequest[bound1], types.BatchRead{ReadName: MethodTakingLatestParamsReturningTestStruct, Params: &LatestParams{I: 1 + i}, ReturnVal: &TestStruct{}}) batchGetLatestValueRequest[bound2] = 
append(batchGetLatestValueRequest[bound2], types.BatchRead{ReadName: MethodTakingLatestParamsReturningTestStruct, Params: &LatestParams{I: 1 + i}, ReturnVal: &TestStruct{}}) } - batchChainWrite(t, tester, batchCallEntry, mockRun) + batchContractWrite(t, tester, cw, bindings, batchCallEntry, mockRun) ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValueRequest) @@ -640,9 +1038,10 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, { - name: "BatchGetLatestValues sets errors properly", - test: func(t T) { + Name: ContractReaderBatchGetLatestValueSetsErrorsProperly, + Test: func(t T) { batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) + cr := tester.GetContractReader(t) bindings := tester.GetBindings(t) bound1 := BindingsByName(bindings, AnyContractName)[0] bound2 := BindingsByName(bindings, AnySecondContractName)[0] @@ -654,7 +1053,6 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes } ctx := tests.Context(t) - cr := tester.GetContractReader(t) require.NoError(t, cr.Bind(ctx, bindings)) result, err := cr.BatchGetLatestValues(ctx, batchGetLatestValueRequest) @@ -674,20 +1072,24 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes }, }, } - runTests(t, tester, testCases) + if parallel { + RunTestsInParallel(t, tester, testCases) + } else { + RunTests(t, tester, testCases) + } } -func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T]) { - tests := []testcase[T]{ +func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], parallel bool) { + tests := []Testcase[T]{ { - name: "QueryKey returns not found if sequence never happened", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderQueryKeyNotFound, + Test: func(t T) { cr := tester.GetContractReader(t) 
bindings := tester.GetBindings(t) + ctx := tests.Context(t) bound := BindingsByName(bindings, AnyContractName)[0] - require.NoError(t, cr.Bind(ctx, tester.GetBindings(t))) + require.NoError(t, cr.Bind(ctx, bindings)) logs, err := cr.QueryKey(ctx, bound, query.KeyFilter{Key: EventName}, query.LimitAndSort{}, &TestStruct{}) @@ -696,19 +1098,20 @@ func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfa }, }, { - name: "QueryKey returns sequence data properly", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderQueryKeyReturnsData, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) require.NoError(t, cr.Bind(ctx, bindings)) boundContract := BindingsByName(bindings, AnyContractName)[0] ts1 := CreateTestStruct[T](0, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1, boundContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, boundContract, types.Unconfirmed) ts2 := CreateTestStruct[T](1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2, boundContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts2, boundContract, types.Unconfirmed) ts := &TestStruct{} require.Eventually(t, func() bool { @@ -719,19 +1122,20 @@ func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfa }, }, { - name: "QueryKey returns sequence data properly as values.Value", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderQueryKeyReturnsDataAsValuesDotValue, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) require.NoError(t, cr.Bind(ctx, bindings)) bound := BindingsByName(bindings, AnyContractName)[0] ts1 := CreateTestStruct[T](0, tester) - _ = SubmitTransactionToCW(t, tester, 
MethodTriggeringEvent, ts1, bindings[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, bindings[0], types.Unconfirmed) ts2 := CreateTestStruct[T](1, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2, bindings[0], types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts2, bindings[0], types.Unconfirmed) var value values.Value @@ -759,21 +1163,22 @@ func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfa }, }, { - name: "QueryKey can filter data with value comparator", - test: func(t T) { - ctx := tests.Context(t) + Name: ContractReaderQueryKeyCanFilterWithValueComparator, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) require.NoError(t, cr.Bind(ctx, bindings)) boundContract := BindingsByName(bindings, AnyContractName)[0] ts1 := CreateTestStruct[T](0, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts1, boundContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts1, boundContract, types.Unconfirmed) ts2 := CreateTestStruct[T](15, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts2, boundContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts2, boundContract, types.Unconfirmed) ts3 := CreateTestStruct[T](35, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, ts3, boundContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, ts3, boundContract, types.Unconfirmed) ts := &TestStruct{} require.Eventually(t, func() bool { @@ -795,11 +1200,12 @@ func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfa }, }, { - name: "QueryKey can limit results with cursor", - test: func(t T) { - ctx := tests.Context(t) + Name: 
ContractReaderQueryKeyCanLimitResultsWithCursor, + Test: func(t T) { cr := tester.GetContractReader(t) + cw := tester.GetContractWriter(t) bindings := tester.GetBindings(t) + ctx := tests.Context(t) require.NoError(t, cr.Bind(ctx, bindings)) boundContract := BindingsByName(bindings, AnyContractName)[0] @@ -811,7 +1217,7 @@ func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfa for idx := range testStructs { testStructs[idx] = CreateTestStruct(idx*2, tester) - _ = SubmitTransactionToCW(t, tester, MethodTriggeringEvent, testStructs[idx], boundContract, types.Unconfirmed) + _ = SubmitTransactionToCW(t, tester, cw, MethodTriggeringEvent, testStructs[idx], boundContract, types.Unconfirmed) } require.Eventually(t, func() bool { @@ -847,7 +1253,11 @@ func runQueryKeyInterfaceTests[T TestingT[T]](t T, tester ChainComponentsInterfa }, } - runTests(t, tester, tests) + if parallel { + RunTestsInParallel(t, tester, tests) + } else { + RunTests(t, tester, tests) + } } func BindingsByName(bindings []types.BoundContract, name string) []types.BoundContract { diff --git a/pkg/types/interfacetests/codec_interface_fuzz_tests.go b/pkg/types/interfacetests/codec_interface_fuzz_tests.go index 2b8e2957a..38f845238 100644 --- a/pkg/types/interfacetests/codec_interface_fuzz_tests.go +++ b/pkg/types/interfacetests/codec_interface_fuzz_tests.go @@ -37,9 +37,12 @@ func RunCodecInterfaceFuzzTests(f *testing.F, tester CodecInterfaceTester) { DifferentField: differentField, OracleID: commontypes.OracleID(oracleId), OracleIDs: oids, - Account: tester.GetAccountBytes(accountSeed), - Accounts: [][]byte{tester.GetAccountBytes(accountsSeed + 1), tester.GetAccountBytes(accountsSeed + 2)}, - BigField: big.NewInt(bigField), + AccountStruct: AccountStruct{ + Account: tester.GetAccountBytes(accountSeed), + AccountStr: tester.GetAccountString(accountSeed), + }, + Accounts: [][]byte{tester.GetAccountBytes(accountsSeed + 1), tester.GetAccountBytes(accountsSeed + 2)}, + BigField: 
big.NewInt(bigField), NestedDynamicStruct: MidLevelDynamicTestStruct{ FixedBytes: fb, Inner: InnerDynamicTestStruct{ diff --git a/pkg/types/interfacetests/codec_interface_tests.go b/pkg/types/interfacetests/codec_interface_tests.go index 3f3c125d2..9a90c7352 100644 --- a/pkg/types/interfacetests/codec_interface_tests.go +++ b/pkg/types/interfacetests/codec_interface_tests.go @@ -38,10 +38,10 @@ const ( ) func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { - tests := []testcase[*testing.T]{ + tests := []Testcase[*testing.T]{ { - name: "Encodes and decodes a single item", - test: func(t *testing.T) { + Name: "Encodes and decodes a single item", + Test: func(t *testing.T) { ctx := tests.Context(t) item := CreateTestStruct[*testing.T](0, tester) req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} @@ -58,14 +58,14 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encodes compatible types", - test: func(t *testing.T) { + Name: "Encodes compatible types", + Test: func(t *testing.T) { ctx := tests.Context(t) item := CreateTestStruct[*testing.T](0, tester) req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} resp := tester.EncodeFields(t, req) compatibleItem := compatibleTestStruct{ - Account: item.Account, + AccountStruct: item.AccountStruct, Accounts: item.Accounts, BigField: item.BigField, DifferentField: item.DifferentField, @@ -87,14 +87,17 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encodes compatible maps", - test: func(t *testing.T) { + Name: "Encodes compatible maps", + Test: func(t *testing.T) { ctx := tests.Context(t) item := CreateTestStruct[*testing.T](0, tester) req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} resp := tester.EncodeFields(t, req) compatibleMap := map[string]any{ - "Account": item.Account, + "AccountStruct": map[string]any{ + "Account": 
item.AccountStruct.Account, + "AccountStr": item.AccountStruct.AccountStr, + }, "Accounts": item.Accounts, "BigField": item.BigField, "DifferentField": item.DifferentField, @@ -130,15 +133,15 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encode returns an error if a field is not provided", - test: func(t *testing.T) { + Name: "Encode returns an error if a field is not provided", + Test: func(t *testing.T) { ctx := tests.Context(t) ts := CreateTestStruct[*testing.T](0, tester) item := &TestStructMissingField{ DifferentField: ts.DifferentField, OracleID: ts.OracleID, OracleIDs: ts.OracleIDs, - Account: ts.Account, + AccountStruct: ts.AccountStruct, Accounts: ts.Accounts, BigField: ts.BigField, NestedDynamicStruct: ts.NestedDynamicStruct, @@ -151,8 +154,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encodes and decodes a slice", - test: func(t *testing.T) { + Name: "Encodes and decodes a slice", + Test: func(t *testing.T) { ctx := tests.Context(t) item1 := CreateTestStruct[*testing.T](0, tester) item2 := CreateTestStruct[*testing.T](1, tester) @@ -171,8 +174,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encodes and decodes a slices with one element", - test: func(t *testing.T) { + Name: "Encodes and decodes a slices with one element", + Test: func(t *testing.T) { ctx := tests.Context(t) item1 := CreateTestStruct[*testing.T](0, tester) items := []TestStruct{item1} @@ -191,8 +194,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encodes and decodes an array", - test: func(t *testing.T) { + Name: "Encodes and decodes an array", + Test: func(t *testing.T) { ctx := tests.Context(t) item1 := CreateTestStruct[*testing.T](0, tester) item2 := CreateTestStruct[*testing.T](1, tester) @@ -212,8 +215,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: 
"Encodes and decodes an arrays with one element", - test: func(t *testing.T) { + Name: "Encodes and decodes an arrays with one element", + Test: func(t *testing.T) { ctx := tests.Context(t) item1 := CreateTestStruct[*testing.T](0, tester) items := [1]TestStruct{item1} @@ -232,8 +235,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Returns an error if type is undefined", - test: func(t *testing.T) { + Name: "Returns an error if type is undefined", + Test: func(t *testing.T) { ctx := tests.Context(t) item := CreateTestStruct[*testing.T](0, tester) codec := tester.GetCodec(t) @@ -246,8 +249,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Returns an error encoding if arrays are the too small to encode", - test: func(t *testing.T) { + Name: "Returns an error encoding if arrays are the too small to encode", + Test: func(t *testing.T) { ctx := tests.Context(t) if !tester.IncludeArrayEncodingSizeEnforcement() { return @@ -262,8 +265,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Returns an error encoding if arrays are the too large to encode", - test: func(t *testing.T) { + Name: "Returns an error encoding if arrays are the too large to encode", + Test: func(t *testing.T) { ctx := tests.Context(t) if !tester.IncludeArrayEncodingSizeEnforcement() { return @@ -279,8 +282,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "GetMaxEncodingSize returns errors for unknown types", - test: func(t *testing.T) { + Name: "GetMaxEncodingSize returns errors for unknown types", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) _, err := cr.GetMaxEncodingSize(ctx, 10, "not"+TestItemType) @@ -288,8 +291,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "GetMaxDecodingSize returns errors for unknown types", - test: func(t *testing.T) { + 
Name: "GetMaxDecodingSize returns errors for unknown types", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) _, err := cr.GetMaxDecodingSize(ctx, 10, "not"+TestItemType) @@ -297,8 +300,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Decode respects config", - test: func(t *testing.T) { + Name: "Decode respects config", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) original := CreateTestStruct[*testing.T](0, tester) @@ -316,13 +319,13 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encode respects config", - test: func(t *testing.T) { + Name: "Encode respects config", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) modified := CreateTestStruct[*testing.T](0, tester) modified.BigField = nil - modified.Account = nil + modified.AccountStruct.Account = nil actual, err := cr.Encode(ctx, modified, TestItemWithConfigExtra) require.NoError(t, err) @@ -334,8 +337,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encode allows nil params to be encoded, either as empty encoding or with prefix", - test: func(t *testing.T) { + Name: "Encode allows nil params to be encoded, either as empty encoding or with prefix", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) _, err := cr.Encode(ctx, nil, NilType) @@ -343,8 +346,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encode does not panic on nil field", - test: func(t *testing.T) { + Name: "Encode does not panic on nil field", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) nilArgs := &TestStruct{ @@ -352,7 +355,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { DifferentField: "", OracleID: 0, OracleIDs: [32]commontypes.OracleID{}, - Account: nil, + AccountStruct: 
AccountStruct{}, Accounts: nil, BigField: nil, NestedDynamicStruct: MidLevelDynamicTestStruct{}, @@ -363,8 +366,8 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, { - name: "Encode returns an error if the item isn't compatible", - test: func(t *testing.T) { + Name: "Encode returns an error if the item isn't compatible", + Test: func(t *testing.T) { ctx := tests.Context(t) cr := tester.GetCodec(t) notTestStruct := &MidLevelDynamicTestStruct{} @@ -373,7 +376,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { }, }, } - runTests(t, tester, tests) + RunTests(t, tester, tests) } // RunCodecWithStrictArgsInterfaceTest is meant to be used by codecs that don't pad @@ -383,10 +386,10 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { func RunCodecWithStrictArgsInterfaceTest(t *testing.T, tester CodecInterfaceTester) { RunCodecInterfaceTests(t, tester) - tests := []testcase[*testing.T]{ + tests := []Testcase[*testing.T]{ { - name: "Gives an error decoding extra fields on an item", - test: func(t *testing.T) { + Name: "Gives an error decoding extra fields on an item", + Test: func(t *testing.T) { ctx := tests.Context(t) item := CreateTestStruct[*testing.T](0, tester) req := &EncodeRequest{ @@ -401,8 +404,8 @@ func RunCodecWithStrictArgsInterfaceTest(t *testing.T, tester CodecInterfaceTest }, }, { - name: "Gives an error decoding missing fields on an item", - test: func(t *testing.T) { + Name: "Gives an error decoding missing fields on an item", + Test: func(t *testing.T) { ctx := tests.Context(t) item := CreateTestStruct[*testing.T](0, tester) req := &EncodeRequest{ @@ -417,8 +420,8 @@ func RunCodecWithStrictArgsInterfaceTest(t *testing.T, tester CodecInterfaceTest }, }, { - name: "Gives an error decoding extra fields on a slice", - test: func(t *testing.T) { + Name: "Gives an error decoding extra fields on a slice", + Test: func(t *testing.T) { ctx := tests.Context(t) items := 
[]TestStruct{CreateTestStruct[*testing.T](0, tester)} req := &EncodeRequest{ @@ -433,8 +436,8 @@ func RunCodecWithStrictArgsInterfaceTest(t *testing.T, tester CodecInterfaceTest }, }, { - name: "Gives an error decoding missing fields on an slice", - test: func(t *testing.T) { + Name: "Gives an error decoding missing fields on an slice", + Test: func(t *testing.T) { ctx := tests.Context(t) items := []TestStruct{CreateTestStruct[*testing.T](0, tester)} req := &EncodeRequest{ @@ -450,5 +453,5 @@ func RunCodecWithStrictArgsInterfaceTest(t *testing.T, tester CodecInterfaceTest }, } - runTests(t, tester, tests) + RunTests(t, tester, tests) } diff --git a/pkg/types/interfacetests/utils.go b/pkg/types/interfacetests/utils.go index a9f009eb8..93c35c0fe 100644 --- a/pkg/types/interfacetests/utils.go +++ b/pkg/types/interfacetests/utils.go @@ -8,9 +8,10 @@ import ( "time" "github.com/google/uuid" - "github.com/smartcontractkit/libocr/commontypes" "github.com/stretchr/testify/require" + "github.com/smartcontractkit/libocr/commontypes" + "github.com/smartcontractkit/chainlink-common/pkg/types" "github.com/smartcontractkit/chainlink-common/pkg/types/query/primitives" "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" @@ -20,39 +21,78 @@ type BasicTester[T any] interface { Setup(t T) Name() string GetAccountBytes(i int) []byte + GetAccountString(i int) string + IsDisabled(testID string) bool + DisableTests(testIDs []string) +} + +type TestSelectionSupport struct { + disabledTests map[string]bool +} + +func (t TestSelectionSupport) IsDisabled(testID string) bool { + return t.disabledTests[testID] +} + +func (t *TestSelectionSupport) DisableTests(testIDs []string) { + if t.disabledTests == nil { + t.disabledTests = map[string]bool{} + } + for _, testID := range testIDs { + t.disabledTests[testID] = true + } } -type testcase[T any] struct { - name string - test func(t T) +type Testcase[T any] struct { + Name string + Test func(t T) } type TestingT[T any] interface { 
tests.TestingT Failed() bool Run(name string, f func(t T)) bool + Parallel() } -func runTests[T TestingT[T]](t T, tester BasicTester[T], tests []testcase[T]) { - for _, test := range tests { - t.Run(test.name+" for "+tester.Name(), func(t T) { - tester.Setup(t) - test.test(t) - }) - } +// Tests execution utility function that will consider enabled / disabled test cases according to +// Basic Tester configuration. +func RunTests[T TestingT[T]](t T, tester BasicTester[T], tests []Testcase[T]) { + t.Run(tester.Name(), func(t T) { + for _, test := range tests { + if !tester.IsDisabled(test.Name) { + t.Run(test.Name, func(t T) { + tester.Setup(t) + test.Test(t) + }) + } + } + }) +} + +func RunTestsInParallel[T TestingT[T]](t T, tester BasicTester[T], tests []Testcase[T]) { + // Assumes Setup() called on tester initialization to avoid race conditions on tester setup + t.Run(tester.Name(), func(t T) { + for _, test := range tests { + if !tester.IsDisabled(test.Name) { + t.Run(test.Name, func(t T) { + t.Parallel() + test.Test(t) + }) + } + } + }) } -// Batch chain write takes a batch call entry and writes it to the chain using the ChainWriter. -func batchChainWrite[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], batchCallEntry BatchCallEntry, mockRun bool) { +// Batch contract write takes a batch call entry and writes it to the chain using the ContractWriter. 
+func batchContractWrite[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], cw types.ContractWriter, boundContracts []types.BoundContract, batchCallEntry BatchCallEntry, mockRun bool) { // This is necessary because the mock helper function requires the entire batchCallEntry rather than an individual testStruct if mockRun { - cw := tester.GetChainWriter(t) - err := cw.SubmitTransaction(tests.Context(t), AnyContractName, "batchChainWrite", batchCallEntry, "", "", nil, big.NewInt(0)) + err := cw.SubmitTransaction(tests.Context(t), AnyContractName, "batchContractWrite", batchCallEntry, "", "", nil, big.NewInt(0)) require.NoError(t, err) return } nameToAddress := make(map[string]string) - boundContracts := tester.GetBindings(t) for _, bc := range boundContracts { nameToAddress[bc.Name] = bc.Address } @@ -65,27 +105,26 @@ func batchChainWrite[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T if !isOk { require.Fail(t, "expected *TestStruct for contract: %s read: %s, but received %T", contract.Name, readEntry.Name, readEntry.ReturnValue) } - SubmitTransactionToCW(t, tester, MethodSettingStruct, val, types.BoundContract{Name: contract.Name, Address: nameToAddress[contract.Name]}, types.Unconfirmed) + SubmitTransactionToCW(t, tester, cw, MethodSettingStruct, val, types.BoundContract{Name: contract.Name, Address: nameToAddress[contract.Name]}, types.Unconfirmed) } } } -// SubmitTransactionToCW submits a transaction to the ChainWriter and waits for it to reach the given status. -func SubmitTransactionToCW[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], method string, args any, contract types.BoundContract, status types.TransactionStatus) string { +// SubmitTransactionToCW submits a transaction to the ContractWriter and waits for it to reach the given status. 
+func SubmitTransactionToCW[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], cw types.ContractWriter, method string, args any, contract types.BoundContract, status types.TransactionStatus) string { tester.DirtyContracts() txID := uuid.New().String() - cw := tester.GetChainWriter(t) err := cw.SubmitTransaction(tests.Context(t), contract.Name, method, args, txID, contract.Address, nil, big.NewInt(0)) require.NoError(t, err) - err = WaitForTransactionStatus(t, tester, txID, status, false) + err = WaitForTransactionStatus(t, tester, cw, txID, status, false) require.NoError(t, err) return txID } // WaitForTransactionStatus waits for a transaction to reach the given status. -func WaitForTransactionStatus[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], txID string, status types.TransactionStatus, mockRun bool) error { +func WaitForTransactionStatus[T TestingT[T]](t T, tester ChainComponentsInterfaceTester[T], cw types.ContractWriter, txID string, status types.TransactionStatus, mockRun bool) error { ctx, cancel := context.WithTimeout(tests.Context(t), 15*time.Minute) defer cancel() @@ -101,7 +140,7 @@ func WaitForTransactionStatus[T TestingT[T]](t T, tester ChainComponentsInterfac tester.GenerateBlocksTillConfidenceLevel(t, "", "", primitives.Finalized) return nil } - current, err := tester.GetChainWriter(t).GetTransactionStatus(ctx, txID) + current, err := cw.GetTransactionStatus(ctx, txID) if err != nil { return fmt.Errorf("failed to get transaction status: %w", err) } @@ -152,11 +191,16 @@ type MidLevelStaticTestStruct struct { Inner InnerStaticTestStruct } +type AccountStruct struct { + Account []byte + AccountStr string +} + type TestStruct struct { Field *int32 OracleID commontypes.OracleID OracleIDs [32]commontypes.OracleID - Account []byte + AccountStruct AccountStruct Accounts [][]byte DifferentField string BigField *big.Int @@ -173,7 +217,7 @@ type TestStructMissingField struct { DifferentField string OracleID commontypes.OracleID 
OracleIDs [32]commontypes.OracleID - Account []byte + AccountStruct AccountStruct Accounts [][]byte BigField *big.Int NestedDynamicStruct MidLevelDynamicTestStruct @@ -182,7 +226,7 @@ type TestStructMissingField struct { // compatibleTestStruct has fields in a different order type compatibleTestStruct struct { - Account []byte + AccountStruct AccountStruct Accounts [][]byte BigField *big.Int DifferentField string @@ -202,21 +246,26 @@ type FilterEventParams struct { Field int32 } -type BatchCallEntry map[types.BoundContract]ContractBatchEntry -type ContractBatchEntry []ReadEntry -type ReadEntry struct { - Name string - ReturnValue any -} +type ( + BatchCallEntry map[types.BoundContract]ContractBatchEntry + ContractBatchEntry []ReadEntry + ReadEntry struct { + Name string + ReturnValue any + } +) func CreateTestStruct[T any](i int, tester BasicTester[T]) TestStruct { s := fmt.Sprintf("field%v", i) fv := int32(i) return TestStruct{ - Field: &fv, - OracleID: commontypes.OracleID(i + 1), - OracleIDs: [32]commontypes.OracleID{commontypes.OracleID(i + 2), commontypes.OracleID(i + 3)}, - Account: tester.GetAccountBytes(i + 3), + Field: &fv, + OracleID: commontypes.OracleID(i + 1), + OracleIDs: [32]commontypes.OracleID{commontypes.OracleID(i + 2), commontypes.OracleID(i + 3)}, + AccountStruct: AccountStruct{ + Account: tester.GetAccountBytes(i), + AccountStr: tester.GetAccountString(i), + }, Accounts: [][]byte{tester.GetAccountBytes(i + 4), tester.GetAccountBytes(i + 5)}, DifferentField: s, BigField: big.NewInt(int64((i + 1) * (i + 2))), diff --git a/pkg/types/keystore/types.go b/pkg/types/keystore/types.go new file mode 100644 index 000000000..b1ab498df --- /dev/null +++ b/pkg/types/keystore/types.go @@ -0,0 +1,37 @@ +package keystore + +import "context" + +// Keystore This interface is exposed to keystore consumers +type Keystore interface { + Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) + SignBatch(ctx context.Context, keyID []byte, data 
[][]byte) ([][]byte, error) + Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) + VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) + + ListKeys(ctx context.Context, tags []string) ([][]byte, error) + + // RunUDF executes a user-defined function (UDF) on the keystore. + // This method is designed to provide flexibility by allowing users to define custom + // logic that can be executed without breaking the existing interface. While it enables + // future extensibility, developers should ensure that UDF implementations are safe + // and do not compromise the security of the keystore or the integrity of the data. + RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) +} + +// Management Core node exclusive +type Management interface { + AddPolicy(ctx context.Context, policy []byte) (string, error) + RemovePolicy(ctx context.Context, policyID string) error + ListPolicy(ctx context.Context) []byte + + ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) + ExportKey(ctx context.Context, keyID []byte) ([]byte, error) + + CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) + DeleteKey(ctx context.Context, keyID []byte) error + + AddTag(ctx context.Context, keyID []byte, tag string) error + RemoveTag(ctx context.Context, keyID []byte, tag string) error + ListTags(ctx context.Context, keyID []byte) ([]string, error) +} diff --git a/pkg/types/llo/types.go b/pkg/types/llo/types.go index 8aec019d0..3951afa4a 100644 --- a/pkg/types/llo/types.go +++ b/pkg/types/llo/types.go @@ -8,6 +8,7 @@ import ( "math" "github.com/smartcontractkit/libocr/offchainreporting2plus/ocr3types" + ocr2types "github.com/smartcontractkit/libocr/offchainreporting2plus/types" "github.com/smartcontractkit/chainlink-common/pkg/services" ) @@ -35,8 +36,15 @@ const ( // NOTE: Only add something here if you actually need it, because it has to // be supported forever and 
can't be changed + + // ReportFormatEVMPremiumLegacy maintains compatibility with the legacy + // Mercury v0.3 report format ReportFormatEVMPremiumLegacy ReportFormat = 1 - ReportFormatJSON ReportFormat = 2 + // ReportFormatJSON is a simple JSON format for reference and debugging + ReportFormatJSON ReportFormat = 2 + // ReportFormatRetirement is a special "capstone" report format to indicate + // a retired OCR instance, and handover crucial information to a new one + ReportFormatRetirement ReportFormat = 3 _ ReportFormat = math.MaxUint32 // reserved ) @@ -44,6 +52,7 @@ const ( var ReportFormats = []ReportFormat{ ReportFormatEVMPremiumLegacy, ReportFormatJSON, + ReportFormatRetirement, } func (rf ReportFormat) String() string { @@ -52,6 +61,8 @@ func (rf ReportFormat) String() string { return "evm_premium_legacy" case ReportFormatJSON: return "json" + case ReportFormatRetirement: + return "retirement" default: return fmt.Sprintf("unknown(%d)", rf) } @@ -63,6 +74,8 @@ func ReportFormatFromString(s string) (ReportFormat, error) { return ReportFormatEVMPremiumLegacy, nil case "json": return ReportFormatJSON, nil + case "retirement": + return ReportFormatRetirement, nil default: return 0, fmt.Errorf("unknown report format: %q", s) } @@ -302,3 +315,7 @@ type ChannelDefinitionCache interface { Definitions() ChannelDefinitions services.Service } + +type ShouldRetireCache interface { + ShouldRetire(digest ocr2types.ConfigDigest) (bool, error) +} diff --git a/pkg/types/provider_llo.go b/pkg/types/provider_llo.go index db5817607..6fc8a5ccf 100644 --- a/pkg/types/provider_llo.go +++ b/pkg/types/provider_llo.go @@ -1,11 +1,21 @@ package types import ( + ocrtypes "github.com/smartcontractkit/libocr/offchainreporting2plus/types" + "github.com/smartcontractkit/chainlink-common/pkg/types/llo" ) +type LLOConfigProvider interface { + OffchainConfigDigester() ocrtypes.OffchainConfigDigester + // One instance will be run per config tracker + ContractConfigTrackers() 
[]ocrtypes.ContractConfigTracker +} + type LLOProvider interface { - ConfigProvider + Service + LLOConfigProvider + ShouldRetireCache() llo.ShouldRetireCache ContractTransmitter() llo.Transmitter ChannelDefinitionCache() llo.ChannelDefinitionCache } diff --git a/pkg/types/query/key_filter.go b/pkg/types/query/key_filter.go new file mode 100644 index 000000000..d5b5b78c3 --- /dev/null +++ b/pkg/types/query/key_filter.go @@ -0,0 +1,164 @@ +package query + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/types/query/primitives" +) + +// IndexedSequencesKeyFilter creates a KeyFilter that filters logs for the provided sequence property values at the +// specified property name. Sequence value filters are 'OR'ed together. A sequence read name is the value that +// identifies the sequence type. The signature value name is the sequence property to apply the filter to and the +// sequence values are the individual values to search for in the provided property. +func IndexedSequencesKeyFilter( + readName string, + comparatorName string, + values []string, + confidence primitives.ConfidenceLevel, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + filtersForValues(comparatorName, values), + Confidence(confidence), + }, + } +} + +// IndexedSequencesByBlockRangeKeyFilter creates a KeyFilter that filters sequences for the provided property values at +// the specified property name. Value filters are 'OR'ed together and results are limited by provided cursor range. A +// read name is the value that identifies the sequence type. The signature property name is the sequence property to +// apply the filter to and the sequence values are the individual values to search for in the provided property. 
+func IndexedSequencesByBlockRangeKeyFilter( + readName string, + start, end string, + comparatorName string, + values []string, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + filtersForValues(comparatorName, values), + Block(start, primitives.Gte), + Block(end, primitives.Lte), + }, + } +} + +// IndexedSequencesValueGreaterThanKeyFilter creates a KeyFilter that filters sequences for the provided property value +// and name at or above the specified confidence level. A sequence read name is the value that identifies the sequence +// type. The property name is the sequence property to apply the filter to and the value is the individual value to +// search for in the provided property. +func IndexedSequencesValueGreaterThanKeyFilter( + readName string, + comparatorName, value string, + confidence primitives.ConfidenceLevel, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + valueComparator(comparatorName, value, primitives.Gte), + Confidence(confidence), + }, + } +} + +// IndexedSequencesValueRangeKeyFilter creates a KeyFilter that filters logs on the provided sequence property between +// the provided min and max, endpoints inclusive. A sequence read name is the value that identifies the sequence type. +func IndexedSequencesValueRangeKeyFilter( + readName string, + comparatorName string, + min, max string, + confidence primitives.ConfidenceLevel, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + valueComparator(comparatorName, min, primitives.Gte), + valueComparator(comparatorName, max, primitives.Lte), + Confidence(confidence), + }, + } +} + +// IndexedSequencesByTxHashKeyFilter creates a KeyFilter that filters logs for the provided transaction hash. A sequence +// read name is the value that identifies the sequence type. 
+func IndexedSequencesByTxHashKeyFilter( + readName, txHash string, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + TxHash(txHash), + }, + } +} + +// SequencesByBlockRangeKeyFilter creates a KeyFilter that filters sequences for the provided block range, endpoints inclusive. +func SequencesByBlockRangeKeyFilter( + readName string, + start, end string, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + Block(start, primitives.Gte), + Block(end, primitives.Lte), + }, + } +} + +// SequencesCreatedAfterKeyFilter creates a KeyFilter that filters sequences for after but not equal to the provided time value. +func SequencesCreatedAfterKeyFilter( + readName string, + timestamp time.Time, + confidence primitives.ConfidenceLevel, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + Timestamp(uint64(timestamp.Unix()), primitives.Gt), + Confidence(confidence), + }, + } +} + +// IndexedSequencesCreatedAfterKeyFilter creates a KeyFilter that filters sequences for the provided property and values +// created after the provided time value. Sequence property values filters are 'OR'ed. A sequence read name is the value +// that identifies the sequence type. 
+func IndexedSequencesCreatedAfterKeyFilter( + readName string, + comparatorName string, + values []string, + timestamp time.Time, + confidence primitives.ConfidenceLevel, +) KeyFilter { + return KeyFilter{ + Key: readName, + Expressions: []Expression{ + filtersForValues(comparatorName, values), + Timestamp(uint64(timestamp.Unix()), primitives.Gt), + Confidence(confidence), + }, + } +} + +func valueComparator(comparatorName, value string, op primitives.ComparisonOperator) Expression { + return Comparator(comparatorName, primitives.ValueComparator{ + Value: value, + Operator: op, + }) +} + +func filtersForValues(comparatorName string, values []string) Expression { + valueFilters := BoolExpression{ + Expressions: make([]Expression, len(values)), + BoolOperator: OR, + } + + for idx, value := range values { + valueFilters.Expressions[idx] = valueComparator(comparatorName, value, primitives.Eq) + } + + return Expression{BoolExpression: valueFilters} +} diff --git a/pkg/types/query/key_filter_test.go b/pkg/types/query/key_filter_test.go new file mode 100644 index 000000000..5f7cf0d9a --- /dev/null +++ b/pkg/types/query/key_filter_test.go @@ -0,0 +1,200 @@ +package query_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/types/query" + "github.com/smartcontractkit/chainlink-common/pkg/types/query/primitives" +) + +func TestIndexedSequencesKeyFilter(t *testing.T) { + t.Parallel() + + filter := query.IndexedSequencesKeyFilter("readName", "property", []string{"value1", "value2"}, primitives.Finalized) + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + {BoolExpression: query.BoolExpression{ + Expressions: []query.Expression{ + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "value1", Operator: primitives.Eq}}, + }, + }, + { + Primitive: &primitives.Comparator{ + Name: "property", + 
ValueComparators: []primitives.ValueComparator{{Value: "value2", Operator: primitives.Eq}}, + }, + }, + }, + BoolOperator: query.OR, + }}, + {Primitive: &primitives.Confidence{ConfidenceLevel: primitives.Finalized}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestIndexedSequencesByBlockRangeKeyFilter(t *testing.T) { + t.Parallel() + + filter := query.IndexedSequencesByBlockRangeKeyFilter("readName", "start", "end", "property", []string{"value1", "value2"}) + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + {BoolExpression: query.BoolExpression{ + Expressions: []query.Expression{ + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "value1", Operator: primitives.Eq}}, + }, + }, + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "value2", Operator: primitives.Eq}}, + }, + }, + }, + BoolOperator: query.OR, + }}, + {Primitive: &primitives.Block{Block: "start", Operator: primitives.Gte}}, + {Primitive: &primitives.Block{Block: "end", Operator: primitives.Lte}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestIndexedSequencesValueGreaterThanKeyFilter(t *testing.T) { + t.Parallel() + + filter := query.IndexedSequencesValueGreaterThanKeyFilter("readName", "property", "value1", primitives.Finalized) + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "value1", Operator: primitives.Gte}}, + }, + }, + {Primitive: &primitives.Confidence{ConfidenceLevel: primitives.Finalized}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestIndexedSequencesValueRangeKeyFilter(t *testing.T) { + t.Parallel() + + filter := query.IndexedSequencesValueRangeKeyFilter("readName", "property", "min", "max", primitives.Finalized) + expected := 
query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "min", Operator: primitives.Gte}}, + }, + }, + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "max", Operator: primitives.Lte}}, + }, + }, + {Primitive: &primitives.Confidence{ConfidenceLevel: primitives.Finalized}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestIndexedSequencesByTxHashKeyFilter(t *testing.T) { + t.Parallel() + + filter := query.IndexedSequencesByTxHashKeyFilter("readName", "hash") + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + {Primitive: &primitives.TxHash{TxHash: "hash"}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestSequencesByBlockRangeKeyFilter(t *testing.T) { + t.Parallel() + + filter := query.SequencesByBlockRangeKeyFilter("readName", "start", "end") + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + {Primitive: &primitives.Block{Block: "start", Operator: primitives.Gte}}, + {Primitive: &primitives.Block{Block: "end", Operator: primitives.Lte}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestSequencesCreatedAfterKeyFilter(t *testing.T) { + t.Parallel() + + now := time.Now() + + filter := query.SequencesCreatedAfterKeyFilter("readName", now, primitives.Finalized) + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + {Primitive: &primitives.Timestamp{Timestamp: uint64(now.Unix()), Operator: primitives.Gt}}, + {Primitive: &primitives.Confidence{ConfidenceLevel: primitives.Finalized}}, + }, + } + + require.Equal(t, expected, filter) +} + +func TestIndexedSequencesCreatedAfterKeyFilter(t *testing.T) { + t.Parallel() + + now := time.Now() + + filter := query.IndexedSequencesCreatedAfterKeyFilter("readName", "property", 
[]string{"value1", "value2"}, now, primitives.Finalized) + expected := query.KeyFilter{ + Key: "readName", + Expressions: []query.Expression{ + {BoolExpression: query.BoolExpression{ + Expressions: []query.Expression{ + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "value1", Operator: primitives.Eq}}, + }, + }, + { + Primitive: &primitives.Comparator{ + Name: "property", + ValueComparators: []primitives.ValueComparator{{Value: "value2", Operator: primitives.Eq}}, + }, + }, + }, + BoolOperator: query.OR, + }}, + {Primitive: &primitives.Timestamp{Timestamp: uint64(now.Unix()), Operator: primitives.Gt}}, + {Primitive: &primitives.Confidence{ConfidenceLevel: primitives.Finalized}}, + }, + } + + require.Equal(t, expected, filter) +} diff --git a/pkg/types/relayer.go b/pkg/types/relayer.go index ea87f1d17..d482e75e8 100644 --- a/pkg/types/relayer.go +++ b/pkg/types/relayer.go @@ -97,9 +97,9 @@ type ChainService interface { type Relayer interface { ChainService - // NewChainWriter returns a new ChainWriter. + // NewContractWriter returns a new ContractWriter. // The format of config depends on the implementation. - NewChainWriter(ctx context.Context, config []byte) (ChainWriter, error) + NewContractWriter(ctx context.Context, config []byte) (ContractWriter, error) // NewContractReader returns a new ContractReader. // The format of contractReaderConfig depends on the implementation. diff --git a/pkg/utils/sleeper_task.go b/pkg/utils/sleeper_task.go index 0a65ea890..02dc970b3 100644 --- a/pkg/utils/sleeper_task.go +++ b/pkg/utils/sleeper_task.go @@ -1,6 +1,7 @@ package utils import ( + "context" "fmt" "time" @@ -13,12 +14,18 @@ type Worker interface { Name() string } +// WorkerCtx is like Worker but includes [context.Context]. +type WorkerCtx interface { + Work(ctx context.Context) + Name() string +} + // SleeperTask represents a task that waits in the background to process some work. 
type SleeperTask struct { services.StateMachine - worker Worker + worker WorkerCtx chQueue chan struct{} - chStop chan struct{} + chStop services.StopChan chDone chan struct{} chWorkDone chan struct{} } @@ -31,16 +38,27 @@ type SleeperTask struct { // immediately after it is finished. For this reason you should take care to // make sure that Worker is idempotent. // WakeUp does not block. -func NewSleeperTask(worker Worker) *SleeperTask { +func NewSleeperTask(w Worker) *SleeperTask { + return NewSleeperTaskCtx(&worker{w}) +} + +type worker struct { + Worker +} + +func (w *worker) Work(ctx context.Context) { w.Worker.Work() } + +// NewSleeperTaskCtx is like NewSleeperTask but accepts a WorkerCtx with a [context.Context]. +func NewSleeperTaskCtx(w WorkerCtx) *SleeperTask { s := &SleeperTask{ - worker: worker, + worker: w, chQueue: make(chan struct{}, 1), chStop: make(chan struct{}), chDone: make(chan struct{}), chWorkDone: make(chan struct{}, 10), } - _ = s.StartOnce("SleeperTask-"+worker.Name(), func() error { + _ = s.StartOnce("SleeperTask-"+w.Name(), func() error { go s.workerLoop() return nil }) @@ -98,10 +116,13 @@ func (s *SleeperTask) WorkDone() <-chan struct{} { func (s *SleeperTask) workerLoop() { defer close(s.chDone) + ctx, cancel := s.chStop.NewCtx() + defer cancel() + for { select { case <-s.chQueue: - s.worker.Work() + s.worker.Work(ctx) s.workDone() case <-s.chStop: return diff --git a/pkg/utils/tests/tests.go b/pkg/utils/tests/tests.go index 68aefbc1c..374ef23fb 100644 --- a/pkg/utils/tests/tests.go +++ b/pkg/utils/tests/tests.go @@ -68,3 +68,14 @@ func RequireSignal(t *testing.T, ch <-chan struct{}, failMsg string) { t.Fatal(failMsg) } } + +// SkipShort skips tb during -short runs, and notes why. 
+func SkipShort(tb testing.TB, why string) { + if testing.Short() { + tb.Skipf("skipping: %s", why) + } +} + +func SkipFlakey(t *testing.T, ticketURL string) { + t.Skip("Flakey", ticketURL) +} diff --git a/pkg/values/map.go b/pkg/values/map.go index 076831102..bfe5fb494 100644 --- a/pkg/values/map.go +++ b/pkg/values/map.go @@ -20,7 +20,7 @@ func EmptyMap() *Map { } } -func NewMap(m map[string]any) (*Map, error) { +func NewMap[T any](m map[string]T) (*Map, error) { mv := map[string]Value{} for k, v := range m { val, err := Wrap(v) diff --git a/pkg/values/value.go b/pkg/values/value.go index 7d9d75ff7..ac109fd9c 100644 --- a/pkg/values/value.go +++ b/pkg/values/value.go @@ -6,8 +6,8 @@ import ( "math" "math/big" "reflect" + "time" - "github.com/go-viper/mapstructure/v2" "github.com/shopspring/decimal" "github.com/smartcontractkit/chainlink-common/pkg/values/pb" @@ -76,6 +76,8 @@ func Wrap(v any) (Value, error) { return NewFloat64(float64(tv)), nil case *big.Int: return NewBigInt(tv), nil + case time.Time: + return NewTime(tv), nil case nil: return nil, nil @@ -95,6 +97,12 @@ func Wrap(v any) (Value, error) { return tv, nil case *Float64: return tv, nil + case *Bool: + return tv, nil + case *BigInt: + return tv, nil + case *Time: + return tv, nil } // Handle slices, structs, and pointers to structs @@ -294,12 +302,32 @@ func fromBigIntValueProto(biv *pb.BigInt) *BigInt { } func CreateMapFromStruct(v any) (*Map, error) { - var resultMap map[string]interface{} + resultMap := map[string]any{} - err := mapstructure.Decode(v, &resultMap) - if err != nil { - return nil, err + // use reflect to handle nested types as an interface + rv := reflect.ValueOf(v) + rt := reflect.TypeOf(v) + + if rv.Kind() != reflect.Struct { + return nil, errors.New("input must be of struct type") } + + for i := 0; i < rv.NumField(); i++ { + field := rt.Field(i) + // ignore private fields + if !field.IsExported() { + continue + } + // for backwards compatibility, use tagged mapstructure tag as key 
if provided + msTag := field.Tag.Get("mapstructure") + key := msTag + if key == "" { + key = field.Name + } + + resultMap[key] = rv.Field(i).Interface() + } + return NewMap(resultMap) } diff --git a/pkg/values/value_test.go b/pkg/values/value_test.go index 23d02b728..9dbc9fb64 100644 --- a/pkg/values/value_test.go +++ b/pkg/values/value_test.go @@ -1,6 +1,7 @@ package values import ( + "bytes" "math" "math/big" "reflect" @@ -334,6 +335,58 @@ func Test_StructWrapUnwrap(t *testing.T) { assert.Equal(t, expected, unwrapped) } +func Test_NestedValueWrapUnwrap(t *testing.T) { + now := time.Now() + + wrapInt, err := Wrap(int64(100)) + require.NoError(t, err) + wrapDeci, err := Wrap(decimal.NewFromInt(32)) + require.NoError(t, err) + wrapFloat, err := Wrap(float64(1.2)) + require.NoError(t, err) + wrapBuffer, err := Wrap(bytes.NewBufferString("immabuffer").Bytes()) + require.NoError(t, err) + wrapString, err := Wrap("wrapme") + require.NoError(t, err) + wrapBool, err := Wrap(false) + require.NoError(t, err) + wrapBI, err := Wrap(big.NewInt(1)) + require.NoError(t, err) + wrapT, err := Wrap(now) + require.NoError(t, err) + + valuesMap, err := NewMap(map[string]any{ + "Int64": wrapInt, + "Decimal": wrapDeci, + "Float": wrapFloat, + "Buffer": wrapBuffer, + "String": wrapString, + "Bool": wrapBool, + "BI": wrapBI, + "T": wrapT, + }) + require.NoError(t, err) + + unwrappedMap, err := valuesMap.Unwrap() + require.NoError(t, err) + + expectedMap := map[string]any{ + "Int64": int64(100), + "Decimal": decimal.NewFromInt(32), + "Float": float64(1.2), + "Buffer": bytes.NewBufferString("immabuffer").Bytes(), + "String": "wrapme", + "Bool": false, + "BI": big.NewInt(1), + "T": now, + } + require.Equal( + t, + expectedMap, + unwrappedMap, + ) +} + func Test_SameUnderlyingTypes(t *testing.T) { type str string type i int diff --git a/pkg/workflows/exec/interpolation.go b/pkg/workflows/exec/interpolation.go index 27e6eb9a7..905036b41 100644 --- a/pkg/workflows/exec/interpolation.go +++ 
b/pkg/workflows/exec/interpolation.go @@ -104,8 +104,9 @@ func FindAndInterpolateAllKeys(input any, state Results) (any, error) { } type Env struct { - Binary []byte - Config []byte + Binary []byte + Config []byte + Secrets map[string]string } // FindAndInterpolateEnv takes a `config` any value, and recursively @@ -126,7 +127,7 @@ func FindAndInterpolateEnvVars(input any, env Env) (any, error) { } splitToken := strings.Split(matches[1], ".") - if len(splitToken) != 2 { + if len(splitToken) < 2 { return el, nil } @@ -139,8 +140,26 @@ func FindAndInterpolateEnvVars(input any, env Env) (any, error) { return env.Config, nil case "binary": return env.Binary, nil + case "secrets": + switch len(splitToken) { + // A token of the form: + // ENV.secrets. + case 3: + got, ok := env.Secrets[splitToken[2]] + if !ok { + return "", fmt.Errorf("invalid env token: could not find %q in ENV.secrets", splitToken[2]) + } + + return got, nil + // A token of the form: + // ENV.secrets + case 2: + return env.Secrets, nil + } + + return nil, fmt.Errorf("invalid env token: must contain two or three elements, got %q", el.(string)) default: - return "", fmt.Errorf("invalid env token: must be of the form $(ENV.): got %s", el) + return "", fmt.Errorf("invalid env token: must be of the form $(ENV.): got %s", el) } }, ) diff --git a/pkg/workflows/exec/interpolation_test.go b/pkg/workflows/exec/interpolation_test.go index d497aa02b..ffc2e43fb 100644 --- a/pkg/workflows/exec/interpolation_test.go +++ b/pkg/workflows/exec/interpolation_test.go @@ -247,6 +247,50 @@ func TestInterpolateEnv(t *testing.T) { assert.NoError(t, err) } +func TestInterpolateEnv_Secrets(t *testing.T) { + c := map[string]any{ + "fidelityAPIKey": "$(ENV.secrets.fidelity)", + } + _, err := exec.FindAndInterpolateEnvVars(c, exec.Env{}) + assert.ErrorContains(t, err, `invalid env token: could not find "fidelity" in ENV.secrets`) + + c = map[string]any{ + "fidelityAPIKey": "$(ENV.secrets.fidelity.foo)", + } + _, err = 
exec.FindAndInterpolateEnvVars(c, exec.Env{}) + assert.ErrorContains(t, err, `invalid env token: must contain two or three elements`) + + c = map[string]any{ + "secrets": "$(ENV.secrets)", + } + secrets := map[string]string{ + "foo": "fooSecret", + "bar": "barSecret", + } + got, err := exec.FindAndInterpolateEnvVars( + c, + exec.Env{Secrets: secrets}) + require.NoError(t, err) + assert.Equal(t, got, map[string]any{ + "secrets": secrets, + }) + + c = map[string]any{ + "secrets": "$(ENV.secrets.foo)", + } + secrets = map[string]string{ + "foo": "fooSecret", + "bar": "barSecret", + } + got, err = exec.FindAndInterpolateEnvVars( + c, + exec.Env{Secrets: secrets}) + require.NoError(t, err) + assert.Equal(t, got, map[string]any{ + "secrets": "fooSecret", + }) +} + type fakeResults map[string]*exec.Result func (f fakeResults) ResultForStep(s string) (*exec.Result, bool) { diff --git a/pkg/workflows/sdk/builder.go b/pkg/workflows/sdk/builder.go index ad3e3e462..138e55cf3 100644 --- a/pkg/workflows/sdk/builder.go +++ b/pkg/workflows/sdk/builder.go @@ -1,6 +1,7 @@ package sdk import ( + "errors" "fmt" "reflect" "strconv" @@ -15,6 +16,7 @@ type WorkflowSpecFactory struct { duplicateNames map[string]bool emptyNames bool badCapTypes []string + errors []error fns map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) } @@ -123,18 +125,9 @@ func (c *capDefinitionImpl[O]) self() CapDefinition[O] { func (c *capDefinitionImpl[O]) private() {} -type NewWorkflowParams struct { - Owner string - Name string -} - -func NewWorkflowSpecFactory( - params NewWorkflowParams, -) *WorkflowSpecFactory { +func NewWorkflowSpecFactory() *WorkflowSpecFactory { return &WorkflowSpecFactory{ spec: &WorkflowSpec{ - Owner: params.Owner, - Name: params.Name, Triggers: make([]StepDefinition, 0), Actions: make([]StepDefinition, 0), Consensus: make([]StepDefinition, 0), @@ -142,6 +135,7 @@ func NewWorkflowSpecFactory( }, names: map[string]bool{}, 
duplicateNames: map[string]bool{}, + errors: []error{}, emptyNames: false, } } @@ -182,7 +176,15 @@ func AccessField[I, O any](c CapDefinition[I], fieldName string) CapDefinition[O return &capDefinitionImpl[O]{ref: originalRef[:len(originalRef)-1] + "." + fieldName + ")"} } +func (w *WorkflowSpecFactory) AddErr(err error) { + w.errors = append(w.errors, err) +} + func (w *WorkflowSpecFactory) Spec() (WorkflowSpec, error) { + if len(w.errors) > 0 { + return WorkflowSpec{}, errors.Join(w.errors...) + } + if len(w.duplicateNames) > 0 { duplicates := make([]string, 0, len(w.duplicateNames)) for k := range w.duplicateNames { @@ -238,3 +240,23 @@ func AnyMap[M ~map[string]any](inputs CapMap) CapDefinition[M] { return components } + +type SecretValue string + +func (s SecretValue) Ref() any { + return s +} + +func (s SecretValue) private() {} + +func (s SecretValue) self() CapDefinition[string] { + return s +} + +func Secrets() SecretValue { + return "$(ENV.secrets)" +} + +func Secret(named string) SecretValue { + return SecretValue(fmt.Sprintf("$(ENV.secrets.%s)", named)) +} diff --git a/pkg/workflows/sdk/builder_test.go b/pkg/workflows/sdk/builder_test.go index 68133913f..ba3976661 100644 --- a/pkg/workflows/sdk/builder_test.go +++ b/pkg/workflows/sdk/builder_test.go @@ -28,7 +28,6 @@ import ( // This implicitly tests the code generators functionally, as the generated code is used in the tests. 
type Config struct { - Workflow sdk.NewWorkflowParams Streams *streams.TriggerConfig Ocr *ocr3.DataFeedsConsensusConfig ChainWriter *chainwriter.TargetConfig @@ -41,7 +40,7 @@ func NewWorkflowSpec(rawConfig []byte) (*sdk.WorkflowSpecFactory, error) { return nil, err } - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() streamsTrigger := conf.Streams.New(workflow) consensus := conf.Ocr.New(workflow, "ccip_feeds", ocr3.DataFeedsConsensusInput{ Observations: sdk.ListOf[streams.Feed](streamsTrigger)}, @@ -55,13 +54,13 @@ func NewWorkflowSpec(rawConfig []byte) (*sdk.WorkflowSpecFactory, error) { // ModifiedConfig, and the test it's used in, show how you can structure config to remove copy/paste issues when data // needs to be repeated in multiple capability configurations. type ModifiedConfig struct { - Workflow sdk.NewWorkflowParams AllowedPartialStaleness string MaxFrequencyMs uint64 DefaultHeartbeat uint64 `yaml:"default_heartbeat" json:"default_heartbeat"` DefaultDeviation string `yaml:"default_deviation" json:"default_deviation"` FeedInfo []FeedInfo `yaml:"feed_info" json:"feed_info"` ReportID ocr3.ReportId `yaml:"report_id" json:"report_id"` + KeyID ocr3.KeyId `yaml:"key_id" json:"key_id"` Encoder ocr3.Encoder EncoderConfig ocr3.EncoderConfig `yaml:"encoder_config" json:"encoder_config"` ChainWriter *chainwriter.TargetConfig @@ -87,6 +86,7 @@ func NewWorkflowRemapped(rawConfig []byte) (*sdk.WorkflowSpecFactory, error) { Encoder: conf.Encoder, EncoderConfig: conf.EncoderConfig, ReportId: conf.ReportID, + KeyId: conf.KeyID, AggregationConfig: ocr3.DataFeedsConsensusConfigAggregationConfig{ AllowedPartialStaleness: conf.AllowedPartialStaleness, }, @@ -112,7 +112,7 @@ func NewWorkflowRemapped(rawConfig []byte) (*sdk.WorkflowSpecFactory, error) { } ocr3Config.AggregationConfig.Feeds = feeds - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() streamsTrigger := 
streamsConfig.New(workflow) consensus := ocr3Config.New(workflow, "ccip_feeds", ocr3.DataFeedsConsensusInput{ @@ -132,7 +132,7 @@ func NewWorkflowSpecFromPrimitives(rawConfig []byte) (*sdk.WorkflowSpecFactory, return nil, err } - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() notStreamsTrigger := conf.NotStream.New(workflow) md := streams.NewSignersMetadataFromFields( @@ -167,6 +167,7 @@ func NewWorkflowSpecFromPrimitives(rawConfig []byte) (*sdk.WorkflowSpecFactory, Encoder: conf.Ocr.Encoder, EncoderConfig: conf.Ocr.EncoderConfig, ReportId: conf.Ocr.ReportID, + KeyId: conf.Ocr.KeyID, } consensus := ocrConfig.New(workflow, "data-feeds-report", ocr3.DataFeedsConsensusInput{ @@ -210,8 +211,6 @@ func TestBuilder_ValidSpec(t *testing.T) { require.NoError(t, err) expected := sdk.WorkflowSpec{ - Name: "notccipethsep", - Owner: "0x00000000000000000000000000000000000000aa", Triggers: []sdk.StepDefinition{ { ID: "notstreams@1.0.0", @@ -263,6 +262,7 @@ func TestBuilder_ValidSpec(t *testing.T) { "Abi": "(bytes32 FeedID, uint224 Price, uint32 Timestamp)[] Reports", }, "report_id": "0001", + "key_id": "evm", }, CapabilityType: capabilities.CapabilityTypeConsensus, }, @@ -287,14 +287,12 @@ func TestBuilder_ValidSpec(t *testing.T) { }) t.Run("maps work correctly", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "1", Number: 1}.New(workflow) mapaction.ActionConfig{}.New(workflow, "ref", mapaction.ActionInput{Payload: sdk.Map[string, mapaction.ActionInputsPayload](map[string]sdk.CapDefinition[string]{"Foo": trigger.CoolOutput()})}) spec, err := workflow.Spec() require.NoError(t, err) testutils.AssertWorkflowSpec(t, sdk.WorkflowSpec{ - Name: "name", - Owner: "owner", Triggers: []sdk.StepDefinition{ { ID: "basic-test-trigger@1.0.0", @@ -324,14 +322,12 @@ func TestBuilder_ValidSpec(t 
*testing.T) { }) t.Run("any maps work correctly", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "1", Number: 1}.New(workflow) anymapaction.MapActionConfig{}.New(workflow, "ref", anymapaction.MapActionInput{Payload: sdk.AnyMap[anymapaction.MapActionInputsPayload](sdk.CapMap{"Foo": trigger.CoolOutput()})}) spec, err := workflow.Spec() require.NoError(t, err) testutils.AssertWorkflowSpec(t, sdk.WorkflowSpec{ - Name: "name", - Owner: "owner", Triggers: []sdk.StepDefinition{ { ID: "basic-test-trigger@1.0.0", @@ -361,7 +357,7 @@ func TestBuilder_ValidSpec(t *testing.T) { }) t.Run("ToListDefinition works correctly for list elements", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := listtrigger.TriggerConfig{Name: "1"}.New(workflow) asList := sdk.ToListDefinition[string](trigger.CoolOutput()) sdk.Compute1(workflow, "compute", sdk.Compute1Inputs[[]string]{Arg0: asList}, func(_ sdk.Runtime, inputs []string) (string, error) { @@ -375,8 +371,6 @@ func TestBuilder_ValidSpec(t *testing.T) { require.NoError(t, err) testutils.AssertWorkflowSpec(t, sdk.WorkflowSpec{ - Name: "name", - Owner: "owner", Triggers: []sdk.StepDefinition{ { ID: "list@1.0.0", @@ -388,7 +382,7 @@ func TestBuilder_ValidSpec(t *testing.T) { }, Actions: []sdk.StepDefinition{ { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "compute", Inputs: sdk.StepInputs{ Mapping: map[string]any{"Arg0": "$(trigger.outputs.cool_output)"}, @@ -400,7 +394,7 @@ func TestBuilder_ValidSpec(t *testing.T) { CapabilityType: capabilities.CapabilityTypeAction, }, { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "compute again", Inputs: sdk.StepInputs{ Mapping: map[string]any{"Arg0": "$(trigger.outputs.cool_output.0)"}, @@ -418,7 
+412,7 @@ func TestBuilder_ValidSpec(t *testing.T) { }) t.Run("ToListDefinition works correctly for built up lists", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "1"}.New(workflow) asList := sdk.ToListDefinition(sdk.ListOf(trigger.CoolOutput())) sdk.Compute1(workflow, "compute", sdk.Compute1Inputs[[]string]{Arg0: asList}, func(_ sdk.Runtime, inputs []string) (string, error) { @@ -432,8 +426,6 @@ func TestBuilder_ValidSpec(t *testing.T) { require.NoError(t, err) testutils.AssertWorkflowSpec(t, sdk.WorkflowSpec{ - Name: "name", - Owner: "owner", Triggers: []sdk.StepDefinition{ { ID: "basic-test-trigger@1.0.0", @@ -445,7 +437,7 @@ func TestBuilder_ValidSpec(t *testing.T) { }, Actions: []sdk.StepDefinition{ { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "compute", Inputs: sdk.StepInputs{ Mapping: map[string]any{"Arg0": []any{"$(trigger.outputs.cool_output)"}}, @@ -457,7 +449,7 @@ func TestBuilder_ValidSpec(t *testing.T) { CapabilityType: capabilities.CapabilityTypeAction, }, { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "compute again", Inputs: sdk.StepInputs{ Mapping: map[string]any{"Arg0": "$(trigger.outputs.cool_output)"}, @@ -475,7 +467,7 @@ func TestBuilder_ValidSpec(t *testing.T) { }) t.Run("ToListDefinition works correctly for hard-coded lists", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "1"}.New(workflow) list := sdk.ToListDefinition(sdk.ConstantDefinition([]string{"1", "2"})) sdk.Compute2(workflow, "compute", sdk.Compute2Inputs[string, []string]{Arg0: trigger.CoolOutput(), Arg1: list}, func(_ sdk.Runtime, t string, l []string) (string, error) { @@ -489,8 +481,6 @@ func TestBuilder_ValidSpec(t *testing.T) { 
require.NoError(t, err) testutils.AssertWorkflowSpec(t, sdk.WorkflowSpec{ - Name: "name", - Owner: "owner", Triggers: []sdk.StepDefinition{ { ID: "basic-test-trigger@1.0.0", @@ -502,7 +492,7 @@ func TestBuilder_ValidSpec(t *testing.T) { }, Actions: []sdk.StepDefinition{ { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "compute", Inputs: sdk.StepInputs{ Mapping: map[string]any{ @@ -517,7 +507,7 @@ func TestBuilder_ValidSpec(t *testing.T) { CapabilityType: capabilities.CapabilityTypeAction, }, { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "compute again", Inputs: sdk.StepInputs{ Mapping: map[string]any{ @@ -538,14 +528,14 @@ func TestBuilder_ValidSpec(t *testing.T) { }) t.Run("AnyListOf works like list of but returns a type any", func(t *testing.T) { - workflow1 := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow1 := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "foo", Number: 0} list := sdk.ListOf(trigger.New(workflow1).CoolOutput()) sdk.Compute1(workflow1, "compute", sdk.Compute1Inputs[[]string]{Arg0: list}, func(_ sdk.Runtime, inputs []string) (string, error) { return inputs[0], nil }) - workflow2 := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow2 := sdk.NewWorkflowSpecFactory() anyList := sdk.AnyListOf(trigger.New(workflow2).CoolOutput()) sdk.Compute1(workflow2, "compute", sdk.Compute1Inputs[[]any]{Arg0: anyList}, func(_ sdk.Runtime, inputs []any) (any, error) { return inputs[0], nil @@ -563,7 +553,7 @@ func TestBuilder_ValidSpec(t *testing.T) { conf, err := UnmarshalYaml[Config](sepoliaConfig) require.NoError(t, err) - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() streamsTrigger := conf.Streams.New(workflow) consensus := conf.Ocr.New(workflow, "ccip_feeds", ocr3.DataFeedsConsensusInput{ Observations: sdk.ListOf[streams.Feed](streamsTrigger)}, @@ -585,7 +575,7 @@ 
func TestBuilder_ValidSpec(t *testing.T) { conf, err := UnmarshalYaml[Config](sepoliaConfig) require.NoError(t, err) - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() streamsTrigger := conf.Streams.New(workflow) consensus := conf.Ocr.New(workflow, "", ocr3.DataFeedsConsensusInput{ Observations: sdk.ListOf[streams.Feed](streamsTrigger)}, @@ -601,7 +591,7 @@ func TestBuilder_ValidSpec(t *testing.T) { conf, err := UnmarshalYaml[Config](sepoliaConfig) require.NoError(t, err) - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() badStep := sdk.Step[streams.Feed]{ Definition: sdk.StepDefinition{ ID: "streams-trigger@1.0.0", @@ -628,7 +618,7 @@ func TestBuilder_ValidSpec(t *testing.T) { conf, err := UnmarshalYaml[Config](sepoliaConfig) require.NoError(t, err) - workflow := sdk.NewWorkflowSpecFactory(conf.Workflow) + workflow := sdk.NewWorkflowSpecFactory() streamsTrigger := conf.Streams.New(workflow) consensus := conf.Ocr.New(workflow, "ccip_feeds", ocr3.DataFeedsConsensusInput{ Observations: sdk.ListOf[streams.Feed](streamsTrigger)}, @@ -661,7 +651,6 @@ func runSepoliaStagingTest(t *testing.T, config []byte, gen func([]byte) (*sdk.W } type NotStreamsConfig struct { - Workflow sdk.NewWorkflowParams NotStream *notstreams.TriggerConfig `yaml:"not_stream" json:"not_stream"` Ocr *ModifiedConsensusConfig ChainWriter *chainwriter.TargetConfig @@ -676,6 +665,7 @@ type ModifiedConsensusConfig struct { Encoder ocr3.Encoder `json:"encoder" yaml:"encoder" mapstructure:"encoder"` EncoderConfig ocr3.EncoderConfig `json:"encoder_config" yaml:"encoder_config" mapstructure:"encoder_config"` ReportID ocr3.ReportId `json:"report_id" yaml:"report_id" mapstructure:"report_id"` + KeyID ocr3.KeyId `json:"key_id" yaml:"key_id" mapstructure:"key_id"` } func UnmarshalYaml[T any](raw []byte) (*T, error) { diff --git a/pkg/workflows/sdk/compute.go b/pkg/workflows/sdk/compute.go index e247984e2..fb762d356 
100644 --- a/pkg/workflows/sdk/compute.go +++ b/pkg/workflows/sdk/compute.go @@ -1,5 +1,11 @@ package sdk +import ( + "errors" + + "github.com/smartcontractkit/chainlink-common/pkg/values" +) + //go:generate go run ./gen type ComputeOutput[T any] struct { @@ -20,3 +26,48 @@ func (c *computeOutputCap[T]) Value() CapDefinition[T] { } var _ ComputeOutputCap[struct{}] = &computeOutputCap[struct{}]{} + +type ComputeConfig[C any] struct { + Config C +} + +func (c *ComputeConfig[C]) ToMap() (map[string]any, error) { + var m map[string]any + switch cm := any(c.Config).(type) { + case map[string]any: + m = cm + default: + wc, err := values.WrapMap(c.Config) + if err != nil { + return nil, err + } + + uc, err := wc.Unwrap() + if err != nil { + return nil, err + } + + tm, ok := uc.(map[string]any) + if !ok { + return nil, errors.New("could not convert config into map") + } + + m = tm + } + + if _, ok := m["config"]; ok { + return nil, errors.New("`config` is a reserved keyword inside Compute config") + } + m["config"] = "$(ENV.config)" + + if _, ok := m["binary"]; ok { + return nil, errors.New("`binary` is a reserved keyword inside Compute config") + } + m["binary"] = "$(ENV.binary)" + + return m, nil +} + +func EmptyComputeConfig() *ComputeConfig[struct{}] { + return &ComputeConfig[struct{}]{Config: struct{}{}} +} diff --git a/pkg/workflows/sdk/compute_generated.go b/pkg/workflows/sdk/compute_generated.go index 2b165573f..5098ddc03 100644 --- a/pkg/workflows/sdk/compute_generated.go +++ b/pkg/workflows/sdk/compute_generated.go @@ -4,6 +4,7 @@ package sdk import ( "encoding/json" + "fmt" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" "github.com/smartcontractkit/chainlink-common/pkg/values" @@ -26,8 +27,78 @@ func (input Compute1Inputs[I0]) ToSteps() StepInputs { } func Compute1[I0 any, O any](w *WorkflowSpecFactory, ref string, input Compute1Inputs[I0], compute func(Runtime, I0) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ 
struct{}, i0 I0) (O, error) { + return compute(r, i0) + } + return Compute1WithConfig[I0, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute1WithConfig[I0 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute1Inputs[I0], compute func(Runtime, C, I0) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime1Inputs[I0] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime1Inputs[I0] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) 
(capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute1WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute1WithMetadata[I0 any, O any](w *WorkflowSpecFactory, ref string, input Compute1Inputs[I0], compute func(Runtime, I0, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -55,7 +126,7 @@ func Compute1[I0 any, O any](w *WorkflowSpecFactory, ref string, input Compute1I return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0) + output, err := compute(runtime, inputs.Arg0, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -96,8 +167,78 @@ func (input Compute2Inputs[I0, I1]) ToSteps() StepInputs { } func Compute2[I0 any, I1 any, O any](w *WorkflowSpecFactory, ref string, input Compute2Inputs[I0, I1], compute func(Runtime, I0, I1) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1) (O, error) { + return compute(r, i0, i1) + } + return Compute2WithConfig[I0, I1, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute2WithConfig[I0 any, I1 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute2Inputs[I0, I1], compute func(Runtime, C, I0, I1) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := 
func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime2Inputs[I0, I1] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime2Inputs[I0, I1] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute2WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute2WithMetadata[I0 any, I1 any, O any](w *WorkflowSpecFactory, ref string, input Compute2Inputs[I0, I1], compute func(Runtime, I0, I1, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { + def := StepDefinition{ + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -125,7 +266,7 @@ func Compute2[I0 any, I1 any, O any](w *WorkflowSpecFactory, 
ref string, input C return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -169,8 +310,78 @@ func (input Compute3Inputs[I0, I1, I2]) ToSteps() StepInputs { } func Compute3[I0 any, I1 any, I2 any, O any](w *WorkflowSpecFactory, ref string, input Compute3Inputs[I0, I1, I2], compute func(Runtime, I0, I1, I2) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2) (O, error) { + return compute(r, i0, i1, i2) + } + return Compute3WithConfig[I0, I1, I2, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute3WithConfig[I0 any, I1 any, I2 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute3Inputs[I0, I1, I2], compute func(Runtime, C, I0, I1, I2) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime3Inputs[I0, I1, I2] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime3Inputs[I0, I1, I2] + if err := json.Unmarshal(ji, &tmp); err != nil { + return 
capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute3WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute3WithMetadata[I0 any, I1 any, I2 any, O any](w *WorkflowSpecFactory, ref string, input Compute3Inputs[I0, I1, I2], compute func(Runtime, I0, I1, I2, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { + def := StepDefinition{ + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -198,7 +409,7 @@ func Compute3[I0 any, I1 any, I2 any, O any](w *WorkflowSpecFactory, ref string, return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -245,8 +456,78 @@ func (input Compute4Inputs[I0, I1, I2, I3]) ToSteps() StepInputs { } func Compute4[I0 any, I1 any, I2 any, I3 any, O any](w *WorkflowSpecFactory, ref string, input Compute4Inputs[I0, I1, I2, I3], compute func(Runtime, I0, I1, I2, I3) (O, error)) 
ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3) (O, error) { + return compute(r, i0, i1, i2, i3) + } + return Compute4WithConfig[I0, I1, I2, I3, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute4WithConfig[I0 any, I1 any, I2 any, I3 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute4Inputs[I0, I1, I2, I3], compute func(Runtime, C, I0, I1, I2, I3) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime4Inputs[I0, I1, I2, I3] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime4Inputs[I0, I1, I2, I3] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return 
capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute4WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute4WithMetadata[I0 any, I1 any, I2 any, I3 any, O any](w *WorkflowSpecFactory, ref string, input Compute4Inputs[I0, I1, I2, I3], compute func(Runtime, I0, I1, I2, I3, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -274,7 +555,7 @@ func Compute4[I0 any, I1 any, I2 any, I3 any, O any](w *WorkflowSpecFactory, ref return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -324,8 +605,78 @@ func (input Compute5Inputs[I0, I1, I2, I3, I4]) ToSteps() StepInputs { } func Compute5[I0 any, I1 any, I2 any, I3 any, I4 any, O any](w *WorkflowSpecFactory, ref string, input Compute5Inputs[I0, I1, I2, I3, I4], compute func(Runtime, I0, I1, I2, I3, I4) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) (O, error) { + return compute(r, i0, i1, i2, i3, i4) + } + return Compute5WithConfig[I0, I1, I2, I3, I4, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute5WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, O any, C any](w *WorkflowSpecFactory, ref string, config 
*ComputeConfig[C], input Compute5Inputs[I0, I1, I2, I3, I4], compute func(Runtime, C, I0, I1, I2, I3, I4) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime5Inputs[I0, I1, I2, I3, I4] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime5Inputs[I0, I1, I2, I3, I4] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// 
Compute5WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute5WithMetadata[I0 any, I1 any, I2 any, I3 any, I4 any, O any](w *WorkflowSpecFactory, ref string, input Compute5Inputs[I0, I1, I2, I3, I4], compute func(Runtime, I0, I1, I2, I3, I4, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -353,7 +704,7 @@ func Compute5[I0 any, I1 any, I2 any, I3 any, I4 any, O any](w *WorkflowSpecFact return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -406,8 +757,78 @@ func (input Compute6Inputs[I0, I1, I2, I3, I4, I5]) ToSteps() StepInputs { } func Compute6[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any](w *WorkflowSpecFactory, ref string, input Compute6Inputs[I0, I1, I2, I3, I4, I5], compute func(Runtime, I0, I1, I2, I3, I4, I5) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5) + } + return Compute6WithConfig[I0, I1, I2, I3, I4, I5, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute6WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute6Inputs[I0, I1, I2, I3, I4, I5], compute func(Runtime, C, I0, I1, I2, I3, I4, I5) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } 
+ + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime6Inputs[I0, I1, I2, I3, I4, I5] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime6Inputs[I0, I1, I2, I3, I4, I5] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute6WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute6WithMetadata[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any](w *WorkflowSpecFactory, ref string, input Compute6Inputs[I0, 
I1, I2, I3, I4, I5], compute func(Runtime, I0, I1, I2, I3, I4, I5, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -435,7 +856,7 @@ func Compute6[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any](w *Workflow return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -491,8 +912,78 @@ func (input Compute7Inputs[I0, I1, I2, I3, I4, I5, I6]) ToSteps() StepInputs { } func Compute7[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any](w *WorkflowSpecFactory, ref string, input Compute7Inputs[I0, I1, I2, I3, I4, I5, I6], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6) + } + return Compute7WithConfig[I0, I1, I2, I3, I4, I5, I6, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute7WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute7Inputs[I0, I1, I2, I3, I4, I5, I6], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: 
capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime7Inputs[I0, I1, I2, I3, I4, I5, I6] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime7Inputs[I0, I1, I2, I3, I4, I5, I6] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute7WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute7WithMetadata[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any](w *WorkflowSpecFactory, ref string, input Compute7Inputs[I0, I1, I2, I3, I4, I5, I6], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, 
capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { + def := StepDefinition{ + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -520,7 +1011,7 @@ func Compute7[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any](w * return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -579,8 +1070,78 @@ func (input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7]) ToSteps() StepInputs } func Compute8[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O any](w *WorkflowSpecFactory, ref string, input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6, i7) + } + return Compute8WithConfig[I0, I1, I2, I3, I4, I5, I6, I7, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute8WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6, I7) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + 
capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime8Inputs[I0, I1, I2, I3, I4, I5, I6, I7] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime8Inputs[I0, I1, I2, I3, I4, I5, I6, I7] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute8WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute8WithMetadata[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O any](w *WorkflowSpecFactory, ref string, input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, capabilities.RequestMetadata) 
(O, error)) ComputeOutputCap[O] { + def := StepDefinition{ + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -608,7 +1169,7 @@ func Compute8[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -670,8 +1231,78 @@ func (input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8]) ToSteps() StepIn } func Compute9[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, O any](w *WorkflowSpecFactory, ref string, input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, I8) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6, i7, i8) + } + return Compute9WithConfig[I0, I1, I2, I3, I4, I5, I6, I7, I8, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute9WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6, I7, I8) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: 
capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute9WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute9WithMetadata[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, O any](w *WorkflowSpecFactory, ref string, input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8], compute 
func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, I8, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -699,7 +1330,7 @@ func Compute9[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -764,8 +1395,78 @@ func (input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) ToSteps() S } func Compute10[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, I9 any, O any](w *WorkflowSpecFactory, ref string, input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6, i7, i8, i9) + } + return Compute10WithConfig[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute10WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, I9 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", 
ref, err)) + return nil + } + + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8, inputs.Arg9) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} + +// Compute10WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func 
Compute10WithMetadata[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, I9 any, O any](w *WorkflowSpecFactory, ref string, input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), Config: map[string]any{ @@ -793,7 +1494,7 @@ func Compute10[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8, inputs.Arg9) + output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8, inputs.Arg9, request.Metadata) if err != nil { return capabilities.CapabilityResponse{}, err } diff --git a/pkg/workflows/sdk/compute_test.go b/pkg/workflows/sdk/compute_test.go index 9253b5b03..5f024a81e 100644 --- a/pkg/workflows/sdk/compute_test.go +++ b/pkg/workflows/sdk/compute_test.go @@ -1,6 +1,7 @@ package sdk_test import ( + "fmt" "testing" "github.com/stretchr/testify/assert" @@ -12,6 +13,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" ocr3 "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" "github.com/smartcontractkit/chainlink-common/pkg/capabilities/targets/chainwriter" "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams" @@ -31,7 +33,10 @@ func TestCompute(t *testing.T) { }, Timestamp: 1690838088, } - nsf, err := 
values.CreateMapFromStruct(map[string]any{"Arg0": anyNotStreamsInput}) + structToMap, err := values.CreateMapFromStruct(anyNotStreamsInput) + require.NoError(t, err) + + nsf, err := values.NewMap(map[string]any{"Arg0": structToMap}) require.NoError(t, err) t.Run("creates correct workflow spec", func(t *testing.T) { @@ -40,8 +45,6 @@ func TestCompute(t *testing.T) { spec, err2 := workflow.Spec() require.NoError(t, err2) expectedSpec := sdk.WorkflowSpec{ - Name: "name", - Owner: "owner", Triggers: []sdk.StepDefinition{ { ID: "notstreams@1.0.0", @@ -53,7 +56,7 @@ func TestCompute(t *testing.T) { }, Actions: []sdk.StepDefinition{ { - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: "Compute", Inputs: sdk.StepInputs{ Mapping: map[string]any{"Arg0": "$(trigger.outputs)"}, @@ -86,6 +89,7 @@ func TestCompute(t *testing.T) { "encoder": ocr3.EncoderEVM, "encoder_config": ocr3.EncoderConfig{}, "report_id": "0001", + "key_id": "evm", }, CapabilityType: capabilities.CapabilityTypeConsensus, }, @@ -128,13 +132,75 @@ func TestCompute(t *testing.T) { assert.Equal(t, expected, computed.Value) }) + + t.Run("compute supports passing in config via a struct", func(t *testing.T) { + computeFn := func(_ sdk.Runtime, config ComputeConfig, inputs basictrigger.TriggerOutputs) (ComputeOutput, error) { + return ComputeOutput{ + MySecret: string(config.Fidelity), + }, nil + } + conf := ComputeConfig{Fidelity: sdk.Secret("fidelity")} + workflow := createComputeWithConfigWorkflow( + conf, + computeFn, + ) + _, err := workflow.Spec() + require.NoError(t, err) + + fn := workflow.GetFn("Compute") + require.NotNil(t, fn) + + mc, err := values.WrapMap(conf) + require.NoError(t, err) + + req := capabilities.CapabilityRequest{Inputs: nsf, Config: mc} + actual, err := fn(&testutils.NoopRuntime{}, req) + require.NoError(t, err) + + expected, err := computeFn(nil, conf, basictrigger.TriggerOutputs{}) + require.NoError(t, err) + + uw, _ := actual.Value.Unwrap() + fmt.Printf("%+v", uw) + + 
computed := &sdk.ComputeOutput[ComputeOutput]{} + err = actual.Value.UnwrapTo(computed) + require.NoError(t, err) + + assert.Equal(t, expected, computed.Value) + }) +} + +type ComputeConfig struct { + Fidelity sdk.SecretValue +} + +type ComputeOutput struct { + MySecret string +} + +func createComputeWithConfigWorkflow(config ComputeConfig, fn func(_ sdk.Runtime, config ComputeConfig, input basictrigger.TriggerOutputs) (ComputeOutput, error)) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory() + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + cc := &sdk.ComputeConfig[ComputeConfig]{ + Config: config, + } + sdk.Compute1WithConfig( + workflow, + "Compute", + cc, + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + fn, + ) + + return workflow } func createWorkflow(fn func(_ sdk.Runtime, inputFeed notstreams.Feed) ([]streams.Feed, error)) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{ - Owner: "owner", - Name: "name", - }) + workflow := sdk.NewWorkflowSpecFactory() trigger := notstreams.TriggerConfig{MaxFrequencyMs: 5000}.New(workflow) computed := sdk.Compute1(workflow, "Compute", sdk.Compute1Inputs[notstreams.Feed]{Arg0: trigger}, fn) @@ -153,6 +219,7 @@ func createWorkflow(fn func(_ sdk.Runtime, inputFeed notstreams.Feed) ([]streams Encoder: ocr3.EncoderEVM, EncoderConfig: ocr3.EncoderConfig{}, ReportId: "0001", + KeyId: "evm", }.New(workflow, "data-feeds-report", ocr3.DataFeedsConsensusInput{ Observations: computed.Value(), }) diff --git a/pkg/workflows/sdk/gen/compute.go.tmpl b/pkg/workflows/sdk/gen/compute.go.tmpl index c944ca2fa..00c0ffc29 100644 --- a/pkg/workflows/sdk/gen/compute.go.tmpl +++ b/pkg/workflows/sdk/gen/compute.go.tmpl @@ -31,16 +31,26 @@ func (input Compute{{.}}Inputs[{{range RangeNum . 
}}I{{.}},{{ end }}]) ToSteps() } func Compute{{.}}[{{range RangeNum .}}I{{.}} any, {{ end }}O any](w *WorkflowSpecFactory, ref string, input Compute{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}], compute func(Runtime, {{range RangeNum . }}I{{.}},{{ end }})(O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, {{range RangeNum .}}i{{.}} I{{.}},{{end}}) (O, error) { + return compute(r, {{range RangeNum .}}i{{.}},{{end}}) + } + return Compute{{.}}WithConfig[{{range RangeNum .}}I{{.}}, {{ end }}O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute{{.}}WithConfig[{{range RangeNum .}}I{{.}} any, {{ end }}O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}], compute func(Runtime, C, {{range RangeNum . }}I{{.}},{{ end }})(O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", + ID: "custom-compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, - } + } capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { var inputs runtime{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}] @@ -60,7 +70,15 @@ func Compute{{.}}[{{range RangeNum .}}I{{.}} any, {{ end }}O any](w *WorkflowSp return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, {{range RangeNum . }}inputs.Arg{{.}},{{ end }}) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, {{range RangeNum . 
}}inputs.Arg{{.}},{{ end }}) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -80,4 +98,56 @@ func Compute{{.}}[{{range RangeNum .}}I{{.}} any, {{ end }}O any](w *WorkflowSp w.fns[ref] = capFn return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} } + +// Compute{{.}}WithMetadata DO NOT USE, this functions is for internal local testing while other tools are being developed and is temporary +func Compute{{.}}WithMetadata[{{range RangeNum .}}I{{.}} any, {{ end }}O any](w *WorkflowSpecFactory, ref string, input Compute{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}], compute func(Runtime, {{range RangeNum . }}I{{.}},{{ end }} capabilities.RequestMetadata) (O, error)) ComputeOutputCap[O] { + def := StepDefinition{ + ID: "custom-compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: map[string]any{ + "config": "$(ENV.config)", + "binary": "$(ENV.binary)", + }, + CapabilityType: capabilities.CapabilityTypeAction, + } + + capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + var inputs runtime{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}] + if err := request.Inputs.UnwrapTo(&inputs); err != nil { + return capabilities.CapabilityResponse{}, err + } + + // verify against any schema by marshalling and unmarshalling + ji, err := json.Marshal(inputs) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + // use a temp variable to unmarshal to avoid type loss if the inputs has an any in it + var tmp runtime{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}] + if err := json.Unmarshal(ji, &tmp); err != nil { + return capabilities.CapabilityResponse{}, err + } + + output, err := compute(runtime, {{range RangeNum . 
}}inputs.Arg{{.}},{{ end }} request.Metadata) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + computeOutput := ComputeOutput[O]{Value: output} + wrapped, err := values.CreateMapFromStruct(computeOutput) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + + return capabilities.CapabilityResponse{Value: wrapped}, nil + } + + if w.fns == nil { + w.fns = map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error){} + } + w.fns[ref] = capFn + return &computeOutputCap[O]{(&Step[ComputeOutput[O]]{Definition: def}).AddTo(w)} +} {{- end }} diff --git a/pkg/workflows/sdk/runtime.go b/pkg/workflows/sdk/runtime.go index de254acaf..d6403717e 100644 --- a/pkg/workflows/sdk/runtime.go +++ b/pkg/workflows/sdk/runtime.go @@ -7,9 +7,22 @@ import ( var BreakErr = capabilities.ErrStopExecution +type MessageEmitter interface { + // Emit sends a message to the labeler's destination. + Emit(string) error + + // With sets the labels for the message to be emitted. Labels are passed as key-value pairs + // and are cumulative. + With(kvs ...string) MessageEmitter +} + +// Guest interface type Runtime interface { Logger() logger.Logger Fetch(req FetchRequest) (FetchResponse, error) + + // Emitter sends the given message and labels to the configured collector. 
+ Emitter() MessageEmitter } type FetchRequest struct { diff --git a/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go b/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go index c6a9d5be9..c9bececc6 100644 --- a/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go +++ b/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go @@ -19,7 +19,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) TriggerOutputsCap { } step := sdk.Step[TriggerOutputs]{Definition: def} - return TriggerOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return TriggerOutputsWrapper(raw) +} + +// TriggerOutputsWrapper allows access to field from an sdk.CapDefinition[TriggerOutputs] +func TriggerOutputsWrapper(raw sdk.CapDefinition[TriggerOutputs]) TriggerOutputsCap { + wrapped, ok := raw.(TriggerOutputsCap) + if ok { + return wrapped + } + return &triggerOutputsCap{CapDefinition: raw} } type TriggerOutputsCap interface { @@ -28,21 +38,19 @@ type TriggerOutputsCap interface { private() } -// TriggerOutputsCapFromStep should only be called from generated code to assure type safety -func TriggerOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[TriggerOutputs]) TriggerOutputsCap { - raw := step.AddTo(w) - return &triggerOutputs{CapDefinition: raw} -} - -type triggerOutputs struct { +type triggerOutputsCap struct { sdk.CapDefinition[TriggerOutputs] } -func (*triggerOutputs) private() {} -func (c *triggerOutputs) CoolOutput() sdk.CapDefinition[[]string] { +func (*triggerOutputsCap) private() {} +func (c *triggerOutputsCap) CoolOutput() sdk.CapDefinition[[]string] { return sdk.AccessField[TriggerOutputs, []string](c.CapDefinition, "cool_output") } +func ConstantTriggerOutputs(value TriggerOutputs) TriggerOutputsCap { + return &triggerOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func 
NewTriggerOutputsFromFields( coolOutput sdk.CapDefinition[[]string]) TriggerOutputsCap { return &simpleTriggerOutputs{ diff --git a/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go b/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go index 4244079d4..42998133c 100644 --- a/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go +++ b/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go @@ -19,7 +19,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) FeedCap { } step := sdk.Step[Feed]{Definition: def} - return FeedCapFromStep(w, step) + raw := step.AddTo(w) + return FeedWrapper(raw) +} + +// FeedWrapper allows access to field from an sdk.CapDefinition[Feed] +func FeedWrapper(raw sdk.CapDefinition[Feed]) FeedCap { + wrapped, ok := raw.(FeedCap) + if ok { + return wrapped + } + return &feedCap{CapDefinition: raw} } type FeedCap interface { @@ -30,27 +40,25 @@ type FeedCap interface { private() } -// FeedCapFromStep should only be called from generated code to assure type safety -func FeedCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[Feed]) FeedCap { - raw := step.AddTo(w) - return &feed{CapDefinition: raw} -} - -type feed struct { +type feedCap struct { sdk.CapDefinition[Feed] } -func (*feed) private() {} -func (c *feed) Metadata() SignerMetadataCap { - return &signerMetadata{CapDefinition: sdk.AccessField[Feed, SignerMetadata](c.CapDefinition, "Metadata")} +func (*feedCap) private() {} +func (c *feedCap) Metadata() SignerMetadataCap { + return SignerMetadataWrapper(sdk.AccessField[Feed, SignerMetadata](c.CapDefinition, "Metadata")) } -func (c *feed) Payload() FeedReportCap { - return &feedReport{CapDefinition: sdk.AccessField[Feed, FeedReport](c.CapDefinition, "Payload")} +func (c *feedCap) Payload() FeedReportCap { + return FeedReportWrapper(sdk.AccessField[Feed, FeedReport](c.CapDefinition, 
"Payload")) } -func (c *feed) Timestamp() sdk.CapDefinition[int64] { +func (c *feedCap) Timestamp() sdk.CapDefinition[int64] { return sdk.AccessField[Feed, int64](c.CapDefinition, "Timestamp") } +func ConstantFeed(value Feed) FeedCap { + return &feedCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedFromFields( metadata SignerMetadataCap, payload FeedReportCap, @@ -86,6 +94,15 @@ func (c *simpleFeed) Timestamp() sdk.CapDefinition[int64] { func (c *simpleFeed) private() {} +// FeedReportWrapper allows access to field from an sdk.CapDefinition[FeedReport] +func FeedReportWrapper(raw sdk.CapDefinition[FeedReport]) FeedReportCap { + wrapped, ok := raw.(FeedReportCap) + if ok { + return wrapped + } + return &feedReportCap{CapDefinition: raw} +} + type FeedReportCap interface { sdk.CapDefinition[FeedReport] BuyPrice() sdk.CapDefinition[[]uint8] @@ -97,36 +114,34 @@ type FeedReportCap interface { private() } -// FeedReportCapFromStep should only be called from generated code to assure type safety -func FeedReportCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[FeedReport]) FeedReportCap { - raw := step.AddTo(w) - return &feedReport{CapDefinition: raw} -} - -type feedReport struct { +type feedReportCap struct { sdk.CapDefinition[FeedReport] } -func (*feedReport) private() {} -func (c *feedReport) BuyPrice() sdk.CapDefinition[[]uint8] { +func (*feedReportCap) private() {} +func (c *feedReportCap) BuyPrice() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "BuyPrice") } -func (c *feedReport) FullReport() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) FullReport() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "FullReport") } -func (c *feedReport) ObservationTimestamp() sdk.CapDefinition[int64] { +func (c *feedReportCap) ObservationTimestamp() sdk.CapDefinition[int64] { return sdk.AccessField[FeedReport, int64](c.CapDefinition, "ObservationTimestamp") } -func (c 
*feedReport) ReportContext() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) ReportContext() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "ReportContext") } -func (c *feedReport) SellPrice() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) SellPrice() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "SellPrice") } -func (c *feedReport) Signature() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) Signature() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "Signature") } +func ConstantFeedReport(value FeedReport) FeedReportCap { + return &feedReportCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedReportFromFields( buyPrice sdk.CapDefinition[[]uint8], fullReport sdk.CapDefinition[[]uint8], @@ -183,27 +198,34 @@ func (c *simpleFeedReport) Signature() sdk.CapDefinition[[]uint8] { func (c *simpleFeedReport) private() {} +// SignerMetadataWrapper allows access to field from an sdk.CapDefinition[SignerMetadata] +func SignerMetadataWrapper(raw sdk.CapDefinition[SignerMetadata]) SignerMetadataCap { + wrapped, ok := raw.(SignerMetadataCap) + if ok { + return wrapped + } + return &signerMetadataCap{CapDefinition: raw} +} + type SignerMetadataCap interface { sdk.CapDefinition[SignerMetadata] Signer() sdk.CapDefinition[string] private() } -// SignerMetadataCapFromStep should only be called from generated code to assure type safety -func SignerMetadataCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SignerMetadata]) SignerMetadataCap { - raw := step.AddTo(w) - return &signerMetadata{CapDefinition: raw} -} - -type signerMetadata struct { +type signerMetadataCap struct { sdk.CapDefinition[SignerMetadata] } -func (*signerMetadata) private() {} -func (c *signerMetadata) Signer() sdk.CapDefinition[string] { +func (*signerMetadataCap) private() {} +func (c *signerMetadataCap) Signer() sdk.CapDefinition[string] { return 
sdk.AccessField[SignerMetadata, string](c.CapDefinition, "Signer") } +func ConstantSignerMetadata(value SignerMetadata) SignerMetadataCap { + return &signerMetadataCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSignerMetadataFromFields( signer sdk.CapDefinition[string]) SignerMetadataCap { return &simpleSignerMetadata{ diff --git a/pkg/workflows/sdk/testdata/fixtures/workflows/expected_sepolia.yaml b/pkg/workflows/sdk/testdata/fixtures/workflows/expected_sepolia.yaml index fbfaaac2b..b549145c0 100644 --- a/pkg/workflows/sdk/testdata/fixtures/workflows/expected_sepolia.yaml +++ b/pkg/workflows/sdk/testdata/fixtures/workflows/expected_sepolia.yaml @@ -1,8 +1,6 @@ # At the time of writing, this was taken form the staging deployment # trigger ref was added so it can match the way it's done by the builder. It's implied as trigger today, and not harmful to add. # One of the heartbeat and deviation values were modified so that the defaults example can be shown to work. -name: "ccipethsep" -owner: "0x00000000000000000000000000000000000000aa" triggers: - id: "streams-trigger@1.0.0" ref: "trigger" @@ -28,6 +26,7 @@ consensus: - "$(trigger.outputs)" config: report_id: "0001" + key_id: 'evm' aggregation_method: "data_feeds" aggregation_config: allowedPartialStaleness: "0.5" @@ -75,4 +74,4 @@ targets: config: address: "0xE0082363396985ae2FdcC3a9F816A586Eed88416" deltaStage: "45s" - schedule: "oneAtATime" \ No newline at end of file + schedule: "oneAtATime" diff --git a/pkg/workflows/sdk/testdata/fixtures/workflows/notstreamssepolia.yaml b/pkg/workflows/sdk/testdata/fixtures/workflows/notstreamssepolia.yaml index 58f1abd60..2c0cf3556 100644 --- a/pkg/workflows/sdk/testdata/fixtures/workflows/notstreamssepolia.yaml +++ b/pkg/workflows/sdk/testdata/fixtures/workflows/notstreamssepolia.yaml @@ -1,6 +1,3 @@ -workflow: - name: notccipethsep - owner: '0x00000000000000000000000000000000000000aa' not_stream: maxFrequencyMs: 5000 ocr: @@ -9,6 +6,7 @@ ocr: deviation: '0.5' 
heartbeat: 3600 report_id: '0001' + key_id: 'evm' encoder: EVM encoder_config: Abi: "(bytes32 FeedID, uint224 Price, uint32 Timestamp)[] Reports" diff --git a/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia.yaml b/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia.yaml index 244597be1..720c08845 100644 --- a/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia.yaml +++ b/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia.yaml @@ -1,6 +1,3 @@ -workflow: - name: ccipethsep - owner: '0x00000000000000000000000000000000000000aa' streams: feedIds: - '0x0003fbba4fce42f65d6032b18aee53efdf526cc734ad296cb57565979d883bdd' @@ -52,6 +49,7 @@ ocr: deviation: '0.05' heartbeat: 3600 report_id: '0001' + key_id: 'evm' encoder: EVM encoder_config: abi: "(bytes32 FeedID, uint224 Price, uint32 Timestamp)[] Reports" diff --git a/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia_defaults.yaml b/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia_defaults.yaml index e0d0b438d..84019bdc8 100644 --- a/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia_defaults.yaml +++ b/pkg/workflows/sdk/testdata/fixtures/workflows/sepolia_defaults.yaml @@ -1,6 +1,3 @@ -workflow: - name: ccipethsep - owner: '0x00000000000000000000000000000000000000aa' maxFrequencyMs: 5000 default_heartbeat: 3600 default_deviation: '0.05' @@ -20,6 +17,7 @@ feed_info: - feedId: '0x00036c962c10ac4e0aafae5021c99c4f5adb503c0cb76cfddd8d17efe3098f67' - feedId: '0x0003010b9d4a4740f554eeea1e6a2cf1e60189a3d8cf7ced7a03ce050df30076' report_id: '0001' +key_id: 'evm' encoder: EVM encoder_config: abi: "(bytes32 FeedID, uint224 Price, uint32 Timestamp)[] Reports" @@ -28,4 +26,4 @@ chainWriter: deltaStage: 45s schedule: oneAtATime targetChain: 'write_ethereum-testnet-sepolia@1.0.0' -allowedPartialStaleness: '0.5' \ No newline at end of file +allowedPartialStaleness: '0.5' diff --git a/pkg/workflows/sdk/testutils/compute_capability.go b/pkg/workflows/sdk/testutils/compute_capability.go index 3e9932acb..e89ab9777 100644 
--- a/pkg/workflows/sdk/testutils/compute_capability.go +++ b/pkg/workflows/sdk/testutils/compute_capability.go @@ -14,7 +14,7 @@ type computeCapability struct { func (c *computeCapability) Info(ctx context.Context) (capabilities.CapabilityInfo, error) { info := capabilities.MustNewCapabilityInfo( - "custom_compute@1.0.0", capabilities.CapabilityTypeAction, "Custom compute capability", + "custom-compute@1.0.0", capabilities.CapabilityTypeAction, "Custom compute capability", ) info.IsLocal = true return info, nil diff --git a/pkg/workflows/sdk/testutils/runner.go b/pkg/workflows/sdk/testutils/runner.go index aa3c7c35a..390ff233a 100644 --- a/pkg/workflows/sdk/testutils/runner.go +++ b/pkg/workflows/sdk/testutils/runner.go @@ -27,6 +27,7 @@ func NewRunner(ctx context.Context) *Runner { type Runner struct { RawConfig []byte + Secrets map[string]string // Context is held in this runner because it's for testing and capability calls are made by it. // The real SDK implementation will be for the WASM guest and will make host calls, and callbacks to the program. 
// nolint @@ -187,7 +188,17 @@ func (r *Runner) walk(spec sdk.WorkflowSpec, ref string) error { } func (r *Runner) buildRequest(spec sdk.WorkflowSpec, capability sdk.StepDefinition) (capabilities.CapabilityRequest, error) { - conf, err := values.NewMap(capability.Config) + env := exec.Env{ + Config: r.RawConfig, + Binary: []byte{}, + Secrets: r.Secrets, + } + config, err := exec.FindAndInterpolateEnvVars(capability.Config, env) + if err != nil { + return capabilities.CapabilityRequest{}, err + } + + conf, err := values.NewMap(config.(map[string]any)) if err != nil { return capabilities.CapabilityRequest{}, err } diff --git a/pkg/workflows/sdk/testutils/runner_test.go b/pkg/workflows/sdk/testutils/runner_test.go index 555e1f5c7..b9ec9c2c0 100644 --- a/pkg/workflows/sdk/testutils/runner_test.go +++ b/pkg/workflows/sdk/testutils/runner_test.go @@ -64,7 +64,7 @@ func TestRunner(t *testing.T) { }) t.Run("Run allows hard-coded values", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "tester", Owner: "ryan"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "trigger", Number: 100}.New(workflow) hardCodedInput := basicaction.NewActionOutputsFromFields(sdk.ConstantDefinition("hard-coded")) tTransform := sdk.Compute2[basictrigger.TriggerOutputs, basicaction.ActionOutputs, bool]( @@ -255,9 +255,13 @@ func TestRunner(t *testing.T) { }) } +type ComputeConfig struct { + Fidelity sdk.SecretValue +} + func TestCompute(t *testing.T) { t.Run("Inputs don't loose integer types when any is deserialized to", func(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "foo", Number: 100}.New(workflow) toMap := sdk.Compute1(workflow, "tomap", sdk.Compute1Inputs[string]{Arg0: trigger.CoolOutput()}, func(runtime sdk.Runtime, i0 string) (map[string]any, error) { v, err := 
strconv.Atoi(i0) @@ -286,10 +290,38 @@ func TestCompute(t *testing.T) { require.NoError(t, runner.Err()) }) + + t.Run("Config interpolates secrets", func(t *testing.T) { + workflow := sdk.NewWorkflowSpecFactory() + trigger := basictrigger.TriggerConfig{Name: "foo", Number: 100}.New(workflow) + + conf := ComputeConfig{ + Fidelity: sdk.Secret("fidelity"), + } + var gotC ComputeConfig + sdk.Compute1WithConfig(workflow, "tomap", &sdk.ComputeConfig[ComputeConfig]{Config: conf}, sdk.Compute1Inputs[string]{Arg0: trigger.CoolOutput()}, func(runtime sdk.Runtime, c ComputeConfig, i0 string) (ComputeConfig, error) { + gotC = c + return c, nil + }) + + runner := testutils.NewRunner(tests.Context(t)) + secretToken := "superSuperSecretToken" + runner.Secrets = map[string]string{ + "fidelity": secretToken, + } + basictriggertest.Trigger(runner, func() (basictrigger.TriggerOutputs, error) { + return basictrigger.TriggerOutputs{CoolOutput: "100"}, nil + }) + + runner.Run(workflow) + + require.NoError(t, runner.Err()) + assert.Equal(t, gotC.Fidelity, sdk.SecretValue(secretToken)) + }) } func registrationWorkflow() (*sdk.WorkflowSpecFactory, map[string]any, map[string]any) { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "tester", Owner: "ryan"}) + workflow := sdk.NewWorkflowSpecFactory() testTriggerConfig := map[string]any{"something": "from nothing"} trigger := sdk.Step[int]{ Definition: sdk.StepDefinition{ @@ -337,7 +369,7 @@ func setupAllRunnerMocks(t *testing.T, runner *testutils.Runner) (*testutils.Tri type actionTransform func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) func createBasicTestWorkflow(actionTransform actionTransform) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "tester", Owner: "ryan"}) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "trigger", Number: 100}.New(workflow) tTransform := sdk.Compute1[basictrigger.TriggerOutputs, 
bool]( workflow, diff --git a/pkg/workflows/sdk/testutils/runtime.go b/pkg/workflows/sdk/testutils/runtime.go index 5ae962663..8234b77b1 100644 --- a/pkg/workflows/sdk/testutils/runtime.go +++ b/pkg/workflows/sdk/testutils/runtime.go @@ -17,3 +17,7 @@ func (nr *NoopRuntime) Logger() logger.Logger { l, _ := logger.New() return l } + +func (nr *NoopRuntime) Emitter() sdk.MessageEmitter { + return nil +} diff --git a/pkg/workflows/secrets/secrets.go b/pkg/workflows/secrets/secrets.go new file mode 100644 index 000000000..830512eba --- /dev/null +++ b/pkg/workflows/secrets/secrets.go @@ -0,0 +1,206 @@ +package secrets + +import ( + "crypto/rand" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "strings" + + "golang.org/x/crypto/nacl/box" +) + +// this matches the secrets config file by the users, see the secretsConfig.yaml file +type SecretsConfig struct { + SecretsNames map[string][]string `yaml:"secretsNames"` +} + +// this is the payload that will be encrypted +type SecretPayloadToEncrypt struct { + WorkflowOwner string `json:"workflowOwner"` + Secrets map[string]string `json:"secrets"` +} + +// this holds the mapping of secret name (e.g. 
API_KEY) to the local environment variable name which points to the raw secret +type AssignedSecrets struct { + WorkflowSecretName string `json:"workflowSecretName"` + LocalEnvVarName string `json:"localEnvVarName"` +} + +// this is the metadata that will be stored in the encrypted secrets file +type Metadata struct { + WorkflowOwner string `json:"workflowOwner"` + CapabilitiesRegistry string `json:"capabilitiesRegistry"` + DonId string `json:"donId"` + DateEncrypted string `json:"dateEncrypted"` + NodePublicEncryptionKeys map[string]string `json:"nodePublicEncryptionKeys"` + EnvVarsAssignedToNodes map[string][]AssignedSecrets `json:"envVarsAssignedToNodes"` +} + +// this is the result of the encryption, will be used by the DON +type EncryptedSecretsResult struct { + EncryptedSecrets map[string]string `json:"encryptedSecrets"` + Metadata Metadata `json:"metadata"` +} + +func ContainsP2pId(p2pId [32]byte, p2pIds [][32]byte) bool { + for _, id := range p2pIds { + if id == p2pId { + return true + } + } + return false +} + +func EncryptSecretsForNodes( + workflowOwner string, + secrets map[string][]string, + encryptionPublicKeys map[string][32]byte, // map of p2pIds to the node's CSA (Ed25519) key. 
+ config SecretsConfig, +) (map[string]string, map[string][]AssignedSecrets, error) { + encryptedSecrets := make(map[string]string) + secretsEnvVarsByNode := make(map[string][]AssignedSecrets) // Only used for metadata + i := 0 + + // Encrypt secrets for each node + for p2pId, encryptionPublicKey := range encryptionPublicKeys { + secretsPayload := SecretPayloadToEncrypt{ + WorkflowOwner: workflowOwner, + Secrets: make(map[string]string), + } + + for secretName, secretValues := range secrets { + // Assign secrets to nodes in a round-robin fashion + secretValue := secretValues[i%len(secretValues)] + secretsPayload.Secrets[secretName] = secretValue + } + + // Marshal the secrets payload into JSON + secretsJSON, err := json.Marshal(secretsPayload) + if err != nil { + return nil, nil, err + } + + // Encrypt secrets payload + encrypted, err := box.SealAnonymous(nil, secretsJSON, &encryptionPublicKey, rand.Reader) + if err != nil { + return nil, nil, err + } + encryptedSecrets[p2pId] = base64.StdEncoding.EncodeToString(encrypted) + + // Generate metadata showing which nodes were assigned which environment variables + for secretName, envVarNames := range config.SecretsNames { + secretsEnvVarsByNode[p2pId] = append(secretsEnvVarsByNode[p2pId], AssignedSecrets{ + WorkflowSecretName: secretName, + LocalEnvVarName: envVarNames[i%len(envVarNames)], + }) + } + + i++ + } + + return encryptedSecrets, secretsEnvVarsByNode, nil +} + +type X25519Key interface { + Decrypt(box []byte) ([]byte, error) + PublicKey() [32]byte + PublicKeyString() string +} + +func DecryptSecretsForNode( + result EncryptedSecretsResult, + key X25519Key, + workflowOwner string, +) (map[string]string, error) { + var foundP2pId string + for p2pId, pubKey := range result.Metadata.NodePublicEncryptionKeys { + if pubKey == key.PublicKeyString() { + foundP2pId = p2pId + break + } + } + + if foundP2pId == "" { + return nil, fmt.Errorf("cannot find public key %s in nodePublicEncryptionKeys list", 
key.PublicKeyString()) + } + + bundle, ok := result.EncryptedSecrets[foundP2pId] + if !ok { + return nil, fmt.Errorf("cannot find secrets blob for node with public key %s", key.PublicKeyString()) + } + + bundleBytes, err := base64.StdEncoding.DecodeString(bundle) + if err != nil { + return nil, fmt.Errorf("cannot base64 decode bundle into bytes: %w", err) + } + + payloadBytes, err := key.Decrypt(bundleBytes) + if err != nil { + return nil, fmt.Errorf("cannot decrypt box: %w", err) + } + + var payload SecretPayloadToEncrypt + err = json.Unmarshal(payloadBytes, &payload) + if err != nil { + return nil, err + } + + if normalizeOwner(payload.WorkflowOwner) != normalizeOwner(workflowOwner) { + return nil, fmt.Errorf("invalid secrets bundle: got owner %s, expected %s", payload.WorkflowOwner, workflowOwner) + } + + return payload.Secrets, nil +} + +func normalizeOwner(owner string) string { + o := owner + if strings.HasPrefix(o, "0x") { + o = o[2:] + } + + o = strings.ToLower(o) + return o +} + +func ValidateEncryptedSecrets(secretsData []byte, encryptionPublicKeys map[string][32]byte, workflowOwner string) error { + var encryptedSecrets EncryptedSecretsResult + err := json.Unmarshal(secretsData, &encryptedSecrets) + if err != nil { + return fmt.Errorf("failed to parse encrypted secrets JSON: %w", err) + } + + if encryptedSecrets.Metadata.WorkflowOwner != workflowOwner { + return fmt.Errorf("the workflow owner in the encrypted secrets metadata: %s does not match the input workflow owner: %s", encryptedSecrets.Metadata.WorkflowOwner, workflowOwner) + } + + // Verify that the encryptedSecrets values are all valid base64 strings + for _, encryptedSecret := range encryptedSecrets.EncryptedSecrets { + _, err := base64.StdEncoding.DecodeString(encryptedSecret) + if err != nil { + return fmt.Errorf("the encrypted secrets JSON payload contains encrypted secrets which are not in base64 format: %w", err) + } + } + + // Check that the p2pIds keys in encryptedSecrets.EncryptedSecrets 
match the keys in encryptionPublicKeys + for p2pId := range encryptedSecrets.Metadata.NodePublicEncryptionKeys { + if _, ok := encryptedSecrets.EncryptedSecrets[p2pId]; !ok { + return fmt.Errorf("no encrypted secret found for node with p2pId: %s. Ensure secrets have been correctly encrypted for this DON", p2pId) + } + } + + // Check that the encryptionPublicKey values in the encryptedSecrets metadata match the keys in encryptionPublicKeys + for p2pId, keyFromMetadata := range encryptedSecrets.Metadata.NodePublicEncryptionKeys { + encryptionPublicKey, ok := encryptionPublicKeys[p2pId] + if !ok { + return fmt.Errorf("encryption key not found for node with p2pId: %s. Ensure secrets have been correctly encrypted for this DON", p2pId) + } + + if keyFromMetadata != hex.EncodeToString(encryptionPublicKey[:]) { + return fmt.Errorf("the encryption public key in the encrypted secrets metadata does not match the one in the workflow registry. Ensure secrets have been correctly encrypted for this DON") + } + } + + return nil +} diff --git a/pkg/workflows/secrets/secrets_test.go b/pkg/workflows/secrets/secrets_test.go new file mode 100644 index 000000000..bf346b69c --- /dev/null +++ b/pkg/workflows/secrets/secrets_test.go @@ -0,0 +1,288 @@ +package secrets + +import ( + "crypto/rand" + "encoding/base64" + "encoding/hex" + "encoding/json" + "errors" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/crypto/nacl/box" +) + +// Mock data for testing, see JSON in https://gist.github.com/shileiwill/c077b31193f3f1a124bf4b046a464bf5 +var ( + encryptionPublicKeys = map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {17, 65, 221, 30, 70, 121, 124, 237, 155, 15, 186, 212, 145, 21, 241, 133, 7, 246, 246, 230, 227, 204, 134, 231, 229, 186, 22, 158, 88, 100, 90, 220}, + "147d5cc651819b093cd2fdff9760f0f0f77b7ef7798d9e24fc6a350b7300e5d9": {65, 45, 198, 254, 72, 234, 78, 52, 186, 170, 
119, 218, 46, 59, 3, 45, 57, 185, 56, 89, 123, 111, 61, 97, 254, 126, 209, 131, 168, 39, 164, 49}, + "2934f31f278e5c60618f85861bd6add54a4525d79a642019bdc87d75d26372c3": {40, 185, 17, 67, 236, 145, 17, 121, 106, 125, 99, 225, 76, 28, 246, 187, 1, 180, 237, 89, 102, 122, 181, 79, 91, 199, 46, 190, 73, 200, 129, 190}, + "298834a041a056df58c839cb53d99b78558693042e54dff238f504f16d18d4b6": {72, 121, 1, 224, 192, 169, 211, 198, 110, 124, 252, 80, 243, 169, 227, 205, 191, 223, 27, 1, 7, 39, 61, 115, 217, 74, 145, 210, 120, 84, 85, 22}, + "5f247f61a6d5bfdd1d5064db0bd25fe443648133c6131975edb23481424e3d9c": {122, 22, 111, 188, 129, 110, 180, 164, 220, 182, 32, 209, 28, 60, 202, 197, 192, 133, 213, 107, 25, 114, 55, 65, 0, 17, 111, 135, 97, 157, 235, 184}, + "77224be9d052343b1d17156a1e463625c0d746468d4f5a44cddd452365b1d4ed": {7, 224, 255, 197, 123, 98, 99, 96, 77, 245, 23, 185, 75, 217, 134, 22, 148, 81, 163, 201, 6, 0, 168, 85, 187, 25, 33, 45, 197, 117, 222, 84}, + "adb6bf005cdb23f21e11b82d66b9f62628c2939640ed93028bf0dad3923c5a8b": {64, 59, 114, 240, 177, 179, 181, 245, 169, 27, 207, 237, 183, 242, 133, 153, 118, 117, 2, 160, 75, 91, 126, 6, 127, 207, 55, 130, 226, 62, 235, 156}, + "b96933429b1a81c811e1195389d7733e936b03e8086e75ea1fa92c61564b6c31": {117, 172, 99, 252, 151, 163, 30, 49, 22, 128, 132, 224, 222, 140, 205, 43, 234, 144, 5, 155, 96, 157, 150, 47, 62, 67, 252, 41, 108, 219, 162, 141}, + "d7e9f2252b09edf0802a65b60bc9956691747894cb3ab9fefd072adf742eb9f1": {180, 115, 9, 31, 225, 212, 219, 188, 38, 173, 113, 198, 123, 68, 50, 248, 244, 40, 14, 6, 186, 181, 226, 18, 42, 146, 244, 171, 139, 111, 242, 245}, + "e38c9f2760db006f070e9cc1bc1c2269ad033751adaa85d022fb760cbc5b5ef6": {69, 66, 244, 253, 46, 209, 80, 200, 201, 118, 179, 152, 2, 254, 61, 153, 74, 236, 58, 201, 79, 209, 30, 120, 23, 246, 147, 177, 201, 161, 218, 187}, + } + secrets = map[string][]string{ + "SECRET_A": {"one", "two", "three", "four"}, + "SECRET_B": {"all"}, + } + workflowOwner = 
"0xFbb30BD8E9D779044c3c30dd82e52a5FA1573388" + config = SecretsConfig{ + SecretsNames: map[string][]string{ + "SECRET_A": {"ENV_VAR_A_FOR_NODE_ONE", "ENV_VAR_A_FOR_NODE_TWO", "ENV_VAR_A_FOR_NODE_THREE", "ENV_VAR_A_FOR_NODE_FOUR"}, + "SECRET_B": {"ENV_VAR_B_FOR_ALL_NODES"}, + }, + } +) + +func TestEncryptSecretsForNodes(t *testing.T) { + encryptedSecrets, secretsEnvVarsByNode, err := EncryptSecretsForNodes(workflowOwner, secrets, encryptionPublicKeys, config) + // Ensure no error occurred + assert.NoError(t, err) + + // Ensure all p2pKeys are in encryptedSecrets map + assert.Equal(t, len(encryptionPublicKeys), len(encryptedSecrets)) + for p2pId := range encryptionPublicKeys { + _, exists := encryptedSecrets[p2pId] + assert.True(t, exists, "p2pId %s not found in encryptedSecrets", p2pId) + } + + // In envVarsAssignedToNodes, ensure SECRET_B has ENV_VAR_B_FOR_ALL_NODES for all nodes + for _, assignedSecrets := range secretsEnvVarsByNode { + for _, assignedSecret := range assignedSecrets { + if assignedSecret.WorkflowSecretName == "SECRET_B" { + assert.Contains(t, assignedSecret.LocalEnvVarName, "ENV_VAR_B_FOR_ALL_NODES") + } + } + } + + // In envVarsAssignedToNodes, ensure ENV_VAR_A_FOR_NODE_ONE and ENV_VAR_A_FOR_NODE_TWO shows up in 3 nodes and others in 2 nodes + nodeCount := make(map[string]int) + + for _, assignedSecrets := range secretsEnvVarsByNode { + for _, assignedSecret := range assignedSecrets { + nodeCount[assignedSecret.LocalEnvVarName]++ + } + } + + assert.Equal(t, 3, nodeCount["ENV_VAR_A_FOR_NODE_ONE"], "ENV_VAR_A_FOR_NODE_ONE should be assigned to 3 nodes") + assert.Equal(t, 3, nodeCount["ENV_VAR_A_FOR_NODE_TWO"], "ENV_VAR_A_FOR_NODE_TWO should be assigned to 3 nodes") + assert.Equal(t, 2, nodeCount["ENV_VAR_A_FOR_NODE_THREE"], "ENV_VAR_A_FOR_NODE_THREE should be assigned to 2 nodes") + assert.Equal(t, 2, nodeCount["ENV_VAR_A_FOR_NODE_FOUR"], "ENV_VAR_A_FOR_NODE_FOUR should be assigned to 2 nodes") +} + +type key struct { + publicKey *[32]byte + 
privateKey *[32]byte +} + +func (k *key) PublicKey() [32]byte { + return *k.publicKey +} + +func (k *key) PublicKeyString() string { + return base64.StdEncoding.EncodeToString((*k.publicKey)[:]) +} + +func (k *key) Decrypt(sealedBox []byte) ([]byte, error) { + b, ok := box.OpenAnonymous(nil, sealedBox, k.publicKey, k.privateKey) + if !ok { + return nil, errors.New("failed to decrypt box") + } + + return b, nil +} + +func newKey() (*key, error) { + pk, sk, err := box.GenerateKey(rand.Reader) + if err != nil { + return nil, err + } + + return &key{publicKey: pk, privateKey: sk}, nil +} + +func TestEncryptDecrypt(t *testing.T) { + k, err := newKey() + require.NoError(t, err) + + k2, err := newKey() + require.NoError(t, err) + + expectedSecrets := map[string]string{ + "foo": "fooToken", + "bar": "barToken", + } + secrets := map[string][]string{ + "foo": []string{expectedSecrets["foo"]}, + "bar": []string{expectedSecrets["bar"]}, + } + encryptionKeys := map[string][32]byte{ + "nodeAPeerID": k.PublicKey(), + "nodeBPeerID": k2.PublicKey(), + } + config := SecretsConfig{ + SecretsNames: map[string][]string{ + "foo": []string{"ENV_FOO"}, + "bar": []string{"ENV_BAR"}, + }, + } + + encryptedSecrets, _, err := EncryptSecretsForNodes(workflowOwner, secrets, encryptionKeys, config) + require.NoError(t, err) + + result := EncryptedSecretsResult{ + EncryptedSecrets: encryptedSecrets, + Metadata: Metadata{ + NodePublicEncryptionKeys: map[string]string{ + "nodeAPeerID": k.PublicKeyString(), + "nodeBPeerID": k2.PublicKeyString(), + }, + }, + } + t.Run("success", func(st *testing.T) { + gotSecrets, err := DecryptSecretsForNode(result, k, workflowOwner) + require.NoError(st, err) + + assert.Equal(st, expectedSecrets, gotSecrets) + + gotSecrets, err = DecryptSecretsForNode(result, k2, workflowOwner) + require.NoError(st, err) + + assert.Equal(st, expectedSecrets, gotSecrets) + }) + + t.Run("incorrect owner", func(st *testing.T) { + _, err = DecryptSecretsForNode(result, k, "wrong 
owner") + assert.ErrorContains(t, err, "invalid secrets bundle: got owner") + }) + + t.Run("owner without 0x prefix", func(st *testing.T) { + _, err = DecryptSecretsForNode(result, k, workflowOwner[2:]) + require.NoError(t, err) + }) + + t.Run("owner with lower casing", func(st *testing.T) { + _, err = DecryptSecretsForNode(result, k, strings.ToLower(workflowOwner)) + require.NoError(t, err) + }) + + t.Run("key not in metadata", func(st *testing.T) { + overriddenResult := EncryptedSecretsResult{ + EncryptedSecrets: encryptedSecrets, + Metadata: Metadata{ + NodePublicEncryptionKeys: map[string]string{ + "nodeBPeerID": k2.PublicKeyString(), + }, + }, + } + _, err = DecryptSecretsForNode(overriddenResult, k, workflowOwner) + assert.ErrorContains(t, err, "cannot find public key") + }) + + t.Run("missing secrets blob", func(st *testing.T) { + overriddenSecrets := map[string]string{ + "nodeAPeerID": encryptedSecrets["nodeAPeerID"], + } + overriddenResult := EncryptedSecretsResult{ + EncryptedSecrets: overriddenSecrets, + Metadata: Metadata{ + NodePublicEncryptionKeys: map[string]string{ + "nodeAPeerID": k.PublicKeyString(), + "nodeBPeerID": k2.PublicKeyString(), + }, + }, + } + _, err = DecryptSecretsForNode(overriddenResult, k2, workflowOwner) + assert.ErrorContains(t, err, "cannot find secrets blob") + }) + +} + +func TestValidateEncryptedSecrets(t *testing.T) { + // Helper function to generate a valid base64 encoded string + validBase64 := func(input string) string { + return base64.StdEncoding.EncodeToString([]byte(input)) + } + + // Define a key for testing + keyFromMetadata := [32]byte{1, 2, 3} + + // Valid JSON input with matching workflow owner + validInput := map[string]interface{}{ + "encryptedSecrets": map[string]string{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": validBase64("secret1"), + }, + "metadata": map[string]interface{}{ + "workflowOwner": "correctOwner", + "nodePublicEncryptionKeys": map[string]string{ + 
"09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": hex.EncodeToString(keyFromMetadata[:]), + }, + }, + } + + // Serialize the valid input + validData, _ := json.Marshal(validInput) + + // Define test cases + tests := []struct { + name string + inputData []byte + encryptionPublicKeys map[string][32]byte + workflowOwner string + shouldError bool + }{ + { + name: "Valid input", + inputData: validData, + workflowOwner: "correctOwner", + encryptionPublicKeys: map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {1, 2, 3}, + }, + shouldError: false, + }, + { + name: "Invalid base64 encoded secret", + inputData: []byte(`{"encryptedSecrets": {"09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": "invalid-base64!"}}`), + workflowOwner: "correctOwner", + encryptionPublicKeys: map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {1, 2, 3}, + }, + shouldError: true, + }, + { + name: "Missing public key", + inputData: validData, + workflowOwner: "correctOwner", + encryptionPublicKeys: map[string][32]byte{ + "some-other-id": {1, 2, 3}, + }, + shouldError: true, + }, + { + name: "Mismatched workflow owner", + inputData: validData, + workflowOwner: "incorrectOwner", + encryptionPublicKeys: map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {1, 2, 3}, + }, + shouldError: true, + }, + } + + // Run the test cases + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := ValidateEncryptedSecrets(test.inputData, test.encryptionPublicKeys, test.workflowOwner) + if (err != nil) != test.shouldError { + t.Errorf("Expected error: %v, got: %v", test.shouldError, err != nil) + } + }) + } +} diff --git a/pkg/workflows/utils.go b/pkg/workflows/utils.go index 561b118df..2d6816c75 100644 --- a/pkg/workflows/utils.go +++ b/pkg/workflows/utils.go @@ -3,6 +3,7 @@ package workflows import ( "crypto/sha256" "encoding/hex" + "strings" ) 
func EncodeExecutionID(workflowID, eventID string) (string, error) { @@ -19,3 +20,67 @@ func EncodeExecutionID(workflowID, eventID string) (string, error) { return hex.EncodeToString(s.Sum(nil)), nil } + +func GenerateWorkflowIDFromStrings(owner string, name string, workflow []byte, config []byte, secretsURL string) (string, error) { + ownerWithoutPrefix := owner + if strings.HasPrefix(owner, "0x") { + ownerWithoutPrefix = owner[2:] + } + + ownerb, err := hex.DecodeString(ownerWithoutPrefix) + if err != nil { + return "", err + } + + wid, err := GenerateWorkflowID(ownerb, name, workflow, config, secretsURL) + if err != nil { + return "", err + } + + return hex.EncodeToString(wid[:]), nil +} + +var ( + versionByte = byte(0) +) + +func GenerateWorkflowID(owner []byte, name string, workflow []byte, config []byte, secretsURL string) ([32]byte, error) { + s := sha256.New() + _, err := s.Write(owner) + if err != nil { + return [32]byte{}, err + } + _, err = s.Write([]byte(name)) + if err != nil { + return [32]byte{}, err + } + _, err = s.Write(workflow) + if err != nil { + return [32]byte{}, err + } + _, err = s.Write([]byte(config)) + if err != nil { + return [32]byte{}, err + } + _, err = s.Write([]byte(secretsURL)) + if err != nil { + return [32]byte{}, err + } + + sha := [32]byte(s.Sum(nil)) + sha[0] = versionByte + + return sha, nil +} + +// HashTruncateName returns the SHA-256 hash of the workflow name truncated to the first 10 bytes. 
+func HashTruncateName(name string) [10]byte { + // Compute SHA-256 hash of the input string + hash := sha256.Sum256([]byte(name)) + + // Truncate the hash to 10 bytes + var result [10]byte + copy(result[:], hash[:10]) + + return result +} diff --git a/pkg/workflows/utils_test.go b/pkg/workflows/utils_test.go index e66e7ae33..ae506a7df 100644 --- a/pkg/workflows/utils_test.go +++ b/pkg/workflows/utils_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func Test_EncodeExecutionID(t *testing.T) { @@ -38,3 +39,63 @@ func Test_EncodeExecutionID(t *testing.T) { reversed := hex.EncodeToString(s.Sum(nil)) assert.NotEqual(t, reversed, actual) } + +func Test_GenerateWorkflowIDFromStrings(t *testing.T) { + // With prefix + owner := "0x26729408f179371be6433b9585d8427f121bfe82" + got, err := GenerateWorkflowIDFromStrings(owner, "porporpore", []byte("workflow"), []byte("config"), "http://mysecrets.com") + require.NoError(t, err) + assert.NotNil(t, got) + + // Always starts with the version byte + assert.Equal(t, got[:2], hex.EncodeToString([]byte{versionByte})) + + // Without prefix + owner = "26729408f179371be6433b9585d8427f121bfe82" + got, err = GenerateWorkflowIDFromStrings(owner, "porporpore", []byte("workflow"), []byte("config"), "http://mysecrets.com") + require.NoError(t, err) + assert.NotNil(t, got) + + // Very short; empty but with a prefix + owner = "0x" + got, err = GenerateWorkflowIDFromStrings(owner, "porporpore", []byte("workflow"), []byte("config"), "http://mysecrets.com") + require.NoError(t, err) + assert.NotNil(t, got) + + owner = "invalid" + _, err = GenerateWorkflowIDFromStrings(owner, "porporpore", []byte("workflow"), []byte("config"), "http://mysecrets.com") + assert.ErrorContains(t, err, "encoding/hex") +} + +func TestNormalizeWorkflowName(t *testing.T) { + tt := []struct { + input string + expected [10]byte + }{ + { + input: "Hello, world!", + expected: [10]byte{0x31, 0x5f, 0x5b, 
0xdb, 0x76, 0xd0, 0x78, 0xc4, 0x3b, 0x8a}, + }, + { + input: "My Incredible Workflow Name", + expected: [10]byte{0x84, 0x00, 0x2e, 0xb9, 0xe2, 0xa0, 0x6b, 0x09, 0x97, 0x7c}, + }, + { + input: "You either die a hero, or live long enough to see yourself become the villain.", + expected: [10]byte{0x6b, 0xa1, 0xf7, 0xa6, 0xa0, 0x91, 0x95, 0x1a, 0x2d, 0xd2}, + }, + } + + for _, tc := range tt { + t.Run(tc.input, func(t *testing.T) { + // Call the function with the test input + result := HashTruncateName(tc.input) + + // Assert that the result is exactly the expected output + require.Equal(t, tc.expected, result) + + // Assert that the result is 10 bytes long + require.Len(t, result, 10) + }) + } +} diff --git a/pkg/workflows/wasm/host/module.go b/pkg/workflows/wasm/host/module.go index 8b31e77c6..601b69632 100644 --- a/pkg/workflows/wasm/host/module.go +++ b/pkg/workflows/wasm/host/module.go @@ -2,6 +2,7 @@ package host import ( "bytes" + "context" "encoding/base64" "encoding/binary" "encoding/json" @@ -18,43 +19,25 @@ import ( "github.com/bytecodealliance/wasmtime-go/v23" "google.golang.org/protobuf/proto" + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) -// safeMem returns a copy of the wasm module memory at the given pointer and size. -func safeMem(caller *wasmtime.Caller, ptr int32, size int32) ([]byte, error) { - mem := caller.GetExport("memory").Memory() - data := mem.UnsafeData(caller) - if ptr+size > int32(len(data)) { - return nil, errors.New("out of bounds memory access") - } - - cd := make([]byte, size) - copy(cd, data[ptr:ptr+size]) - return cd, nil -} - -// copyBuffer copies the given src byte slice into the wasm module memory at the given pointer and size. 
-func copyBuffer(caller *wasmtime.Caller, src []byte, ptr int32, size int32) int64 { - mem := caller.GetExport("memory").Memory() - rawData := mem.UnsafeData(caller) - if int32(len(rawData)) < ptr+size { - return -1 - } - buffer := rawData[ptr : ptr+size] - dataLen := int64(len(src)) - copy(buffer, src) - return dataLen +type RequestData struct { + fetchRequestsCounter int + response *wasmpb.Response + ctx func() context.Context } -type respStore struct { - m map[string]*wasmpb.Response +type store struct { + m map[string]*RequestData mu sync.RWMutex } -func (r *respStore) add(id string, resp *wasmpb.Response) error { +func (r *store) add(id string, req *RequestData) error { r.mu.Lock() defer r.mu.Unlock() @@ -63,42 +46,54 @@ func (r *respStore) add(id string, resp *wasmpb.Response) error { return fmt.Errorf("error storing response: response already exists for id: %s", id) } - r.m[id] = resp + r.m[id] = req return nil } -func (r *respStore) get(id string) (*wasmpb.Response, error) { +func (r *store) get(id string) (*RequestData, error) { r.mu.Lock() defer r.mu.Unlock() _, found := r.m[id] if !found { - return nil, fmt.Errorf("could not find response for id %s", id) + return nil, fmt.Errorf("could not find request data for id %s", id) } return r.m[id], nil } +func (r *store) delete(id string) { + r.mu.Lock() + defer r.mu.Unlock() + + delete(r.m, id) +} + var ( - defaultTickInterval = 100 * time.Millisecond - defaultTimeout = 300 * time.Millisecond - defaultMaxMemoryMBs = 64 - DefaultInitialFuel = uint64(100_000_000) + defaultTickInterval = 100 * time.Millisecond + defaultTimeout = 10 * time.Second + defaultMinMemoryMBs = 128 + DefaultInitialFuel = uint64(100_000_000) + defaultMaxFetchRequests = 5 ) type DeterminismConfig struct { // Seed is the seed used to generate cryptographically insecure random numbers in the module. 
Seed int64 } - type ModuleConfig struct { - TickInterval time.Duration - Timeout *time.Duration - MaxMemoryMBs int64 - InitialFuel uint64 - Logger logger.Logger - IsUncompressed bool - Fetch func(*wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) + TickInterval time.Duration + Timeout *time.Duration + MaxMemoryMBs int64 + MinMemoryMBs int64 + InitialFuel uint64 + Logger logger.Logger + IsUncompressed bool + Fetch func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) + MaxFetchRequests int + + // Labeler is used to emit messages from the module. + Labeler custmsg.MessageEmitter // If Determinism is set, the module will override the random_get function in the WASI API with // the provided seed to ensure deterministic behavior. @@ -106,11 +101,12 @@ type ModuleConfig struct { } type Module struct { - engine *wasmtime.Engine - module *wasmtime.Module - linker *wasmtime.Linker + engine *wasmtime.Engine + module *wasmtime.Module + linker *wasmtime.Linker + wconfig *wasmtime.Config - r *respStore + requestStore *store cfg *ModuleConfig @@ -143,11 +139,19 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) } if modCfg.Fetch == nil { - modCfg.Fetch = func(*wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + modCfg.Fetch = func(context.Context, *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return nil, fmt.Errorf("fetch not implemented") } } + if modCfg.MaxFetchRequests == 0 { + modCfg.MaxFetchRequests = defaultMaxFetchRequests + } + + if modCfg.Labeler == nil { + modCfg.Labeler = &unimplementedMessageEmitter{} + } + logger := modCfg.Logger if modCfg.TickInterval == 0 { @@ -158,11 +162,15 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) modCfg.Timeout = &defaultTimeout } - // Take the max of the default and the configured max memory mbs. 
+ if modCfg.MinMemoryMBs == 0 { + modCfg.MinMemoryMBs = int64(defaultMinMemoryMBs) + } + + // Take the max of the min and the configured max memory mbs. // We do this because Go requires a minimum of 16 megabytes to run, - // and local testing has shown that with less than 64 mbs, some + // and local testing has shown that with less than the min, some // binaries may error sporadically. - modCfg.MaxMemoryMBs = int64(math.Max(float64(defaultMaxMemoryMBs), float64(modCfg.MaxMemoryMBs))) + modCfg.MaxMemoryMBs = int64(math.Max(float64(modCfg.MinMemoryMBs), float64(modCfg.MaxMemoryMBs))) cfg := wasmtime.NewConfig() cfg.SetEpochInterruption(true) @@ -170,8 +178,10 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) cfg.SetConsumeFuel(true) } - engine := wasmtime.NewEngineWithConfig(cfg) + cfg.CacheConfigLoadDefault() + cfg.SetCraneliftOptLevel(wasmtime.OptLevelSpeedAndSize) + engine := wasmtime.NewEngineWithConfig(cfg) if !modCfg.IsUncompressed { rdr := brotli.NewReader(bytes.NewBuffer(binary)) decompedBinary, err := io.ReadAll(rdr) @@ -192,35 +202,14 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) return nil, fmt.Errorf("error creating wasi linker: %w", err) } - r := &respStore{ - m: map[string]*wasmpb.Response{}, + requestStore := &store{ + m: map[string]*RequestData{}, } err = linker.FuncWrap( "env", "sendResponse", - func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 { - b, innerErr := safeMem(caller, ptr, ptrlen) - if innerErr != nil { - logger.Errorf("error calling sendResponse: %s", err) - return ErrnoFault - } - - var resp wasmpb.Response - innerErr = proto.Unmarshal(b, &resp) - if innerErr != nil { - logger.Errorf("error calling sendResponse: %s", err) - return ErrnoFault - } - - innerErr = r.add(resp.Id, &resp) - if innerErr != nil { - logger.Errorf("error calling sendResponse: %s", err) - return ErrnoFault - } - - return ErrnoSuccess - }, + createSendResponseFn(logger, requestStore), 
) if err != nil { return nil, fmt.Errorf("error wrapping sendResponse func: %w", err) @@ -229,48 +218,7 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) err = linker.FuncWrap( "env", "log", - func(caller *wasmtime.Caller, ptr int32, ptrlen int32) { - b, innerErr := safeMem(caller, ptr, ptrlen) - if innerErr != nil { - logger.Errorf("error calling log: %s", err) - return - } - - var raw map[string]interface{} - innerErr = json.Unmarshal(b, &raw) - if innerErr != nil { - return - } - - level := raw["level"] - delete(raw, "level") - - msg := raw["msg"].(string) - delete(raw, "msg") - delete(raw, "ts") - - var args []interface{} - for k, v := range raw { - args = append(args, k, v) - } - - switch level { - case "debug": - logger.Debugw(msg, args...) - case "info": - logger.Infow(msg, args...) - case "warn": - logger.Warnw(msg, args...) - case "error": - logger.Errorw(msg, args...) - case "panic": - logger.Panicw(msg, args...) - case "fatal": - logger.Fatalw(msg, args...) - default: - logger.Infow(msg, args...) 
- } - }, + createLogFn(logger), ) if err != nil { return nil, fmt.Errorf("error wrapping log func: %w", err) @@ -279,18 +227,28 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) err = linker.FuncWrap( "env", "fetch", - fetchFn(logger, modCfg), + createFetchFn(logger, wasmRead, wasmWrite, wasmWriteUInt32, modCfg, requestStore), ) if err != nil { return nil, fmt.Errorf("error wrapping fetch func: %w", err) } + err = linker.FuncWrap( + "env", + "emit", + createEmitFn(logger, requestStore, modCfg.Labeler, wasmRead, wasmWrite, wasmWriteUInt32), + ) + if err != nil { + return nil, fmt.Errorf("error wrapping emit func: %w", err) + } + m := &Module{ - engine: engine, - module: mod, - linker: linker, + engine: engine, + module: mod, + linker: linker, + wconfig: cfg, - r: r, + requestStore: requestStore, cfg: modCfg, @@ -324,9 +282,29 @@ func (m *Module) Close() { m.linker.Close() m.engine.Close() m.module.Close() + m.wconfig.Close() } -func (m *Module) Run(request *wasmpb.Request) (*wasmpb.Response, error) { +func (m *Module) Run(ctx context.Context, request *wasmpb.Request) (*wasmpb.Response, error) { + ctxWithTimeout, cancel := context.WithTimeout(ctx, *m.cfg.Timeout) + defer cancel() + + if request == nil { + return nil, fmt.Errorf("invalid request: can't be nil") + } + + if request.Id == "" { + return nil, fmt.Errorf("invalid request: can't be empty") + } + + // we add the request context to the store to make it available to the Fetch fn + err := m.requestStore.add(request.Id, &RequestData{ctx: func() context.Context { return ctxWithTimeout }}) + if err != nil { + return nil, fmt.Errorf("error adding ctx to the store: %w", err) + } + // we delete the request data from the store when we're done + defer m.requestStore.delete(request.Id) + store := wasmtime.NewStore(m.engine) defer store.Close() @@ -338,6 +316,8 @@ func (m *Module) Run(request *wasmpb.Request) (*wasmpb.Response, error) { reqstr := base64.StdEncoding.EncodeToString(reqpb) 
wasi := wasmtime.NewWasiConfig() + defer wasi.Close() + wasi.SetArgv([]string{"wasi", reqstr}) store.SetWasi(wasi) @@ -374,22 +354,27 @@ func (m *Module) Run(request *wasmpb.Request) (*wasmpb.Response, error) { _, err = start.Call(store) switch { case containsCode(err, wasm.CodeSuccess): - resp, innerErr := m.r.get(request.Id) + storedRequest, innerErr := m.requestStore.get(request.Id) if innerErr != nil { return nil, innerErr } - return resp, nil + + if storedRequest.response == nil { + return nil, fmt.Errorf("could not find response for id %s", request.Id) + } + + return storedRequest.response, nil case containsCode(err, wasm.CodeInvalidResponse): return nil, fmt.Errorf("invariant violation: error marshaling response") case containsCode(err, wasm.CodeInvalidRequest): return nil, fmt.Errorf("invariant violation: invalid request to runner") case containsCode(err, wasm.CodeRunnerErr): - resp, innerErr := m.r.get(request.Id) + storedRequest, innerErr := m.requestStore.get(request.Id) if innerErr != nil { return nil, innerErr } - return nil, fmt.Errorf("error executing runner: %s: %w", resp.ErrMsg, innerErr) + return nil, fmt.Errorf("error executing runner: %s: %w", storedRequest.response.ErrMsg, err) case containsCode(err, wasm.CodeHostErr): return nil, fmt.Errorf("invariant violation: host errored during sendResponse") default: @@ -401,47 +386,353 @@ func containsCode(err error, code int) bool { return strings.Contains(err.Error(), fmt.Sprintf("exit status %d", code)) } -func fetchFn(logger logger.Logger, modCfg *ModuleConfig) func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 { - const fetchErrSfx = "error calling fetch" - return func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 { - b, innerErr := safeMem(caller, reqptr, reqptrlen) +// createSendResponseFn injects the dependency required by a WASM guest to +// send a response back to the host. 
+func createSendResponseFn(logger logger.Logger, requestStore *store) func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 { + return func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 { + b, innerErr := wasmRead(caller, ptr, ptrlen) if innerErr != nil { - logger.Errorf("%s: %s", fetchErrSfx, innerErr) + logger.Errorf("error calling sendResponse: %s", innerErr) return ErrnoFault } - req := &wasmpb.FetchRequest{} - innerErr = proto.Unmarshal(b, req) + var resp wasmpb.Response + innerErr = proto.Unmarshal(b, &resp) if innerErr != nil { - logger.Errorf("%s: %s", fetchErrSfx, innerErr) + logger.Errorf("error calling sendResponse: %s", innerErr) return ErrnoFault } - fetchResp, innerErr := modCfg.Fetch(req) + storedReq, innerErr := requestStore.get(resp.Id) if innerErr != nil { - logger.Errorf("%s: %s", fetchErrSfx, innerErr) + logger.Errorf("error calling sendResponse: %s", innerErr) return ErrnoFault } + storedReq.response = &resp + + return ErrnoSuccess + } +} + +func createFetchFn( + logger logger.Logger, + reader unsafeReaderFunc, + writer unsafeWriterFunc, + sizeWriter unsafeFixedLengthWriterFunc, + modCfg *ModuleConfig, + requestStore *store, +) func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 { + return func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 { + const errFetchSfx = "error calling fetch" + + // writeErr marshals and writes an error response to wasm + writeErr := func(err error) int32 { + resp := &wasmpb.FetchResponse{ + ExecutionError: true, + ErrorMessage: err.Error(), + } + + respBytes, perr := proto.Marshal(resp) + if perr != nil { + logger.Errorf("%s: %s", errFetchSfx, perr) + return ErrnoFault + } + + if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 { + logger.Errorf("%s: %s", errFetchSfx, errors.New("failed to write error response")) + return ErrnoFault + } + + if size := sizeWriter(caller, 
resplenptr, uint32(len(respBytes))); size == -1 { + logger.Errorf("%s: %s", errFetchSfx, errors.New("failed to write error response length")) + return ErrnoFault + } + + return ErrnoSuccess + } + + b, innerErr := reader(caller, reqptr, reqptrlen) + if innerErr != nil { + logger.Errorf("%s: %s", errFetchSfx, innerErr) + return writeErr(innerErr) + } + + req := &wasmpb.FetchRequest{} + innerErr = proto.Unmarshal(b, req) + if innerErr != nil { + logger.Errorf("%s: %s", errFetchSfx, innerErr) + return writeErr(innerErr) + } + + storedRequest, innerErr := requestStore.get(req.Id) + if innerErr != nil { + logger.Errorf("%s: %s", errFetchSfx, innerErr) + return writeErr(innerErr) + } + + // limit the number of fetch calls we can make per request + if storedRequest.fetchRequestsCounter >= modCfg.MaxFetchRequests { + logger.Errorf("%s: max number of fetch request %d exceeded", errFetchSfx, modCfg.MaxFetchRequests) + return writeErr(errors.New("max number of fetch requests exceeded")) + } + storedRequest.fetchRequestsCounter++ + + fetchResp, innerErr := modCfg.Fetch(storedRequest.ctx(), req) + if innerErr != nil { + logger.Errorf("%s: %s", errFetchSfx, innerErr) + return writeErr(innerErr) + } respBytes, innerErr := proto.Marshal(fetchResp) if innerErr != nil { - logger.Errorf("%s: %s", fetchErrSfx, innerErr) - return ErrnoFault + logger.Errorf("%s: %s", errFetchSfx, innerErr) + return writeErr(innerErr) } - size := copyBuffer(caller, respBytes, respptr, int32(len(respBytes))) - if size == -1 { - return ErrnoFault + if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 { + return writeErr(errors.New("failed to write response")) } - uint32Size := int32(4) - resplenBytes := make([]byte, uint32Size) - binary.LittleEndian.PutUint32(resplenBytes, uint32(len(respBytes))) - size = copyBuffer(caller, resplenBytes, resplenptr, uint32Size) - if size == -1 { - return ErrnoFault + if size := sizeWriter(caller, resplenptr, uint32(len(respBytes))); size == -1 { 
+ return writeErr(errors.New("failed to write response length")) } return ErrnoSuccess } } + +// createEmitFn injects dependencies and builds the emit function exposed by the WASM. Errors in +// Emit, if any, are returned in the Error Message of the response. +func createEmitFn( + l logger.Logger, + requestStore *store, + e custmsg.MessageEmitter, + reader unsafeReaderFunc, + writer unsafeWriterFunc, + sizeWriter unsafeFixedLengthWriterFunc, +) func(caller *wasmtime.Caller, respptr, resplenptr, msgptr, msglen int32) int32 { + logErr := func(err error) { + l.Errorf("error emitting message: %s", err) + } + + return func(caller *wasmtime.Caller, respptr, resplenptr, msgptr, msglen int32) int32 { + // writeErr marshals and writes an error response to wasm + writeErr := func(err error) int32 { + logErr(err) + + resp := &wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: err.Error(), + }, + } + + respBytes, perr := proto.Marshal(resp) + if perr != nil { + logErr(perr) + return ErrnoFault + } + + if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 { + logErr(errors.New("failed to write response")) + return ErrnoFault + } + + if size := sizeWriter(caller, resplenptr, uint32(len(respBytes))); size == -1 { + logErr(errors.New("failed to write response length")) + return ErrnoFault + } + + return ErrnoSuccess + } + + b, err := reader(caller, msgptr, msglen) + if err != nil { + return writeErr(err) + } + + reqID, msg, labels, err := toEmissible(b) + if err != nil { + return writeErr(err) + } + + req, err := requestStore.get(reqID) + if err != nil { + logErr(fmt.Errorf("failed to get request from store: %s", err)) + return writeErr(err) + } + + if err := e.WithMapLabels(labels).Emit(req.ctx(), msg); err != nil { + return writeErr(err) + } + + return ErrnoSuccess + } +} + +// createLogFn injects dependencies and builds the log function exposed by the WASM. 
+func createLogFn(logger logger.Logger) func(caller *wasmtime.Caller, ptr int32, ptrlen int32) { + return func(caller *wasmtime.Caller, ptr int32, ptrlen int32) { + b, innerErr := wasmRead(caller, ptr, ptrlen) + if innerErr != nil { + logger.Errorf("error calling log: %s", innerErr) + return + } + + var raw map[string]interface{} + innerErr = json.Unmarshal(b, &raw) + if innerErr != nil { + return + } + + level := raw["level"] + delete(raw, "level") + + msg := raw["msg"].(string) + delete(raw, "msg") + delete(raw, "ts") + + var args []interface{} + for k, v := range raw { + args = append(args, k, v) + } + + switch level { + case "debug": + logger.Debugw(msg, args...) + case "info": + logger.Infow(msg, args...) + case "warn": + logger.Warnw(msg, args...) + case "error": + logger.Errorw(msg, args...) + case "panic": + logger.Panicw(msg, args...) + case "fatal": + logger.Fatalw(msg, args...) + default: + logger.Infow(msg, args...) + } + } +} + +type unimplementedMessageEmitter struct{} + +func (u *unimplementedMessageEmitter) Emit(context.Context, string) error { + return errors.New("unimplemented") +} + +func (u *unimplementedMessageEmitter) WithMapLabels(map[string]string) custmsg.MessageEmitter { + return u +} + +func (u *unimplementedMessageEmitter) With(kvs ...string) custmsg.MessageEmitter { + return u +} + +func (u *unimplementedMessageEmitter) Labels() map[string]string { + return nil +} + +func toEmissible(b []byte) (string, string, map[string]string, error) { + msg := &wasmpb.EmitMessageRequest{} + if err := proto.Unmarshal(b, msg); err != nil { + return "", "", nil, err + } + + validated, err := toValidatedLabels(msg) + if err != nil { + return "", "", nil, err + } + + return msg.RequestId, msg.Message, validated, nil +} + +func toValidatedLabels(msg *wasmpb.EmitMessageRequest) (map[string]string, error) { + vl, err := values.FromMapValueProto(msg.Labels) + if err != nil { + return nil, err + } + + // Handle the case of no labels before unwrapping. 
+ if vl == nil { + vl = values.EmptyMap() + } + + var labels map[string]string + if err := vl.UnwrapTo(&labels); err != nil { + return nil, err + } + + return labels, nil +} + +// unsafeWriterFunc defines behavior for writing directly to wasm memory. A source slice of bytes +// is written to the location defined by the ptr. +type unsafeWriterFunc func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 + +// unsafeFixedLengthWriterFunc defines behavior for writing a uint32 value to wasm memory at the location defined +// by the ptr. +type unsafeFixedLengthWriterFunc func(c *wasmtime.Caller, ptr int32, val uint32) int64 + +// unsafeReaderFunc abstractly defines the behavior of reading from WASM memory. Returns a copy of +// the memory at the given pointer and size. +type unsafeReaderFunc func(c *wasmtime.Caller, ptr, len int32) ([]byte, error) + +// wasmMemoryAccessor is the default implementation for unsafely accessing the memory of the WASM module. +func wasmMemoryAccessor(caller *wasmtime.Caller) []byte { + return caller.GetExport("memory").Memory().UnsafeData(caller) +} + +// wasmRead returns a copy of the wasm module memory at the given pointer and size. +func wasmRead(caller *wasmtime.Caller, ptr int32, size int32) ([]byte, error) { + return read(wasmMemoryAccessor(caller), ptr, size) +} + +// Read acts on a byte slice that should represent an unsafely accessed slice of memory. It returns +// a copy of the memory at the given pointer and size. +func read(memory []byte, ptr int32, size int32) ([]byte, error) { + if size < 0 || ptr < 0 { + return nil, fmt.Errorf("invalid memory access: ptr: %d, size: %d", ptr, size) + } + + if ptr+size > int32(len(memory)) { + return nil, errors.New("out of bounds memory access") + } + + cd := make([]byte, size) + copy(cd, memory[ptr:ptr+size]) + return cd, nil +} + +// wasmWrite copies the given src byte slice into the wasm module memory at the given pointer and size. 
+func wasmWrite(caller *wasmtime.Caller, src []byte, ptr int32, size int32) int64 { + return write(wasmMemoryAccessor(caller), src, ptr, size) +} + +// wasmWriteUInt32 binary encodes and writes a uint32 to the wasm module memory at the given pointer. +func wasmWriteUInt32(caller *wasmtime.Caller, ptr int32, val uint32) int64 { + return writeUInt32(wasmMemoryAccessor(caller), ptr, val) +} + +// writeUInt32 binary encodes and writes a uint32 to the memory at the given pointer. +func writeUInt32(memory []byte, ptr int32, val uint32) int64 { + uint32Size := int32(4) + buffer := make([]byte, uint32Size) + binary.LittleEndian.PutUint32(buffer, val) + return write(memory, buffer, ptr, uint32Size) +} + +// write copies the given src byte slice into the memory at the given pointer and size. +func write(memory, src []byte, ptr, size int32) int64 { + if size < 0 || ptr < 0 { + return -1 + } + + if int32(len(memory)) < ptr+size { + return -1 + } + buffer := memory[ptr : ptr+size] + dataLen := int64(len(src)) + copy(buffer, src) + return dataLen +} diff --git a/pkg/workflows/wasm/host/module_test.go b/pkg/workflows/wasm/host/module_test.go new file mode 100644 index 000000000..a19c43fa2 --- /dev/null +++ b/pkg/workflows/wasm/host/module_test.go @@ -0,0 +1,671 @@ +package host + +import ( + "context" + "encoding/binary" + "sync" + "testing" + + "github.com/bytecodealliance/wasmtime-go/v23" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" + "github.com/smartcontractkit/chainlink-common/pkg/values/pb" + wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" +) + +type mockMessageEmitter struct { + e func(context.Context, string, map[string]string) error + labels map[string]string +} + +func (m 
*mockMessageEmitter) Emit(ctx context.Context, msg string) error { + return m.e(ctx, msg, m.labels) +} + +func (m *mockMessageEmitter) WithMapLabels(labels map[string]string) custmsg.MessageEmitter { + m.labels = labels + return m +} + +func (m *mockMessageEmitter) With(keyValues ...string) custmsg.MessageEmitter { + // do nothing + return m +} + +func (m *mockMessageEmitter) Labels() map[string]string { + return m.labels +} + +func newMockMessageEmitter(e func(context.Context, string, map[string]string) error) custmsg.MessageEmitter { + return &mockMessageEmitter{e: e} +} + +// Test_createEmitFn tests that the emit function used by the module is created correctly. Memory +// access functions are injected as mocks. +func Test_createEmitFn(t *testing.T) { + t.Run("success", func(t *testing.T) { + ctxKey := "key" + ctxValue := "test-value" + ctx := tests.Context(t) + ctx = context.WithValue(ctx, ctxKey, "test-value") + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + reqId := "random-id" + err := store.add( + reqId, + &RequestData{ctx: func() context.Context { return ctx }}) + require.NoError(t, err) + emitFn := createEmitFn( + logger.Test(t), + store, + newMockMessageEmitter(func(ctx context.Context, _ string, _ map[string]string) error { + v := ctx.Value(ctxKey) + assert.Equal(t, ctxValue, v) + return nil + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + RequestId: reqId, + Message: "hello, world", + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "foo": { + Value: &pb.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + 
assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("success without labels", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + emitFn := createEmitFn( + logger.Test(t), + store, + newMockMessageEmitter(func(_ context.Context, _ string, _ map[string]string) error { + return nil + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{}) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("successfully write error to memory on failure to read", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + store, + nil, + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return nil, assert.AnError + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, int32(len(respBytes)), 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode, "code mismatch") + }) + + t.Run("failure to emit writes error to memory", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + reqId := 
"random-id" + store.add(reqId, &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + }) + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + store, + newMockMessageEmitter(func(_ context.Context, _ string, _ map[string]string) error { + return assert.AnError + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + RequestId: reqId, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("bad read failure to unmarshal protos", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + badData := []byte("not proto bufs") + msg := &wasmpb.EmitMessageRequest{} + marshallErr := proto.Unmarshal(badData, msg) + assert.Error(t, marshallErr) + + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: marshallErr.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + store, + nil, + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return badData, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) 
int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) +} + +func TestCreateFetchFn(t *testing.T) { + const testID = "test-id" + t.Run("OK-success", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + MaxFetchRequests: 5, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_read_from_store", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return nil, assert.AnError + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + assert.Equal(t, assert.AnError.Error(), resp.ErrorMessage) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c 
*wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_unmarshal_request", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return []byte("bad-request-payload"), nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + expectedErr := "cannot parse invalid wire-format data" + assert.Contains(t, resp.ErrorMessage, expectedErr) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_find_id_in_store", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := 
&wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + expectedErr := "could not find request data for id test-id" + assert.Equal(t, expectedErr, resp.ErrorMessage) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_returns_an_error", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + expectedErr := assert.AnError.Error() + assert.Equal(t, expectedErr, resp.ErrorMessage) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return nil, assert.AnError + }, + MaxFetchRequests: 1, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_write_response", func(t *testing.T) { + 
store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return -1 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoFault, gotCode) + }) + + t.Run("NOK-fetch_fails_to_write_response_size", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return -1 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := 
fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoFault, gotCode) + }) +} + +func Test_read(t *testing.T) { + t.Run("successfully read from slice", func(t *testing.T) { + memory := []byte("hello, world") + got, err := read(memory, 0, int32(len(memory))) + assert.NoError(t, err) + assert.Equal(t, []byte("hello, world"), got) + }) + + t.Run("fail to read because out of bounds request", func(t *testing.T) { + memory := []byte("hello, world") + _, err := read(memory, 0, int32(len(memory)+1)) + assert.Error(t, err) + }) + + t.Run("fails to read because of invalid pointer or length", func(t *testing.T) { + memory := []byte("hello, world") + _, err := read(memory, 0, -1) + assert.Error(t, err) + + _, err = read(memory, -1, 1) + assert.Error(t, err) + }) + + t.Run("validate that memory is read only once copied", func(t *testing.T) { + memory := []byte("hello, world") + copied, err := read(memory, 0, int32(len(memory))) + assert.NoError(t, err) + + // mutate copy + copied[0] = 'H' + assert.Equal(t, []byte("Hello, world"), copied) + + // original memory is unchanged + assert.Equal(t, []byte("hello, world"), memory) + }) +} + +func Test_write(t *testing.T) { + t.Run("successfully write to slice", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, 12) + n := write(memory, giveSrc, 0, int32(len(giveSrc))) + assert.Equal(t, n, int64(len(giveSrc))) + assert.Equal(t, []byte("hello, world"), memory[:len(giveSrc)]) + }) + + t.Run("cannot write to slice because memory too small", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, len(giveSrc)-1) + n := write(memory, giveSrc, 0, int32(len(giveSrc))) + assert.Equal(t, n, int64(-1)) + }) + + t.Run("fails to write to invalid access", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, len(giveSrc)) + n := write(memory, giveSrc, 0, -1) + assert.Equal(t, n, int64(-1)) + + n = write(memory, giveSrc, -1, 1) + assert.Equal(t, n, 
int64(-1)) + }) +} + +// Test_writeUInt32 tests that a uint32 is written to memory correctly. +func Test_writeUInt32(t *testing.T) { + t.Run("success", func(t *testing.T) { + memory := make([]byte, 4) + n := writeUInt32(memory, 0, 42) + wantBuf := make([]byte, 4) + binary.LittleEndian.PutUint32(wantBuf, 42) + assert.Equal(t, n, int64(4)) + assert.Equal(t, wantBuf, memory) + }) +} + +func Test_toValidatedLabels(t *testing.T) { + t.Run("success", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_StringValue{ + StringValue: "value", + }, + }, + }, + }, + } + wantLabels := map[string]string{ + "test": "value", + } + gotLabels, err := toValidatedLabels(msg) + assert.NoError(t, err) + assert.Equal(t, wantLabels, gotLabels) + }) + + t.Run("success with empty labels", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{} + wantLabels := map[string]string{} + gotLabels, err := toValidatedLabels(msg) + assert.NoError(t, err) + assert.Equal(t, wantLabels, gotLabels) + }) + + t.Run("fails with non string", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_Int64Value{ + Int64Value: *proto.Int64(42), + }, + }, + }, + }, + } + _, err := toValidatedLabels(msg) + assert.Error(t, err) + }) +} + +func Test_toEmissible(t *testing.T) { + t.Run("success", func(t *testing.T) { + reqID := "random-id" + msg := &wasmpb.EmitMessageRequest{ + RequestId: reqID, + Message: "hello, world", + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_StringValue{ + StringValue: "value", + }, + }, + }, + }, + } + + b, err := proto.Marshal(msg) + assert.NoError(t, err) + + rid, gotMsg, gotLabels, err := toEmissible(b) + assert.NoError(t, err) + assert.Equal(t, "hello, world", gotMsg) + assert.Equal(t, map[string]string{"test": "value"}, gotLabels) + assert.Equal(t, reqID, rid) + }) + + 
t.Run("fails with bad message", func(t *testing.T) { + _, _, _, err := toEmissible([]byte("not proto bufs")) + assert.Error(t, err) + }) +} diff --git a/pkg/workflows/wasm/host/test/builderr/cmd/main.go b/pkg/workflows/wasm/host/test/builderr/cmd/main.go new file mode 100644 index 000000000..cc88a8079 --- /dev/null +++ b/pkg/workflows/wasm/host/test/builderr/cmd/main.go @@ -0,0 +1,23 @@ +package main + +import ( + "errors" + + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func BuildWorkflow(config []byte) (*sdk.WorkflowSpecFactory, error) { + // Do something that errors + return nil, errors.New("oops: I couldn't build this workflow") +} + +func main() { + runner := wasm.NewRunner() + workflow, err := BuildWorkflow(runner.Config()) + if err != nil { + runner.ExitWithError(err) + } + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/computepanic/cmd/main.go b/pkg/workflows/wasm/host/test/computepanic/cmd/main.go new file mode 100644 index 000000000..cb1f7a705 --- /dev/null +++ b/pkg/workflows/wasm/host/test/computepanic/cmd/main.go @@ -0,0 +1,41 @@ +package main + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +type foo struct { + thing string +} + +func (f *foo) doAThing() { + _ = f.thing +} + +func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory() + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + sdk.Compute1[basictrigger.TriggerOutputs, bool]( + workflow, + "transform", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + var f *foo 
+ f.doAThing() + return false, nil + }) + + return workflow +} + +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/dirs/cmd/main.go b/pkg/workflows/wasm/host/test/dirs/cmd/main.go index a7500974b..8599985f6 100644 --- a/pkg/workflows/wasm/host/test/dirs/cmd/main.go +++ b/pkg/workflows/wasm/host/test/dirs/cmd/main.go @@ -12,12 +12,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/emit/cmd/main.go b/pkg/workflows/wasm/host/test/emit/cmd/main.go new file mode 100644 index 000000000..0d36ca361 --- /dev/null +++ b/pkg/workflows/wasm/host/test/emit/cmd/main.go @@ -0,0 +1,38 @@ +//go:build wasip1 + +package main + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory() + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + sdk.Compute1[basictrigger.TriggerOutputs, bool]( + workflow, + "transform", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(rsdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + if err := rsdk.Emitter(). + With("test-string-field-key", "this is a test field content"). 
+ Emit("testing emit"); err != nil { + return false, err + } + return true, nil + }) + + return workflow +} + +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/env/cmd/main.go b/pkg/workflows/wasm/host/test/env/cmd/main.go index 9dc7190bb..372078b2f 100644 --- a/pkg/workflows/wasm/host/test/env/cmd/main.go +++ b/pkg/workflows/wasm/host/test/env/cmd/main.go @@ -13,12 +13,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/fetch/cmd/main.go b/pkg/workflows/wasm/host/test/fetch/cmd/main.go index d7a71e0a1..2eb8949c8 100644 --- a/pkg/workflows/wasm/host/test/fetch/cmd/main.go +++ b/pkg/workflows/wasm/host/test/fetch/cmd/main.go @@ -12,12 +12,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/fetchlimit/cmd/main.go b/pkg/workflows/wasm/host/test/fetchlimit/cmd/main.go new file mode 100644 index 000000000..1aaa19acd --- /dev/null +++ b/pkg/workflows/wasm/host/test/fetchlimit/cmd/main.go @@ -0,0 +1,45 @@ +//go:build wasip1 + +package main + +import ( + "net/http" + + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + 
+func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory() + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + sdk.Compute1[basictrigger.TriggerOutputs, bool]( + workflow, + "transform", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(rsdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + + for i := 0; i < 6; i++ { + _, err := rsdk.Fetch(sdk.FetchRequest{ + Method: http.MethodGet, + URL: "https://min-api.cryptocompare.com/data/pricemultifull?fsyms=ETH&tsyms=BTC", + }) + if err != nil { + return false, err + } + } + + return true, nil + }) + + return workflow +} +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/files/cmd/main.go b/pkg/workflows/wasm/host/test/files/cmd/main.go index ec5f0ac75..492ac47ce 100644 --- a/pkg/workflows/wasm/host/test/files/cmd/main.go +++ b/pkg/workflows/wasm/host/test/files/cmd/main.go @@ -12,12 +12,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/http/cmd/main.go b/pkg/workflows/wasm/host/test/http/cmd/main.go index 68e43662e..bacf8d8b3 100644 --- a/pkg/workflows/wasm/host/test/http/cmd/main.go +++ b/pkg/workflows/wasm/host/test/http/cmd/main.go @@ -12,12 +12,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} 
trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/log/cmd/main.go b/pkg/workflows/wasm/host/test/log/cmd/main.go index 4c3c04732..4dfbd79b2 100644 --- a/pkg/workflows/wasm/host/test/log/cmd/main.go +++ b/pkg/workflows/wasm/host/test/log/cmd/main.go @@ -10,12 +10,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/oom/cmd/main.go b/pkg/workflows/wasm/host/test/oom/cmd/main.go index 21ecb6965..a14775cb5 100644 --- a/pkg/workflows/wasm/host/test/oom/cmd/main.go +++ b/pkg/workflows/wasm/host/test/oom/cmd/main.go @@ -6,5 +6,5 @@ import "math" func main() { // allocate more bytes than the binary should be able to access, 64 megs - _ = make([]byte, int64(128*math.Pow(10, 6))) + _ = make([]byte, int64(512*math.Pow(10, 6))) } diff --git a/pkg/workflows/wasm/host/test/rand/cmd/main.go b/pkg/workflows/wasm/host/test/rand/cmd/main.go index fa908a108..ca56c82fd 100644 --- a/pkg/workflows/wasm/host/test/rand/cmd/main.go +++ b/pkg/workflows/wasm/host/test/rand/cmd/main.go @@ -13,9 +13,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{}, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} trigger := triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go b/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go index 2f0461bb4..4a2ae6d07 100644 --- a/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go +++ b/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go @@ -8,12 +8,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - 
workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} _ = triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/test/success/cmd/main.go b/pkg/workflows/wasm/host/test/success/cmd/main.go index 2ee9f7bd4..477ba097c 100644 --- a/pkg/workflows/wasm/host/test/success/cmd/main.go +++ b/pkg/workflows/wasm/host/test/success/cmd/main.go @@ -10,12 +10,7 @@ import ( ) func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "ryan", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} _ = triggerCfg.New(workflow) diff --git a/pkg/workflows/wasm/host/wasip1.go b/pkg/workflows/wasm/host/wasip1.go index 28950a16d..08235e23e 100644 --- a/pkg/workflows/wasm/host/wasip1.go +++ b/pkg/workflows/wasm/host/wasip1.go @@ -81,7 +81,7 @@ func clockTimeGet(caller *wasmtime.Caller, id int32, precision int64, resultTime uint64Size := int32(8) trg := make([]byte, uint64Size) binary.LittleEndian.PutUint64(trg, uint64(val)) - copyBuffer(caller, trg, resultTimestamp, uint64Size) + wasmWrite(caller, trg, resultTimestamp, uint64Size) return ErrnoSuccess } @@ -105,7 +105,7 @@ func pollOneoff(caller *wasmtime.Caller, subscriptionptr int32, eventsptr int32, return ErrnoInval } - subs, err := safeMem(caller, subscriptionptr, nsubscriptions*subscriptionLen) + subs, err := wasmRead(caller, subscriptionptr, nsubscriptions*subscriptionLen) if err != nil { return ErrnoFault } @@ -176,13 +176,13 @@ func pollOneoff(caller *wasmtime.Caller, subscriptionptr int32, eventsptr int32, binary.LittleEndian.PutUint32(rne, uint32(nsubscriptions)) // Write the number of events to `resultNevents` - size := copyBuffer(caller, rne, resultNevents, uint32Size) + size := 
wasmWrite(caller, rne, resultNevents, uint32Size) if size == -1 { return ErrnoFault } // Write the events to `events` - size = copyBuffer(caller, events, eventsptr, nsubscriptions*eventsLen) + size = wasmWrite(caller, events, eventsptr, nsubscriptions*eventsLen) if size == -1 { return ErrnoFault } @@ -221,7 +221,7 @@ func createRandomGet(cfg *ModuleConfig) func(caller *wasmtime.Caller, buf, bufLe } // Copy the random bytes into the wasm module memory - if n := copyBuffer(caller, randOutput, buf, bufLen); n != int64(len(randOutput)) { + if n := wasmWrite(caller, randOutput, buf, bufLen); n != int64(len(randOutput)) { return ErrnoFault } diff --git a/pkg/workflows/wasm/host/wasm.go b/pkg/workflows/wasm/host/wasm.go index d5d824460..2e0dea4d3 100644 --- a/pkg/workflows/wasm/host/wasm.go +++ b/pkg/workflows/wasm/host/wasm.go @@ -1,6 +1,7 @@ package host import ( + "context" "errors" "fmt" @@ -12,7 +13,7 @@ import ( wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) -func GetWorkflowSpec(modCfg *ModuleConfig, binary []byte, config []byte) (*sdk.WorkflowSpec, error) { +func GetWorkflowSpec(ctx context.Context, modCfg *ModuleConfig, binary []byte, config []byte) (*sdk.WorkflowSpec, error) { m, err := NewModule(modCfg, binary, WithDeterminism()) if err != nil { return nil, fmt.Errorf("could not instantiate module: %w", err) @@ -28,7 +29,7 @@ func GetWorkflowSpec(modCfg *ModuleConfig, binary []byte, config []byte) (*sdk.W SpecRequest: &emptypb.Empty{}, }, } - resp, err := m.Run(req) + resp, err := m.Run(ctx, req) if err != nil { return nil, err } diff --git a/pkg/workflows/wasm/host/wasm_test.go b/pkg/workflows/wasm/host/wasm_test.go index 4692ef96d..3e5335a9d 100644 --- a/pkg/workflows/wasm/host/wasm_test.go +++ b/pkg/workflows/wasm/host/wasm_test.go @@ -2,6 +2,7 @@ package host import ( "bytes" + "context" _ "embed" "fmt" "io" @@ -21,37 +22,46 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb" capabilitiespb 
"github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" valuespb "github.com/smartcontractkit/chainlink-common/pkg/values/pb" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) const ( - successBinaryLocation = "test/success/cmd/testmodule.wasm" - successBinaryCmd = "test/success/cmd" - failureBinaryLocation = "test/fail/cmd/testmodule.wasm" - failureBinaryCmd = "test/fail/cmd" - oomBinaryLocation = "test/oom/cmd/testmodule.wasm" - oomBinaryCmd = "test/oom/cmd" - sleepBinaryLocation = "test/sleep/cmd/testmodule.wasm" - sleepBinaryCmd = "test/sleep/cmd" - filesBinaryLocation = "test/files/cmd/testmodule.wasm" - filesBinaryCmd = "test/files/cmd" - dirsBinaryLocation = "test/dirs/cmd/testmodule.wasm" - dirsBinaryCmd = "test/dirs/cmd" - httpBinaryLocation = "test/http/cmd/testmodule.wasm" - httpBinaryCmd = "test/http/cmd" - envBinaryLocation = "test/env/cmd/testmodule.wasm" - envBinaryCmd = "test/env/cmd" - logBinaryLocation = "test/log/cmd/testmodule.wasm" - logBinaryCmd = "test/log/cmd" - fetchBinaryLocation = "test/fetch/cmd/testmodule.wasm" - fetchBinaryCmd = "test/fetch/cmd" - randBinaryLocation = "test/rand/cmd/testmodule.wasm" - randBinaryCmd = "test/rand/cmd" + successBinaryLocation = "test/success/cmd/testmodule.wasm" + successBinaryCmd = "test/success/cmd" + failureBinaryLocation = "test/fail/cmd/testmodule.wasm" + failureBinaryCmd = "test/fail/cmd" + oomBinaryLocation = "test/oom/cmd/testmodule.wasm" + oomBinaryCmd = "test/oom/cmd" + sleepBinaryLocation = "test/sleep/cmd/testmodule.wasm" + sleepBinaryCmd = "test/sleep/cmd" + filesBinaryLocation = "test/files/cmd/testmodule.wasm" + filesBinaryCmd = "test/files/cmd" + dirsBinaryLocation = "test/dirs/cmd/testmodule.wasm" + dirsBinaryCmd = "test/dirs/cmd" + httpBinaryLocation = 
"test/http/cmd/testmodule.wasm" + httpBinaryCmd = "test/http/cmd" + envBinaryLocation = "test/env/cmd/testmodule.wasm" + envBinaryCmd = "test/env/cmd" + logBinaryLocation = "test/log/cmd/testmodule.wasm" + logBinaryCmd = "test/log/cmd" + fetchBinaryLocation = "test/fetch/cmd/testmodule.wasm" + fetchBinaryCmd = "test/fetch/cmd" + fetchlimitBinaryLocation = "test/fetchlimit/cmd/testmodule.wasm" + fetchlimitBinaryCmd = "test/fetchlimit/cmd" + randBinaryLocation = "test/rand/cmd/testmodule.wasm" + randBinaryCmd = "test/rand/cmd" + emitBinaryLocation = "test/emit/cmd/testmodule.wasm" + emitBinaryCmd = "test/emit/cmd" + computePanicBinaryLocation = "test/computepanic/cmd/testmodule.wasm" + computePanicBinaryCmd = "test/computepanic/cmd" + buildErrorBinaryLocation = "test/builderr/cmd/testmodule.wasm" + buildErrorBinaryCmd = "test/builderr/cmd" ) -func createTestBinary(outputPath, path string, compress bool, t *testing.T) []byte { +func createTestBinary(outputPath, path string, uncompressed bool, t *testing.T) []byte { cmd := exec.Command("go", "build", "-o", path, fmt.Sprintf("github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/host/%s", outputPath)) // #nosec cmd.Env = append(os.Environ(), "GOOS=wasip1", "GOARCH=wasm") @@ -61,7 +71,7 @@ func createTestBinary(outputPath, path string, compress bool, t *testing.T) []by binary, err := os.ReadFile(path) require.NoError(t, err) - if !compress { + if uncompressed { return binary } @@ -77,44 +87,48 @@ func createTestBinary(outputPath, path string, compress bool, t *testing.T) []by } func Test_GetWorkflowSpec(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, true, t) - spec, err := GetWorkflowSpec( + _, err := GetWorkflowSpec( + ctx, &ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, }, binary, []byte(""), ) require.NoError(t, err) - - assert.Equal(t, spec.Name, "tester") - assert.Equal(t, spec.Owner, 
"ryan") } func Test_GetWorkflowSpec_UncompressedBinary(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, false, t) - spec, err := GetWorkflowSpec( + _, err := GetWorkflowSpec( + ctx, &ModuleConfig{ Logger: logger.Test(t), - IsUncompressed: true, + IsUncompressed: false, }, binary, []byte(""), ) require.NoError(t, err) - - assert.Equal(t, spec.Name, "tester") - assert.Equal(t, spec.Owner, "ryan") } func Test_GetWorkflowSpec_BinaryErrors(t *testing.T) { + ctx := tests.Context(t) failBinary := createTestBinary(failureBinaryCmd, failureBinaryLocation, true, t) _, err := GetWorkflowSpec( + ctx, &ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, }, failBinary, []byte(""), @@ -124,13 +138,17 @@ func Test_GetWorkflowSpec_BinaryErrors(t *testing.T) { } func Test_GetWorkflowSpec_Timeout(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, true, t) d := time.Duration(0) _, err := GetWorkflowSpec( + ctx, &ModuleConfig{ - Timeout: &d, - Logger: logger.Test(t), + Timeout: &d, + Logger: logger.Test(t), + IsUncompressed: true, }, binary, // use the success binary with a zero timeout []byte(""), @@ -139,13 +157,33 @@ func Test_GetWorkflowSpec_Timeout(t *testing.T) { assert.ErrorContains(t, err, "wasm trap: interrupt") } +func Test_GetWorkflowSpec_BuildError(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) + binary := createTestBinary(buildErrorBinaryCmd, buildErrorBinaryLocation, true, t) + + _, err := GetWorkflowSpec( + ctx, + &ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + }, + binary, + []byte(""), + ) + assert.ErrorContains(t, err, "oops") +} + func Test_Compute_Logs(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(logBinaryCmd, logBinaryLocation, true, t) logger, logs := logger.TestObserved(t, zapcore.InfoLevel) m, err := 
NewModule(&ModuleConfig{ - Logger: logger, - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger, + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return nil, nil }, }, binary) @@ -167,7 +205,7 @@ func Test_Compute_Logs(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Nil(t, err) require.Len(t, logs.AllUntimed(), 1) @@ -187,10 +225,184 @@ func Test_Compute_Logs(t *testing.T) { } } +func Test_Compute_Emit(t *testing.T) { + t.Parallel() + binary := createTestBinary(emitBinaryCmd, emitBinaryLocation, true, t) + + lggr := logger.Test(t) + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + WorkflowId: "workflow-id", + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + WorkflowExecutionId: "workflow-execution-id", + }, + }, + }, + }, + } + + fetchFunc := func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return nil, nil + } + + t.Run("successfully call emit with metadata in labels", func(t *testing.T) { + ctxKey := "key" + ctx := tests.Context(t) + ctxValue := "test-value" + ctx = context.WithValue(ctx, ctxKey, ctxValue) + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + IsUncompressed: true, + Labeler: newMockMessageEmitter(func(gotCtx context.Context, msg string, kvs map[string]string) error { + t.Helper() + + v := ctx.Value(ctxKey) + assert.Equal(t, ctxValue, v) + + assert.Equal(t, "testing emit", msg) + assert.Equal(t, "this is a test field content", kvs["test-string-field-key"]) + assert.Equal(t, "workflow-id", kvs["workflow_id"]) + assert.Equal(t, "workflow-name", kvs["workflow_name"]) + 
assert.Equal(t, "workflow-owner", kvs["workflow_owner_address"]) + assert.Equal(t, "workflow-execution-id", kvs["workflow_execution_id"]) + return nil + }), + }, binary) + require.NoError(t, err) + + m.Start() + + _, err = m.Run(ctx, req) + assert.Nil(t, err) + }) + + t.Run("failure on emit writes to error chain and logs", func(t *testing.T) { + lggr, logs := logger.TestObserved(t, zapcore.InfoLevel) + + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + IsUncompressed: true, + Labeler: newMockMessageEmitter(func(_ context.Context, msg string, kvs map[string]string) error { + t.Helper() + + assert.Equal(t, "testing emit", msg) + assert.Equal(t, "this is a test field content", kvs["test-string-field-key"]) + assert.Equal(t, "workflow-id", kvs["workflow_id"]) + assert.Equal(t, "workflow-name", kvs["workflow_name"]) + assert.Equal(t, "workflow-owner", kvs["workflow_owner_address"]) + assert.Equal(t, "workflow-execution-id", kvs["workflow_execution_id"]) + + return assert.AnError + }), + }, binary) + require.NoError(t, err) + + m.Start() + + ctx := tests.Context(t) + _, err = m.Run(ctx, req) + assert.Error(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) + + require.Len(t, logs.AllUntimed(), 1) + + expectedEntries := []Entry{ + { + Log: zapcore.Entry{Level: zapcore.ErrorLevel, Message: fmt.Sprintf("error emitting message: %s", assert.AnError)}, + }, + } + for i := range expectedEntries { + assert.Equal(t, expectedEntries[i].Log.Level, logs.AllUntimed()[i].Entry.Level) + assert.Equal(t, expectedEntries[i].Log.Message, logs.AllUntimed()[i].Entry.Message) + } + }) + + t.Run("failure on emit due to missing workflow identifying metadata", func(t *testing.T) { + lggr := logger.Test(t) + + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + IsUncompressed: true, + Labeler: newMockMessageEmitter(func(_ context.Context, msg string, labels map[string]string) error { + return nil + }), // never called + }, binary) + 
require.NoError(t, err) + + m.Start() + + req = &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + + ctx := tests.Context(t) + _, err = m.Run(ctx, req) + assert.Error(t, err) + assert.ErrorContains(t, err, "failed to create emission") + }) +} + +func Test_Compute_PanicIsRecovered(t *testing.T) { + t.Parallel() + binary := createTestBinary(computePanicBinaryCmd, computePanicBinaryLocation, true, t) + + ctx := tests.Context(t) + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + assert.ErrorContains(t, err, "invalid memory address or nil pointer dereference") +} + func Test_Compute_Fetch(t *testing.T) { + t.Parallel() binary := createTestBinary(fetchBinaryCmd, fetchBinaryLocation, true, t) t.Run("OK_default_runtime_cfg", func(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) expected := sdk.FetchResponse{ ExecutionError: false, Body: []byte("valid-response"), @@ -199,8 +411,9 @@ func Test_Compute_Fetch(t *testing.T) { } m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return &wasmpb.FetchResponse{ ExecutionError: 
expected.ExecutionError, Body: expected.Body, @@ -226,7 +439,7 @@ func Test_Compute_Fetch(t *testing.T) { }, }, } - response, err := m.Run(req) + response, err := m.Run(ctx, req) assert.Nil(t, err) actual := sdk.FetchResponse{} @@ -239,6 +452,8 @@ func Test_Compute_Fetch(t *testing.T) { }) t.Run("OK_custom_runtime_cfg", func(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) expected := sdk.FetchResponse{ ExecutionError: false, Body: []byte("valid-response"), @@ -247,8 +462,9 @@ func Test_Compute_Fetch(t *testing.T) { } m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return &wasmpb.FetchResponse{ ExecutionError: expected.ExecutionError, Body: expected.Body, @@ -277,7 +493,7 @@ func Test_Compute_Fetch(t *testing.T) { }, }, } - response, err := m.Run(req) + response, err := m.Run(ctx, req) assert.Nil(t, err) actual := sdk.FetchResponse{} @@ -290,11 +506,14 @@ func Test_Compute_Fetch(t *testing.T) { }) t.Run("NOK_fetch_error_returned", func(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) logger, logs := logger.TestObserved(t, zapcore.InfoLevel) m, err := NewModule(&ModuleConfig{ - Logger: logger, - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger, + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return nil, assert.AnError }, }, binary) @@ -316,8 +535,9 @@ func Test_Compute_Fetch(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.NotNil(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) require.Len(t, logs.AllUntimed(), 1) expectedEntries := []Entry{ @@ -330,28 +550,322 @@ func Test_Compute_Fetch(t *testing.T) { assert.Equal(t, expectedEntries[i].Log.Message, 
logs.AllUntimed()[i].Entry.Message) } }) + + t.Run("OK_context_propagation", func(t *testing.T) { + t.Parallel() + type testkey string + var key testkey = "test-key" + var expectedValue string = "test-value" + + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte(expectedValue), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: []byte(ctx.Value(key).(string)), + StatusCode: uint32(expected.StatusCode), + }, nil + }, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + RuntimeConfig: &wasmpb.RuntimeConfig{ + MaxFetchResponseSizeBytes: 2 * 1024, + }, + }, + }, + } + + ctx := context.WithValue(tests.Context(t), key, expectedValue) + response, err := m.Run(ctx, req) + assert.Nil(t, err) + + actual := sdk.FetchResponse{} + r, err := pb.CapabilityResponseFromProto(response.GetComputeResponse().GetResponse()) + require.NoError(t, err) + err = r.Value.Underlying["Value"].UnwrapTo(&actual) + require.NoError(t, err) + + assert.Equal(t, expected, actual) + }) + + t.Run("OK_context_cancelation", func(t *testing.T) { + t.Parallel() + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + select { + case <-ctx.Done(): + return nil, assert.AnError + default: + return &wasmpb.FetchResponse{}, nil + } + }, + }, binary) + require.NoError(t, err) + + 
m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + RuntimeConfig: &wasmpb.RuntimeConfig{ + MaxFetchResponseSizeBytes: 2 * 1024, + }, + }, + }, + } + + ctx, cancel := context.WithCancel(tests.Context(t)) + cancel() + _, err = m.Run(ctx, req) + require.NotNil(t, err) + assert.ErrorContains(t, err, fmt.Sprintf("error executing runner: error executing custom compute: %s", assert.AnError)) + }) + + t.Run("NOK_exceed_amout_of_defined_max_fetch_calls", func(t *testing.T) { + t.Parallel() + binary := createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t) + ctx := tests.Context(t) + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte("valid-response"), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: expected.Body, + StatusCode: uint32(expected.StatusCode), + }, nil + }, + MaxFetchRequests: 1, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + require.NotNil(t, err) + }) + + t.Run("NOK_exceed_amout_of_default_max_fetch_calls", func(t *testing.T) { + t.Parallel() + binary := createTestBinary(fetchlimitBinaryCmd, 
fetchlimitBinaryLocation, true, t) + ctx := tests.Context(t) + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte("valid-response"), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: expected.Body, + StatusCode: uint32(expected.StatusCode), + }, nil + }, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + require.NotNil(t, err) + }) + + t.Run("OK_making_up_to_max_fetch_calls", func(t *testing.T) { + t.Parallel() + binary := createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t) + ctx := tests.Context(t) + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte("valid-response"), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: expected.Body, + StatusCode: uint32(expected.StatusCode), + }, nil + }, + MaxFetchRequests: 6, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, 
+ Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + require.Nil(t, err) + }) + + t.Run("OK_multiple_request_reusing_module", func(t *testing.T) { + t.Parallel() + binary := createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t) + ctx := tests.Context(t) + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte("valid-response"), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: expected.Body, + StatusCode: uint32(expected.StatusCode), + }, nil + }, + MaxFetchRequests: 6, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + require.Nil(t, err) + + // we can reuse the request because after completion it gets deleted from the store + _, err = m.Run(ctx, req) + require.Nil(t, err) + }) + } func TestModule_Errors(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) - _, err = m.Run(nil) - assert.ErrorContains(t, err, "invariant violation: invalid request to runner") + _, err = m.Run(ctx, nil) + assert.ErrorContains(t, err, "invalid request: can't be nil") req := 
&wasmpb.Request{ + Id: "", + } + _, err = m.Run(ctx, req) + assert.ErrorContains(t, err, "invalid request: can't be empty") + + req = &wasmpb.Request{ Id: uuid.New().String(), } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "invalid request: message must be SpecRequest or ComputeRequest") req = &wasmpb.Request{ Id: uuid.New().String(), Message: &wasmpb.Request_ComputeRequest{}, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "invalid compute request: nil request") m.Start() @@ -368,14 +882,15 @@ func TestModule_Errors(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "invalid compute request: could not find compute function for id doesnt-exist") } func TestModule_Sandbox_Memory(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(oomBinaryCmd, oomBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -384,14 +899,17 @@ func TestModule_Sandbox_Memory(t *testing.T) { Id: uuid.New().String(), Message: &wasmpb.Request_SpecRequest{}, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "exit status 2") } func TestModule_Sandbox_SleepIsStubbedOut(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(sleepBinaryCmd, sleepBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + d := 1 * time.Millisecond + m, err := NewModule(&ModuleConfig{Timeout: &d, IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -402,7 +920,7 @@ func TestModule_Sandbox_SleepIsStubbedOut(t *testing.T) { } start := time.Now() - _, err = m.Run(req) + _, err = m.Run(ctx, req) end := time.Now() // The binary sleeps for 1 hour, @@ -413,10 +931,11 @@ func 
TestModule_Sandbox_SleepIsStubbedOut(t *testing.T) { } func TestModule_Sandbox_Timeout(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(sleepBinaryCmd, sleepBinaryLocation, true, t) tmt := 10 * time.Millisecond - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t), Timeout: &tmt}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t), Timeout: &tmt}, binary) require.NoError(t, err) m.Start() @@ -426,15 +945,17 @@ func TestModule_Sandbox_Timeout(t *testing.T) { Message: &wasmpb.Request_SpecRequest{}, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "interrupt") } func TestModule_Sandbox_CantReadFiles(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(filesBinaryCmd, filesBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -453,14 +974,16 @@ func TestModule_Sandbox_CantReadFiles(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "open /tmp/file") } func TestModule_Sandbox_CantCreateDir(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(dirsBinaryCmd, dirsBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -479,14 +1002,16 @@ func TestModule_Sandbox_CantCreateDir(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "mkdir") } func TestModule_Sandbox_HTTPRequest(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(httpBinaryCmd, httpBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := 
NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -505,14 +1030,16 @@ func TestModule_Sandbox_HTTPRequest(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.NotNil(t, err) } func TestModule_Sandbox_ReadEnv(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(envBinaryCmd, envBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -535,11 +1062,12 @@ func TestModule_Sandbox_ReadEnv(t *testing.T) { }, } // This will return an error if FOO == BAR in the WASM binary - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Nil(t, err) } func TestModule_Sandbox_RandomGet(t *testing.T) { + t.Parallel() req := &wasmpb.Request{ Id: uuid.New().String(), Message: &wasmpb.Request_ComputeRequest{ @@ -555,10 +1083,12 @@ func TestModule_Sandbox_RandomGet(t *testing.T) { }, } t.Run("success: deterministic override via module config", func(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(randBinaryCmd, randBinaryLocation, true, t) m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, Determinism: &DeterminismConfig{ Seed: 42, }, @@ -567,21 +1097,23 @@ func TestModule_Sandbox_RandomGet(t *testing.T) { m.Start() - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Nil(t, err) }) t.Run("success: default module config is non deterministic", func(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(randBinaryCmd, randBinaryLocation, true, t) m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, }, binary) require.NoError(t, err) m.Start() - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Error(t, err) assert.ErrorContains(t, err, "expected 
deterministic output") }) diff --git a/pkg/workflows/wasm/pb/wasm.go b/pkg/workflows/wasm/pb/wasm.go index 068fe4103..2065f1255 100644 --- a/pkg/workflows/wasm/pb/wasm.go +++ b/pkg/workflows/wasm/pb/wasm.go @@ -75,7 +75,7 @@ func WorkflowSpecToProto(spec *sdk.WorkflowSpec) (*WorkflowSpec, error) { if err != nil { return nil, fmt.Errorf("error translating step definition to proto: %w", err) } - ws.Targets = append(ws.Consensus, tt) + ws.Targets = append(ws.Targets, tt) } return ws, nil @@ -163,7 +163,7 @@ func ProtoToWorkflowSpec(spec *WorkflowSpec) (*sdk.WorkflowSpec, error) { if err != nil { return nil, fmt.Errorf("error translating step definition to proto: %w", err) } - ws.Targets = append(ws.Consensus, tt) + ws.Targets = append(ws.Targets, tt) } return ws, nil diff --git a/pkg/workflows/wasm/pb/wasm.pb.go b/pkg/workflows/wasm/pb/wasm.pb.go index 95a8839a0..d8d21aefb 100644 --- a/pkg/workflows/wasm/pb/wasm.pb.go +++ b/pkg/workflows/wasm/pb/wasm.pb.go @@ -587,22 +587,104 @@ func (*Response_ComputeResponse) isResponse_Message() {} func (*Response_SpecResponse) isResponse_Message() {} +// NOTE: This message was added because it is needed to be used as part of the request and for metrics. 
+type FetchRequestMetadata struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + WorkflowId string `protobuf:"bytes,1,opt,name=workflowId,proto3" json:"workflowId,omitempty"` + WorkflowName string `protobuf:"bytes,2,opt,name=workflowName,proto3" json:"workflowName,omitempty"` + WorkflowOwner string `protobuf:"bytes,3,opt,name=workflowOwner,proto3" json:"workflowOwner,omitempty"` + WorkflowExecutionId string `protobuf:"bytes,4,opt,name=workflowExecutionId,proto3" json:"workflowExecutionId,omitempty"` + DecodedWorkflowName string `protobuf:"bytes,5,opt,name=decodedWorkflowName,proto3" json:"decodedWorkflowName,omitempty"` +} + +func (x *FetchRequestMetadata) Reset() { + *x = FetchRequestMetadata{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FetchRequestMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FetchRequestMetadata) ProtoMessage() {} + +func (x *FetchRequestMetadata) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FetchRequestMetadata.ProtoReflect.Descriptor instead. 
+func (*FetchRequestMetadata) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{8} +} + +func (x *FetchRequestMetadata) GetWorkflowId() string { + if x != nil { + return x.WorkflowId + } + return "" +} + +func (x *FetchRequestMetadata) GetWorkflowName() string { + if x != nil { + return x.WorkflowName + } + return "" +} + +func (x *FetchRequestMetadata) GetWorkflowOwner() string { + if x != nil { + return x.WorkflowOwner + } + return "" +} + +func (x *FetchRequestMetadata) GetWorkflowExecutionId() string { + if x != nil { + return x.WorkflowExecutionId + } + return "" +} + +func (x *FetchRequestMetadata) GetDecodedWorkflowName() string { + if x != nil { + return x.DecodedWorkflowName + } + return "" +} + type FetchRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` - Method string `protobuf:"bytes,2,opt,name=method,proto3" json:"method,omitempty"` - Headers *pb1.Map `protobuf:"bytes,3,opt,name=headers,proto3" json:"headers,omitempty"` - Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` - TimeoutMs uint32 `protobuf:"varint,5,opt,name=timeoutMs,proto3" json:"timeoutMs,omitempty"` + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + Method string `protobuf:"bytes,2,opt,name=method,proto3" json:"method,omitempty"` + Headers *pb1.Map `protobuf:"bytes,3,opt,name=headers,proto3" json:"headers,omitempty"` + Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` + TimeoutMs uint32 `protobuf:"varint,5,opt,name=timeoutMs,proto3" json:"timeoutMs,omitempty"` + Id string `protobuf:"bytes,6,opt,name=id,proto3" json:"id,omitempty"` + Metadata *FetchRequestMetadata `protobuf:"bytes,7,opt,name=metadata,proto3" json:"metadata,omitempty"` } func (x *FetchRequest) Reset() { *x = FetchRequest{} if protoimpl.UnsafeEnabled { - mi := 
&file_workflows_wasm_pb_wasm_proto_msgTypes[8] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -615,7 +697,7 @@ func (x *FetchRequest) String() string { func (*FetchRequest) ProtoMessage() {} func (x *FetchRequest) ProtoReflect() protoreflect.Message { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -628,7 +710,7 @@ func (x *FetchRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use FetchRequest.ProtoReflect.Descriptor instead. func (*FetchRequest) Descriptor() ([]byte, []int) { - return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{8} + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{9} } func (x *FetchRequest) GetUrl() string { @@ -666,22 +748,37 @@ func (x *FetchRequest) GetTimeoutMs() uint32 { return 0 } +func (x *FetchRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *FetchRequest) GetMetadata() *FetchRequestMetadata { + if x != nil { + return x.Metadata + } + return nil +} + type FetchResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ExecutionError bool `protobuf:"varint,1,opt,name=executionError,proto3" json:"executionError,omitempty"` - ErrorMessage string `protobuf:"bytes,2,opt,name=errorMessage,proto3" json:"errorMessage,omitempty"` - StatusCode uint32 `protobuf:"varint,3,opt,name=statusCode,proto3" json:"statusCode,omitempty"` // NOTE: this is actually a uint8, but proto doesn't support this. 
- Headers *pb1.Map `protobuf:"bytes,4,opt,name=headers,proto3" json:"headers,omitempty"` - Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + ExecutionError bool `protobuf:"varint,1,opt,name=executionError,proto3" json:"executionError,omitempty"` + ErrorMessage string `protobuf:"bytes,2,opt,name=errorMessage,proto3" json:"errorMessage,omitempty"` + // NOTE: this is actually a uint8, but proto doesn't support this. + StatusCode uint32 `protobuf:"varint,3,opt,name=statusCode,proto3" json:"statusCode,omitempty"` + Headers *pb1.Map `protobuf:"bytes,4,opt,name=headers,proto3" json:"headers,omitempty"` + Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` } func (x *FetchResponse) Reset() { *x = FetchResponse{} if protoimpl.UnsafeEnabled { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -694,7 +791,7 @@ func (x *FetchResponse) String() string { func (*FetchResponse) ProtoMessage() {} func (x *FetchResponse) ProtoReflect() protoreflect.Message { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -707,7 +804,7 @@ func (x *FetchResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use FetchResponse.ProtoReflect.Descriptor instead. 
func (*FetchResponse) Descriptor() ([]byte, []int) { - return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{9} + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{10} } func (x *FetchResponse) GetExecutionError() bool { @@ -745,6 +842,163 @@ func (x *FetchResponse) GetBody() []byte { return nil } +type EmitMessageRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + Labels *pb1.Map `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` + RequestId string `protobuf:"bytes,3,opt,name=requestId,proto3" json:"requestId,omitempty"` +} + +func (x *EmitMessageRequest) Reset() { + *x = EmitMessageRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EmitMessageRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EmitMessageRequest) ProtoMessage() {} + +func (x *EmitMessageRequest) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EmitMessageRequest.ProtoReflect.Descriptor instead. 
+func (*EmitMessageRequest) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{11} +} + +func (x *EmitMessageRequest) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *EmitMessageRequest) GetLabels() *pb1.Map { + if x != nil { + return x.Labels + } + return nil +} + +func (x *EmitMessageRequest) GetRequestId() string { + if x != nil { + return x.RequestId + } + return "" +} + +type Error struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *Error) Reset() { + *x = Error{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Error) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Error) ProtoMessage() {} + +func (x *Error) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Error.ProtoReflect.Descriptor instead. 
+func (*Error) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{12} +} + +func (x *Error) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +type EmitMessageResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Error *Error `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *EmitMessageResponse) Reset() { + *x = EmitMessageResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EmitMessageResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EmitMessageResponse) ProtoMessage() {} + +func (x *EmitMessageResponse) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EmitMessageResponse.ProtoReflect.Descriptor instead. 
+func (*EmitMessageResponse) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{13} +} + +func (x *EmitMessageResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + var File_workflows_wasm_pb_wasm_proto protoreflect.FileDescriptor var file_workflows_wasm_pb_wasm_proto_rawDesc = []byte{ @@ -829,32 +1083,64 @@ var file_workflows_wasm_pb_wasm_proto_rawDesc = []byte{ 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x64, 0x6b, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x70, 0x65, 0x63, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x22, 0x91, 0x01, 0x0a, 0x0c, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, - 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, - 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x09, 0x74, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x22, 0xb6, 0x01, 0x0a, 0x0d, 0x46, 0x65, 0x74, - 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x65, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 
0x72, - 0x6f, 0x72, 0x12, 0x22, 0x0a, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x43, 0x6f, 0x64, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0xe4, 0x01, 0x0a, 0x14, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1e, 0x0a, + 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x22, 0x0a, + 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4f, 0x77, 0x6e, + 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x12, 0x30, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x13, 0x64, 0x65, 0x63, + 0x6f, 0x64, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, 0x65, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x64, 0x57, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 
0x6f, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xd8, 0x01, 0x0a, 0x0c, + 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, + 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x16, + 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, + 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, - 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, - 0x79, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, - 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, - 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, + 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0d, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, + 0x35, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x64, 0x6b, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xb6, 0x01, 0x0a, 0x0d, 0x46, 0x65, 0x74, 
0x63, 0x68, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x65, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x12, 0x22, 0x0a, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, + 0x64, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x43, 0x6f, 0x64, 0x65, 0x12, 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, + 0x61, 0x70, 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x62, + 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, + 0x71, 0x0a, 0x12, 0x45, 0x6d, 0x69, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, + 0x23, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x49, + 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x49, 0x64, 0x22, 0x21, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 
0x65, 0x22, 0x37, 0x0a, 0x13, 0x45, 0x6d, 0x69, 0x74, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x05, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x73, 0x64, + 0x6b, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x43, + 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, + 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, + 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, + 0x6b, 0x67, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x2f, 0x73, 0x64, 0x6b, + 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -869,7 +1155,7 @@ func file_workflows_wasm_pb_wasm_proto_rawDescGZIP() []byte { return file_workflows_wasm_pb_wasm_proto_rawDescData } -var file_workflows_wasm_pb_wasm_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_workflows_wasm_pb_wasm_proto_msgTypes = make([]protoimpl.MessageInfo, 14) var file_workflows_wasm_pb_wasm_proto_goTypes = []interface{}{ (*RuntimeConfig)(nil), // 0: sdk.RuntimeConfig (*ComputeRequest)(nil), // 1: sdk.ComputeRequest @@ -879,35 +1165,42 @@ var file_workflows_wasm_pb_wasm_proto_goTypes = []interface{}{ (*StepDefinition)(nil), // 5: sdk.StepDefinition (*WorkflowSpec)(nil), // 6: sdk.WorkflowSpec (*Response)(nil), // 7: sdk.Response - (*FetchRequest)(nil), // 8: sdk.FetchRequest - (*FetchResponse)(nil), // 9: sdk.FetchResponse - (*pb.CapabilityRequest)(nil), // 10: capabilities.CapabilityRequest - (*emptypb.Empty)(nil), // 11: google.protobuf.Empty - (*pb.CapabilityResponse)(nil), // 12: capabilities.CapabilityResponse - (*pb1.Map)(nil), // 13: values.Map + (*FetchRequestMetadata)(nil), // 8: sdk.FetchRequestMetadata + (*FetchRequest)(nil), // 9: sdk.FetchRequest + (*FetchResponse)(nil), // 10: 
sdk.FetchResponse + (*EmitMessageRequest)(nil), // 11: sdk.EmitMessageRequest + (*Error)(nil), // 12: sdk.Error + (*EmitMessageResponse)(nil), // 13: sdk.EmitMessageResponse + (*pb.CapabilityRequest)(nil), // 14: capabilities.CapabilityRequest + (*emptypb.Empty)(nil), // 15: google.protobuf.Empty + (*pb.CapabilityResponse)(nil), // 16: capabilities.CapabilityResponse + (*pb1.Map)(nil), // 17: values.Map } var file_workflows_wasm_pb_wasm_proto_depIdxs = []int32{ - 10, // 0: sdk.ComputeRequest.request:type_name -> capabilities.CapabilityRequest + 14, // 0: sdk.ComputeRequest.request:type_name -> capabilities.CapabilityRequest 0, // 1: sdk.ComputeRequest.runtimeConfig:type_name -> sdk.RuntimeConfig 1, // 2: sdk.Request.computeRequest:type_name -> sdk.ComputeRequest - 11, // 3: sdk.Request.specRequest:type_name -> google.protobuf.Empty - 12, // 4: sdk.ComputeResponse.response:type_name -> capabilities.CapabilityResponse - 13, // 5: sdk.StepInputs.mapping:type_name -> values.Map + 15, // 3: sdk.Request.specRequest:type_name -> google.protobuf.Empty + 16, // 4: sdk.ComputeResponse.response:type_name -> capabilities.CapabilityResponse + 17, // 5: sdk.StepInputs.mapping:type_name -> values.Map 4, // 6: sdk.StepDefinition.inputs:type_name -> sdk.StepInputs - 13, // 7: sdk.StepDefinition.config:type_name -> values.Map + 17, // 7: sdk.StepDefinition.config:type_name -> values.Map 5, // 8: sdk.WorkflowSpec.triggers:type_name -> sdk.StepDefinition 5, // 9: sdk.WorkflowSpec.actions:type_name -> sdk.StepDefinition 5, // 10: sdk.WorkflowSpec.consensus:type_name -> sdk.StepDefinition 5, // 11: sdk.WorkflowSpec.targets:type_name -> sdk.StepDefinition 3, // 12: sdk.Response.computeResponse:type_name -> sdk.ComputeResponse 6, // 13: sdk.Response.specResponse:type_name -> sdk.WorkflowSpec - 13, // 14: sdk.FetchRequest.headers:type_name -> values.Map - 13, // 15: sdk.FetchResponse.headers:type_name -> values.Map - 16, // [16:16] is the sub-list for method output_type - 16, // [16:16] is 
the sub-list for method input_type - 16, // [16:16] is the sub-list for extension type_name - 16, // [16:16] is the sub-list for extension extendee - 0, // [0:16] is the sub-list for field type_name + 17, // 14: sdk.FetchRequest.headers:type_name -> values.Map + 8, // 15: sdk.FetchRequest.metadata:type_name -> sdk.FetchRequestMetadata + 17, // 16: sdk.FetchResponse.headers:type_name -> values.Map + 17, // 17: sdk.EmitMessageRequest.labels:type_name -> values.Map + 12, // 18: sdk.EmitMessageResponse.error:type_name -> sdk.Error + 19, // [19:19] is the sub-list for method output_type + 19, // [19:19] is the sub-list for method input_type + 19, // [19:19] is the sub-list for extension type_name + 19, // [19:19] is the sub-list for extension extendee + 0, // [0:19] is the sub-list for field type_name } func init() { file_workflows_wasm_pb_wasm_proto_init() } @@ -1013,7 +1306,7 @@ func file_workflows_wasm_pb_wasm_proto_init() { } } file_workflows_wasm_pb_wasm_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FetchRequest); i { + switch v := v.(*FetchRequestMetadata); i { case 0: return &v.state case 1: @@ -1025,6 +1318,18 @@ func file_workflows_wasm_pb_wasm_proto_init() { } } file_workflows_wasm_pb_wasm_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FetchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchResponse); i { case 0: return &v.state @@ -1036,6 +1341,42 @@ func file_workflows_wasm_pb_wasm_proto_init() { return nil } } + file_workflows_wasm_pb_wasm_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EmitMessageRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return 
nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Error); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EmitMessageResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_workflows_wasm_pb_wasm_proto_msgTypes[2].OneofWrappers = []interface{}{ (*Request_ComputeRequest)(nil), @@ -1051,7 +1392,7 @@ func file_workflows_wasm_pb_wasm_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_workflows_wasm_pb_wasm_proto_rawDesc, NumEnums: 0, - NumMessages: 10, + NumMessages: 14, NumExtensions: 0, NumServices: 0, }, diff --git a/pkg/workflows/wasm/pb/wasm.proto b/pkg/workflows/wasm/pb/wasm.proto index 180b2cd12..e347602b8 100644 --- a/pkg/workflows/wasm/pb/wasm.proto +++ b/pkg/workflows/wasm/pb/wasm.proto @@ -8,9 +8,7 @@ import "capabilities/pb/capabilities.proto"; import "values/pb/values.proto"; import "google/protobuf/empty.proto"; -message RuntimeConfig { - int64 maxFetchResponseSizeBytes = 1; -} +message RuntimeConfig { int64 maxFetchResponseSizeBytes = 1; } message ComputeRequest { capabilities.CapabilityRequest request = 1; @@ -27,9 +25,7 @@ message Request { } } -message ComputeResponse { - capabilities.CapabilityResponse response = 1; -} +message ComputeResponse { capabilities.CapabilityResponse response = 1; } message StepInputs { string outputRef = 1; @@ -63,18 +59,41 @@ message Response { } } +// NOTE: This message was added because it is needed to be used as part of the request and for metrics. 
+message FetchRequestMetadata { + string workflowId = 1; + string workflowName = 2; + string workflowOwner = 3; + string workflowExecutionId = 4; + string decodedWorkflowName = 5; +} + message FetchRequest { string url = 1; string method = 2; values.Map headers = 3; bytes body = 4; uint32 timeoutMs = 5; + string id = 6; + FetchRequestMetadata metadata = 7; } message FetchResponse { bool executionError = 1; string errorMessage = 2; - uint32 statusCode = 3; // NOTE: this is actually a uint8, but proto doesn't support this. + + // NOTE: this is actually a uint8, but proto doesn't support this. + uint32 statusCode = 3; values.Map headers = 4; bytes body = 5; } + +message EmitMessageRequest { + string message = 1; + values.Map labels = 2; + string requestId = 3; +} + +message Error { string message = 1; } + +message EmitMessageResponse { Error error = 1; } diff --git a/pkg/workflows/wasm/runner.go b/pkg/workflows/wasm/runner.go index 1372117fa..90f93ee2a 100644 --- a/pkg/workflows/wasm/runner.go +++ b/pkg/workflows/wasm/runner.go @@ -26,24 +26,34 @@ var _ sdk.Runner = (*Runner)(nil) type Runner struct { sendResponse func(payload *wasmpb.Response) - sdkFactory func(cfg *RuntimeConfig) *Runtime + sdkFactory func(cfg *RuntimeConfig, opts ...func(*RuntimeConfig)) *Runtime args []string req *wasmpb.Request } func (r *Runner) Run(factory *sdk.WorkflowSpecFactory) { if r.req == nil { - req, err := r.parseRequest() - if err != nil { - r.sendResponse(errorResponse(unknownID, err)) + success := r.cacheRequest() + if !success { return } - - r.req = req } req := r.req + // We set this up *after* parsing the request, so that we can guarantee + // that we'll have access to the request object. 
+ defer func() { + if err := recover(); err != nil { + asErr, ok := err.(error) + if ok { + r.sendResponse(errorResponse(r.req.Id, asErr)) + } else { + r.sendResponse(errorResponse(r.req.Id, fmt.Errorf("caught panic: %+v", err))) + } + } + }() + resp := &wasmpb.Response{ Id: req.Id, } @@ -72,18 +82,27 @@ func (r *Runner) Run(factory *sdk.WorkflowSpecFactory) { func (r *Runner) Config() []byte { if r.req == nil { - req, err := r.parseRequest() - if err != nil { - r.sendResponse(errorResponse(unknownID, err)) + success := r.cacheRequest() + if !success { return nil } - - r.req = req } return r.req.Config } +func (r *Runner) ExitWithError(err error) { + if r.req == nil { + success := r.cacheRequest() + if !success { + return + } + } + + r.sendResponse(errorResponse(r.req.Id, err)) + return +} + func errorResponse(id string, err error) *wasmpb.Response { return &wasmpb.Response{ Id: id, @@ -91,6 +110,19 @@ func errorResponse(id string, err error) *wasmpb.Response { } } +func (r *Runner) cacheRequest() bool { + if r.req == nil { + req, err := r.parseRequest() + if err != nil { + r.sendResponse(errorResponse(unknownID, err)) + return false + } + + r.req = req + } + return true +} + func (r *Runner) parseRequest() (*wasmpb.Request, error) { // We expect exactly 2 args, i.e. `wasm `, // where is a base64 encoded protobuf message. 
@@ -156,7 +188,7 @@ func (r *Runner) handleComputeRequest(factory *sdk.WorkflowSpecFactory, id strin } // Extract the config from the request - drc := defaultRuntimeConfig() + drc := defaultRuntimeConfig(id, &creq.Metadata) if rc := computeReq.GetRuntimeConfig(); rc != nil { if rc.MaxFetchResponseSizeBytes != 0 { drc.MaxFetchResponseSizeBytes = rc.MaxFetchResponseSizeBytes diff --git a/pkg/workflows/wasm/runner_test.go b/pkg/workflows/wasm/runner_test.go index c8f3eda0a..05aacdcda 100644 --- a/pkg/workflows/wasm/runner_test.go +++ b/pkg/workflows/wasm/runner_test.go @@ -2,9 +2,14 @@ package wasm import ( "encoding/base64" + "encoding/binary" + "math/big" "testing" + "time" + "unsafe" "github.com/google/uuid" + "github.com/shopspring/decimal" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" @@ -12,8 +17,10 @@ import ( "google.golang.org/protobuf/types/known/emptypb" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictarget" "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" capabilitiespb "github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" @@ -82,83 +89,345 @@ func Test_Runner_Config(t *testing.T) { assert.Nil(t, gotResponse) } -func TestRunner_Run_ExecuteCompute(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "cedric", - }, - ) - - trigger := basictrigger.TriggerConfig{Name: "trigger", Number: 100}.New(workflow) - computeFn := func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, 
error) { - return true, nil - } - sdk.Compute1( - workflow, - "compute", - sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, - computeFn, - ) +type ValidStruct struct { + SomeInt int + SomeString string + SomeTime time.Time +} - var gotResponse *wasmpb.Response - responseFn := func(resp *wasmpb.Response) { - gotResponse = resp - } +type PrivateFieldStruct struct { + SomeInt int + SomeString string + somePrivateTime time.Time +} - m, err := values.NewMap(map[string]any{ - "cool_output": "a trigger event", - }) - require.NoError(t, err) +func TestRunner_Run_ExecuteCompute(t *testing.T) { + now := time.Now().UTC() - req := capabilities.CapabilityRequest{ - Config: values.EmptyMap(), - Inputs: m, - Metadata: capabilities.RequestMetadata{ - ReferenceID: "compute", + cases := []struct { + name string + expectedOutput any + compute func(*sdk.WorkflowSpecFactory, basictrigger.TriggerOutputsCap) + errorString string + }{ + // Success cases + { + name: "valid compute func - bigint", + expectedOutput: big.NewInt(1), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (*big.Int, error) { + return big.NewInt(1), nil + }, + ) + }, + errorString: "", }, - } - reqpb := capabilitiespb.CapabilityRequestToProto(req) - request := &wasmpb.Request{ - Id: uuid.New().String(), - Message: &wasmpb.Request_ComputeRequest{ - ComputeRequest: &wasmpb.ComputeRequest{ - Request: reqpb, + { + name: "valid compute func - bool", + expectedOutput: true, + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + return true, nil + }, + ) }, + errorString: "", + }, + 
{ + name: "valid compute func - bytes", + expectedOutput: []byte{3}, + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) ([]byte, error) { + return []byte{3}, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - decimal", + expectedOutput: decimal.NewFromInt(2), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (decimal.Decimal, error) { + return decimal.NewFromInt(2), nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - float64", + expectedOutput: float64(1.1), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (float64, error) { + return 1.1, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - int", + expectedOutput: int64(1), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (int, error) { + return 1, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - list", + expectedOutput: []interface{}([]interface{}{int64(1), int64(2), int64(3), int64(4)}), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + 
func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) ([]int, error) { + return []int{1, 2, 3, 4}, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - map", + expectedOutput: map[string]interface{}(map[string]interface{}{"test": int64(1)}), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (map[string]int, error) { + out := map[string]int{"test": 1} + return out, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - deep map", + expectedOutput: map[string]interface{}(map[string]interface{}{"test1": map[string]interface{}{"test2": int64(1)}}), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (map[string]map[string]int, error) { + out := map[string]map[string]int{"test1": {"test2": 1}} + return out, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - string", + expectedOutput: "hiya", + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (string, error) { + return "hiya", nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - struct", + expectedOutput: map[string]interface{}(map[string]interface{}{"SomeInt": int64(3), "SomeString": "hiya", "SomeTime": now}), + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, 
+ func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (ValidStruct, error) { + return ValidStruct{SomeString: "hiya", SomeTime: now, SomeInt: 3}, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - empty interface", + expectedOutput: nil, + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (interface{}, error) { + var empty interface{} + return empty, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - time", + expectedOutput: now, + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (time.Time, error) { + return now, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - any", + expectedOutput: now, + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (any, error) { + return now, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - nil", + expectedOutput: nil, + compute: func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (any, error) { + return nil, nil + }, + ) + }, + errorString: "", + }, + { + name: "valid compute func - private struct", + expectedOutput: map[string]interface{}(map[string]interface{}{"SomeInt": int64(3), "SomeString": "hiya"}), + compute: 
func(workflow *sdk.WorkflowSpecFactory, trigger basictrigger.TriggerOutputsCap) { + sdk.Compute1( + workflow, + "compute", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (PrivateFieldStruct, error) { + return PrivateFieldStruct{SomeString: "hiya", somePrivateTime: now, SomeInt: 3}, nil + }, + ) + }, + errorString: "", }, } - str, err := marshalRequest(request) - require.NoError(t, err) - runner := &Runner{ - args: []string{"wasm", str}, - sendResponse: responseFn, - sdkFactory: func(cfg *RuntimeConfig) *Runtime { return nil }, - } - runner.Run(workflow) + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + workflow := sdk.NewWorkflowSpecFactory() - assert.NotNil(t, gotResponse.GetComputeResponse()) + trigger := basictrigger.TriggerConfig{Name: "trigger", Number: 100}.New(workflow) - resp := gotResponse.GetComputeResponse().GetResponse() - assert.Equal(t, resp.Error, "") + tt.compute(workflow, trigger) - m, err = values.FromMapValueProto(resp.Value) - require.NoError(t, err) + var gotResponse *wasmpb.Response + responseFn := func(resp *wasmpb.Response) { + gotResponse = resp + } - unw, err := values.Unwrap(m) - require.NoError(t, err) + m, err := values.NewMap(map[string]any{ + "cool_output": "a trigger event", + }) + require.NoError(t, err) + + req := capabilities.CapabilityRequest{ + Config: values.EmptyMap(), + Inputs: m, + Metadata: capabilities.RequestMetadata{ + ReferenceID: "compute", + }, + } + reqpb := capabilitiespb.CapabilityRequestToProto(req) + request := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: reqpb, + }, + }, + } + str, err := marshalRequest(request) + require.NoError(t, err) + runner := &Runner{ + args: []string{"wasm", str}, + sendResponse: responseFn, + sdkFactory: func(cfg *RuntimeConfig, _ ...func(*RuntimeConfig)) *Runtime { + return nil + }, + } + 
runner.Run(workflow) + + if tt.errorString == "" { + assert.NotNil(t, gotResponse.GetComputeResponse()) + resp := gotResponse.GetComputeResponse().GetResponse() + assert.Equal(t, resp.Error, "") - assert.Equal(t, unw.(map[string]any)["Value"].(bool), true) + m, err = values.FromMapValueProto(resp.Value) + require.NoError(t, err) + + unw, err := values.Unwrap(m) + require.NoError(t, err) + + assert.Equal(t, tt.expectedOutput, unw.(map[string]any)["Value"]) + } else { + assert.Equal(t, tt.errorString, gotResponse.ErrMsg) + assert.Nil(t, gotResponse.GetComputeResponse()) + } + }) + } } func TestRunner_Run_GetWorkflowSpec(t *testing.T) { - workflow := sdk.NewWorkflowSpecFactory( - sdk.NewWorkflowParams{ - Name: "tester", - Owner: "cedric", - }, - ) + workflow := sdk.NewWorkflowSpecFactory() trigger := basictrigger.TriggerConfig{Name: "trigger", Number: 100}.New(workflow) + // Define and add a target to the workflow + targetInput := basictarget.TargetInput{CoolInput: trigger.CoolOutput()} + targetConfig := basictarget.TargetConfig{Name: "basictarget", Number: 150} + targetConfig.New(workflow, targetInput) computeFn := func(sdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { return true, nil } @@ -200,5 +469,173 @@ func TestRunner_Run_GetWorkflowSpec(t *testing.T) { // Do some massaging due to protos lossy conversion of types gotSpec.Triggers[0].Inputs.Mapping = map[string]any{} gotSpec.Triggers[0].Config["number"] = int64(gotSpec.Triggers[0].Config["number"].(uint64)) + gotSpec.Targets[0].Config["number"] = int64(gotSpec.Targets[0].Config["number"].(uint64)) assert.Equal(t, &gotSpec, spc) + + // Verify the target is included in the workflow spec + assert.Equal(t, targetConfig.Number, uint64(gotSpec.Targets[0].Config["number"].(int64))) +} + +// Test_createEmitFn validates the runtime's emit function implementation. Uses mocks of the +// imported wasip1 emit function. 
+func Test_createEmitFn(t *testing.T) { + var ( + l = logger.Test(t) + reqId = "random-id" + sdkConfig = &RuntimeConfig{ + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + RequestID: &reqId, + } + giveMsg = "testing guest" + giveLabels = map[string]string{ + "some-key": "some-value", + } + ) + + t.Run("success", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.NoError(t, err) + }) + + t.Run("successfully read error message when emit fails", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + // marshall the protobufs + b, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(b)) + copy(resp, b) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(b))) + + return 0 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) + }) + + t.Run("fail to deserialize response from memory", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + // b is a non-protobuf byte slice + b := []byte(assert.AnError.Error()) + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(b)) + copy(resp, b) + + // write the length of the response to 
memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(b))) + + return 0 + } + + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, "invalid wire-format data") + }) + + t.Run("fail with nonzero code from emit", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 42 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, "emit failed with errno 42") + }) +} + +func Test_createFetchFn(t *testing.T) { + var ( + l = logger.Test(t) + requestID = uuid.New().String() + sdkConfig = &RuntimeConfig{ + RequestID: &requestID, + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + } + ) + + t.Run("OK-success", func(t *testing.T) { + hostFetch := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeFetch := createFetchFn(sdkConfig, l, hostFetch) + response, err := runtimeFetch(sdk.FetchRequest{}) + assert.NoError(t, err) + assert.Equal(t, sdk.FetchResponse{ + Headers: map[string]any{}, + }, response) + }) + + t.Run("NOK-config_missing_request_id", func(t *testing.T) { + invalidConfig := &RuntimeConfig{ + RequestID: nil, + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + } + hostFetch := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeFetch := createFetchFn(invalidConfig, l, hostFetch) + 
_, err := runtimeFetch(sdk.FetchRequest{}) + assert.ErrorContains(t, err, "request ID is required to fetch") + }) + + t.Run("NOK-fetch_returns_handled_error", func(t *testing.T) { + hostFetch := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + fetchResponse := &wasmpb.FetchResponse{ + ExecutionError: true, + ErrorMessage: assert.AnError.Error(), + } + respBytes, perr := proto.Marshal(fetchResponse) + if perr != nil { + return 0 + } + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(respBytes)) + copy(resp, respBytes) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(respBytes))) + + return 0 + } + runtimeFetch := createFetchFn(sdkConfig, l, hostFetch) + _, err := runtimeFetch(sdk.FetchRequest{}) + assert.ErrorContains(t, err, assert.AnError.Error()) + }) } diff --git a/pkg/workflows/wasm/runner_wasip1.go b/pkg/workflows/wasm/runner_wasip1.go index 6a85a43db..c31aa7427 100644 --- a/pkg/workflows/wasm/runner_wasip1.go +++ b/pkg/workflows/wasm/runner_wasip1.go @@ -1,17 +1,12 @@ package wasm import ( - "encoding/binary" - "errors" - "fmt" "os" "unsafe" "google.golang.org/protobuf/proto" "github.com/smartcontractkit/chainlink-common/pkg/logger" - "github.com/smartcontractkit/chainlink-common/pkg/values" - "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) @@ -24,103 +19,59 @@ func log(respptr unsafe.Pointer, respptrlen int32) //go:wasmimport env fetch func fetch(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32 -const uint32Size = int32(4) - -func bufferToPointerLen(buf []byte) (unsafe.Pointer, int32) { - return unsafe.Pointer(&buf[0]), int32(len(buf)) -} +//go:wasmimport env emit +func emit(respptr unsafe.Pointer, resplenptr 
unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32 func NewRunner() *Runner { l := logger.NewWithSync(&wasmWriteSyncer{}) return &Runner{ - sendResponse: func(response *wasmpb.Response) { - pb, err := proto.Marshal(response) - if err != nil { - // We somehow couldn't marshal the response, so let's - // exit with a special error code letting the host know - // what happened. - os.Exit(CodeInvalidResponse) - } - - // unknownID will only be set when we've failed to parse - // the request. Like before, let's bubble this up. - if response.Id == unknownID { - os.Exit(CodeInvalidRequest) - } - - ptr, ptrlen := bufferToPointerLen(pb) - errno := sendResponse(ptr, ptrlen) - if errno != 0 { - os.Exit(CodeHostErr) - } - - code := CodeSuccess - if response.ErrMsg != "" { - code = CodeRunnerErr + sendResponse: sendResponseFn, + sdkFactory: func(sdkConfig *RuntimeConfig, opts ...func(*RuntimeConfig)) *Runtime { + for _, opt := range opts { + opt(sdkConfig) } - os.Exit(code) - }, - sdkFactory: func(sdkConfig *RuntimeConfig) *Runtime { return &Runtime{ - logger: l, - fetchFn: func(req sdk.FetchRequest) (sdk.FetchResponse, error) { - headerspb, err := values.NewMap(req.Headers) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to create headers map: %w", err) - } - - b, err := proto.Marshal(&wasmpb.FetchRequest{ - Url: req.URL, - Method: req.Method, - Headers: values.ProtoMap(headerspb), - Body: req.Body, - TimeoutMs: req.TimeoutMs, - }) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to marshal fetch request: %w", err) - } - reqptr, reqptrlen := bufferToPointerLen(b) - - respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) - respptr, _ := bufferToPointerLen(respBuffer) - - resplenBuffer := make([]byte, uint32Size) - resplenptr, _ := bufferToPointerLen(resplenBuffer) - - errno := fetch(respptr, resplenptr, reqptr, reqptrlen) - if errno != 0 { - return sdk.FetchResponse{}, errors.New("failed to execute fetch") - } - - 
responseSize := binary.LittleEndian.Uint32(resplenBuffer) - response := &wasmpb.FetchResponse{} - err = proto.Unmarshal(respBuffer[:responseSize], response) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to unmarshal fetch response: %w", err) - } - - fields := response.Headers.GetFields() - headersResp := make(map[string]any, len(fields)) - for k, v := range fields { - headersResp[k] = v - } - - return sdk.FetchResponse{ - ExecutionError: response.ExecutionError, - ErrorMessage: response.ErrorMessage, - StatusCode: uint8(response.StatusCode), - Headers: headersResp, - Body: response.Body, - }, nil - }, + logger: l, + fetchFn: createFetchFn(sdkConfig, l, fetch), + emitFn: createEmitFn(sdkConfig, l, emit), } }, args: os.Args, } } +// sendResponseFn implements sendResponse for import into WASM. +func sendResponseFn(response *wasmpb.Response) { + pb, err := proto.Marshal(response) + if err != nil { + // We somehow couldn't marshal the response, so let's + // exit with a special error code letting the host know + // what happened. + os.Exit(CodeInvalidResponse) + } + + // unknownID will only be set when we've failed to parse + // the request. Like before, let's bubble this up. 
+ if response.Id == unknownID { + os.Exit(CodeInvalidRequest) + } + + ptr, ptrlen := bufferToPointerLen(pb) + errno := sendResponse(ptr, ptrlen) + if errno != 0 { + os.Exit(CodeHostErr) + } + + code := CodeSuccess + if response.ErrMsg != "" { + code = CodeRunnerErr + } + + os.Exit(code) +} + type wasmWriteSyncer struct{} // Write is used to proxy log requests from the WASM binary back to the host diff --git a/pkg/workflows/wasm/sdk.go b/pkg/workflows/wasm/sdk.go index d6c29a009..f3d6b13ae 100644 --- a/pkg/workflows/wasm/sdk.go +++ b/pkg/workflows/wasm/sdk.go @@ -1,26 +1,47 @@ package wasm import ( + "encoding/binary" + "errors" + "fmt" + "unsafe" + + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/events" + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" + wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) +// Length of responses are encoded into 4 byte buffers in little endian. uint32Size is the size +// of that buffer. 
+const uint32Size = int32(4) + type Runtime struct { fetchFn func(req sdk.FetchRequest) (sdk.FetchResponse, error) + emitFn func(msg string, labels map[string]string) error logger logger.Logger } type RuntimeConfig struct { MaxFetchResponseSizeBytes int64 + RequestID *string + Metadata *capabilities.RequestMetadata } const ( defaultMaxFetchResponseSizeBytes = 5 * 1024 ) -func defaultRuntimeConfig() *RuntimeConfig { +func defaultRuntimeConfig(id string, md *capabilities.RequestMetadata) *RuntimeConfig { return &RuntimeConfig{ MaxFetchResponseSizeBytes: defaultMaxFetchResponseSizeBytes, + RequestID: &id, + Metadata: md, } } @@ -33,3 +54,216 @@ func (r *Runtime) Fetch(req sdk.FetchRequest) (sdk.FetchResponse, error) { func (r *Runtime) Logger() logger.Logger { return r.logger } + +func (r *Runtime) Emitter() sdk.MessageEmitter { + return newWasmGuestEmitter(r.emitFn) +} + +type wasmGuestEmitter struct { + base custmsg.MessageEmitter + emitFn func(string, map[string]string) error + labels map[string]string +} + +func newWasmGuestEmitter(emitFn func(string, map[string]string) error) wasmGuestEmitter { + return wasmGuestEmitter{ + emitFn: emitFn, + labels: make(map[string]string), + base: custmsg.NewLabeler(), + } +} + +func (w wasmGuestEmitter) Emit(msg string) error { + return w.emitFn(msg, w.labels) +} + +func (w wasmGuestEmitter) With(keyValues ...string) sdk.MessageEmitter { + newEmitter := newWasmGuestEmitter(w.emitFn) + newEmitter.base = w.base.With(keyValues...) + newEmitter.labels = newEmitter.base.Labels() + return newEmitter +} + +// createEmitFn builds the runtime's emit function implementation, which is a function +// that handles marshalling and unmarshalling messages for the WASM to act on. 
+func createEmitFn( + sdkConfig *RuntimeConfig, + l logger.Logger, + emit func(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32, +) func(string, map[string]string) error { + emitFn := func(msg string, labels map[string]string) error { + // Prepare the labels to be emitted + if sdkConfig.Metadata == nil { + return NewEmissionError(fmt.Errorf("metadata is required to emit")) + } + + labels, err := toEmitLabels(sdkConfig.Metadata, labels) + if err != nil { + return NewEmissionError(err) + } + + vm, err := values.NewMap(labels) + if err != nil { + return NewEmissionError(fmt.Errorf("could not wrap labels to map: %w", err)) + } + + // Marshal the message and labels into a protobuf message + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + RequestId: *sdkConfig.RequestID, + Message: msg, + Labels: values.ProtoMap(vm), + }) + if err != nil { + return err + } + + // Prepare the request to be sent to the host memory by allocating space for the + // response and response length buffers. + respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) + respptr, _ := bufferToPointerLen(respBuffer) + + resplenBuffer := make([]byte, uint32Size) + resplenptr, _ := bufferToPointerLen(resplenBuffer) + + // The request buffer is the wasm memory, get a pointer to the first element and the length + // of the protobuf message. 
+ reqptr, reqptrlen := bufferToPointerLen(b) + + // Emit the message via the method imported from the host + errno := emit(respptr, resplenptr, reqptr, reqptrlen) + if errno != 0 { + return NewEmissionError(fmt.Errorf("emit failed with errno %d", errno)) + } + + // Attempt to read and handle the response from the host memory + responseSize := binary.LittleEndian.Uint32(resplenBuffer) + response := &wasmpb.EmitMessageResponse{} + if err := proto.Unmarshal(respBuffer[:responseSize], response); err != nil { + l.Errorw("failed to unmarshal emit response", "error", err.Error()) + return NewEmissionError(err) + } + + if response.Error != nil && response.Error.Message != "" { + return NewEmissionError(errors.New(response.Error.Message)) + } + + return nil + } + + return emitFn +} + +// createFetchFn injects dependencies and creates a fetch function that can be used by the WASM +// binary. +func createFetchFn( + sdkConfig *RuntimeConfig, + l logger.Logger, + fetch func(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32, +) func(sdk.FetchRequest) (sdk.FetchResponse, error) { + fetchFn := func(req sdk.FetchRequest) (sdk.FetchResponse, error) { + headerspb, err := values.NewMap(req.Headers) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to create headers map: %w", err) + } + + if sdkConfig.RequestID == nil { + return sdk.FetchResponse{}, fmt.Errorf("request ID is required to fetch") + } + + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: *sdkConfig.RequestID, + Url: req.URL, + Method: req.Method, + Headers: values.ProtoMap(headerspb), + Body: req.Body, + TimeoutMs: req.TimeoutMs, + + Metadata: &wasmpb.FetchRequestMetadata{ + WorkflowId: sdkConfig.Metadata.WorkflowID, + WorkflowName: sdkConfig.Metadata.WorkflowName, + WorkflowOwner: sdkConfig.Metadata.WorkflowOwner, + WorkflowExecutionId: sdkConfig.Metadata.WorkflowExecutionID, + DecodedWorkflowName: sdkConfig.Metadata.DecodedWorkflowName, + }, + }) + if err 
!= nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to marshal fetch request: %w", err) + } + reqptr, reqptrlen := bufferToPointerLen(b) + + respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) + respptr, _ := bufferToPointerLen(respBuffer) + + resplenBuffer := make([]byte, uint32Size) + resplenptr, _ := bufferToPointerLen(resplenBuffer) + + errno := fetch(respptr, resplenptr, reqptr, reqptrlen) + if errno != 0 { + return sdk.FetchResponse{}, fmt.Errorf("fetch failed with errno %d", errno) + } + responseSize := binary.LittleEndian.Uint32(resplenBuffer) + response := &wasmpb.FetchResponse{} + err = proto.Unmarshal(respBuffer[:responseSize], response) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to unmarshal fetch response: %w", err) + } + if response.ExecutionError && response.ErrorMessage != "" { + return sdk.FetchResponse{ + ExecutionError: response.ExecutionError, + ErrorMessage: response.ErrorMessage, + }, errors.New(response.ErrorMessage) + } + + fields := response.Headers.GetFields() + headersResp := make(map[string]any, len(fields)) + for k, v := range fields { + headersResp[k] = v + } + + return sdk.FetchResponse{ + StatusCode: uint8(response.StatusCode), + Headers: headersResp, + Body: response.Body, + }, nil + } + return fetchFn +} + +// bufferToPointerLen returns a pointer to the first element of the buffer and the length of the buffer. 
+func bufferToPointerLen(buf []byte) (unsafe.Pointer, int32) { + return unsafe.Pointer(&buf[0]), int32(len(buf)) +} + +// toEmitLabels ensures that the required metadata is present in the labels map +func toEmitLabels(md *capabilities.RequestMetadata, labels map[string]string) (map[string]string, error) { + if md.WorkflowID == "" { + return nil, fmt.Errorf("must provide workflow id to emit event") + } + + if md.WorkflowName == "" { + return nil, fmt.Errorf("must provide workflow name to emit event") + } + + if md.WorkflowOwner == "" { + return nil, fmt.Errorf("must provide workflow owner to emit event") + } + + labels[events.LabelWorkflowExecutionID] = md.WorkflowExecutionID + labels[events.LabelWorkflowOwner] = md.WorkflowOwner + labels[events.LabelWorkflowID] = md.WorkflowID + labels[events.LabelWorkflowName] = md.WorkflowName + return labels, nil +} + +// EmissionError wraps all errors that occur during the emission process for the runtime to handle. +type EmissionError struct { + Wrapped error +} + +func NewEmissionError(err error) *EmissionError { + return &EmissionError{Wrapped: err} +} + +func (e *EmissionError) Error() string { + return fmt.Errorf("failed to create emission: %w", e.Wrapped).Error() +} diff --git a/pkg/workflows/wasm/sdk_test.go b/pkg/workflows/wasm/sdk_test.go new file mode 100644 index 000000000..312dba7c7 --- /dev/null +++ b/pkg/workflows/wasm/sdk_test.go @@ -0,0 +1,66 @@ +package wasm + +import ( + "testing" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + + "github.com/stretchr/testify/assert" +) + +func Test_toEmitLabels(t *testing.T) { + t.Run("successfully transforms metadata", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + gotLabels, err := toEmitLabels(md, empty) + assert.NoError(t, err) + + assert.Equal(t, map[string]string{ + "workflow_id": "workflow-id", 
+ "workflow_name": "workflow-name", + "workflow_owner_address": "workflow-owner", + "workflow_execution_id": "", + }, gotLabels) + }) + + t.Run("fails on missing workflow id", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow id") + }) + + t.Run("fails on missing workflow name", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow name") + }) + + t.Run("fails on missing workflow owner", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowName: "workflow-name", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow owner") + }) +} diff --git a/sonar-project.properties b/sonar-project.properties index a29d74421..3310f7904 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -2,6 +2,7 @@ sonar.projectKey=smartcontractkit_chainlink-common sonar.sources=. sonar.sourceEncoding=UTF-8 +sonar.python.version=3.8 # Full exclusions from the static analysis sonar.exclusions=\ @@ -16,18 +17,23 @@ sonar.exclusions=\ **/*report.xml,\ **/*.txt,\ **/*.abi,\ -**/*.bin +**/*.bin,\ +**/generated_*,\ +**/*_generated.go,\ +**/mock_*.go # Coverage exclusions sonar.coverage.exclusions=\ **/test/**/*,\ **/*_test.go,\ observability-lib/**,\ -**/fuzz/**/* +**/fuzz/**/*,\ +**/capabilities/**/*test/**/* + # Tests' root folder, inclusions (tests to check and count) and exclusions sonar.tests=. 
sonar.test.inclusions=**/*_test.go # Duplication exclusions -sonar.cpd.exclusions=observability-lib/** \ No newline at end of file +sonar.cpd.exclusions=**/observability-lib/**/* \ No newline at end of file