diff --git a/.github/workflows/helm-ci.yml b/.github/workflows/helm-ci.yml index 8ecb7ddab3ce..91bdd5d0235a 100644 --- a/.github/workflows/helm-ci.yml +++ b/.github/workflows/helm-ci.yml @@ -11,35 +11,9 @@ env: jobs: call-lint: - name: Lint Helm Chart - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v4 - - - name: Check Docs - run: | - docker run --rm --volume "$(pwd):/helm-docs" -u "$(id -u)" jnorwood/helm-docs:v1.11.0 - if ! git diff --exit-code; then - echo "Documentation not up to date. Please run helm-docs and commit changes!" >&2 - exit 1 - fi - - - name: Lint Yaml - run: make helm-lint - - - name: Lint Code Base - uses: docker://github/super-linter:v3.17.2 - env: - FILTER_REGEX_EXCLUDE: .*(CHANGELOG\.md|README\.md|Chart\.yaml|NOTES.txt).* - FILTER_REGEX_INCLUDE: .*production/helm/.* - VALIDATE_ALL_CODEBASE: false - VALIDATE_KUBERNETES_KUBEVAL: false - VALIDATE_YAML: false - VALIDATE_GO: false - DEFAULT_BRANCH: main - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - LOG_LEVEL: DEBUG + uses: grafana/helm-charts/.github/workflows/linter.yml@main + with: + filter_regex_include: .*production/helm/loki/.* call-test: name: Test Helm Chart runs-on: ubuntu-latest diff --git a/.github/workflows/helm-release.yaml b/.github/workflows/helm-release.yaml index c303939fdf39..1a22318fcf75 100644 --- a/.github/workflows/helm-release.yaml +++ b/.github/workflows/helm-release.yaml @@ -11,7 +11,7 @@ on: jobs: call-update-helm-repo: - uses: grafana/helm-charts/.github/workflows/update-helm-repo.yaml@main + uses: grafana/helm-charts/.github/workflows/update-helm-repo.yaml@70dbbb722dee3f2ee126e12684cc0e92a20972ed with: charts_dir: production/helm cr_configfile: production/helm/cr.yaml diff --git a/.github/workflows/helm-tagged-release-pr.yaml b/.github/workflows/helm-tagged-release-pr.yaml index fb6ed4315497..d237cb6af61b 100644 --- a/.github/workflows/helm-tagged-release-pr.yaml +++ b/.github/workflows/helm-tagged-release-pr.yaml @@ -33,7 +33,7 @@ jobs: 
bash .github/workflows/scripts/helm-tagged-release.sh ${RELEASE_VERSION} - name: Create Pull Request - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v7 with: token: ${{ steps.get_github_app_token.outputs.token }} title: "chore: release loki helm chart ${{ steps.update.outputs.new_chart_version }}" diff --git a/.github/workflows/helm-weekly-release-pr.yaml b/.github/workflows/helm-weekly-release-pr.yaml index 45f7d6e0288d..f863baf74999 100644 --- a/.github/workflows/helm-weekly-release-pr.yaml +++ b/.github/workflows/helm-weekly-release-pr.yaml @@ -41,7 +41,7 @@ jobs: bash .github/workflows/scripts/helm-weekly-release.sh -k - name: Create Pull Request - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v7 with: token: ${{ steps.get_github_app_token.outputs.token }} title: "chore: release loki helm chart ${{ steps.update-k.outputs.new_chart_version }}" @@ -57,7 +57,7 @@ jobs: bash .github/workflows/scripts/helm-weekly-release.sh - name: Create Pull Request - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v7 with: token: ${{ steps.get_github_app_token.outputs.token }} title: "chore: release loki helm chart ${{ steps.update.outputs.new_chart_version }}" diff --git a/.github/workflows/operator-bundle.yaml b/.github/workflows/operator-bundle.yaml index 39630e45b8f8..e99fc0865612 100644 --- a/.github/workflows/operator-bundle.yaml +++ b/.github/workflows/operator-bundle.yaml @@ -19,7 +19,7 @@ jobs: go: ['1.22'] steps: - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go diff --git a/.github/workflows/operator-images.yaml b/.github/workflows/operator-images.yaml index 768b6acf0010..a2c58a448d9a 100644 --- a/.github/workflows/operator-images.yaml +++ b/.github/workflows/operator-images.yaml @@ -48,7 +48,7 @@ jobs: echo "IMAGE_TAGS=$IMAGE_TAGS" >> $GITHUB_OUTPUT - name: Build and publish image on quay.io - 
uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./operator push: true @@ -86,7 +86,7 @@ jobs: echo "IMAGE_TAGS=$IMAGE_TAGS" >> $GITHUB_OUTPUT - name: Build and publish image on quay.io - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./operator/bundle/openshift file: ./operator/bundle/openshift/bundle.Dockerfile @@ -125,7 +125,7 @@ jobs: echo "IMAGE_TAGS=$IMAGE_TAGS" >> $GITHUB_OUTPUT - name: Build and publish image on quay.io - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./operator file: ./operator/calculator.Dockerfile diff --git a/.github/workflows/operator-scorecard.yaml b/.github/workflows/operator-scorecard.yaml index 1a067a0ea140..4fd8fe5852d3 100644 --- a/.github/workflows/operator-scorecard.yaml +++ b/.github/workflows/operator-scorecard.yaml @@ -19,7 +19,7 @@ jobs: go: ['1.22'] steps: - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go diff --git a/.github/workflows/operator.yaml b/.github/workflows/operator.yaml index 639746aeb5ac..38b3b61196de 100644 --- a/.github/workflows/operator.yaml +++ b/.github/workflows/operator.yaml @@ -21,7 +21,7 @@ jobs: - name: Install make run: sudo apt-get install make - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go @@ -43,13 +43,13 @@ jobs: - name: Install make run: sudo apt-get install make - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go - uses: actions/checkout@v4 - name: Lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v6 with: version: v1.61.0 args: --timeout=5m @@ -69,7 +69,7 @@ jobs: - name: Install make run: sudo apt-get install make - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go @@ 
-90,7 +90,7 @@ jobs: - name: Install make run: sudo apt-get install make - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go @@ -111,7 +111,7 @@ jobs: - name: Install make run: sudo apt-get install make - name: Set up Go 1.x - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go }} id: go diff --git a/.github/workflows/promtail-windows-test.yml b/.github/workflows/promtail-windows-test.yml index 90ccf72ad1be..0aa131d769ab 100644 --- a/.github/workflows/promtail-windows-test.yml +++ b/.github/workflows/promtail-windows-test.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Go ${{ matrix.go-version }} - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: go-version: ${{ matrix.go-version }} # You can test your matrix by printing the current Go version diff --git a/.github/workflows/syft-sbom-ci.yml b/.github/workflows/syft-sbom-ci.yml index 41c6feb0a921..8c5e437c4a34 100644 --- a/.github/workflows/syft-sbom-ci.yml +++ b/.github/workflows/syft-sbom-ci.yml @@ -14,7 +14,7 @@ jobs: uses: actions/checkout@v4 - name: Anchore SBOM Action - uses: anchore/sbom-action@v0.17.7 + uses: anchore/sbom-action@v0.17.8 with: artifact-name: ${{ github.event.repository.name }}-spdx.json diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 23ebc2fd685e..3c5971ad53a1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "3.2.1", + ".": "3.3.0", "operator": "0.7.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index e5ce39cc310c..8046a5abfd69 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,233 @@ # Changelog -## Unreleased +## [3.3.0](https://github.com/grafana/loki/compare/v3.2.1...v3.3.0) (2024-11-19) + +### ⚠ BREAKING CHANGES + +* **blooms:** Introduce a new block schema (V3) ([#14038](https://github.com/grafana/loki/issues/14038)) +* **blooms:** Index structured 
metadata into blooms ([#14061](https://github.com/grafana/loki/issues/14061)) +* **operator:** Migrate project layout to kubebuilder go/v4 ([#14447](https://github.com/grafana/loki/issues/14447)) +* **operator:** Rename loki api go module ([#14568](https://github.com/grafana/loki/issues/14568)) +* **operator:** Provide default OTLP attribute configuration ([#14410](https://github.com/grafana/loki/issues/14410)) + +### Features + +* ability to log stream selectors before service name detection ([#14154](https://github.com/grafana/loki/issues/14154)) ([d7ff426](https://github.com/grafana/loki/commit/d7ff42664681794b9ef5026ac3758cdd9569ac1a)) +* add app_name as a service label ([#13660](https://github.com/grafana/loki/issues/13660)) ([f2a16f4](https://github.com/grafana/loki/commit/f2a16f43b27503ba9ee76bac2b44d825ce030e0f)) +* add backoff mechanism to the retention process ([#14182](https://github.com/grafana/loki/issues/14182)) ([3136880](https://github.com/grafana/loki/commit/31368806a9c5e0ff6c43045e008861f26ed61af3)) +* add functions to common.libsonnet for warpstream ([#14123](https://github.com/grafana/loki/issues/14123)) ([2bde071](https://github.com/grafana/loki/commit/2bde071872fd08c138e03535b520ff7ae32dd336)) +* add gauge loki_ingest_storage_reader_phase ([#14679](https://github.com/grafana/loki/issues/14679)) ([f5b0fb6](https://github.com/grafana/loki/commit/f5b0fb6b998dc0a49cd36c0968862340c7e517bf)) +* add gauge to track the partition_id ([#14713](https://github.com/grafana/loki/issues/14713)) ([a142b3d](https://github.com/grafana/loki/commit/a142b3d540a79a94f6ed1283cfb0ac8aed49e600)) +* add missing cluster label to mixins ([#12870](https://github.com/grafana/loki/issues/12870)) ([547ca70](https://github.com/grafana/loki/commit/547ca708b9b56e2761bd19ebfcfc9f8571d9af2a)) +* add query user and query source to "executing query" log lines ([#14320](https://github.com/grafana/loki/issues/14320)) 
([4d69929](https://github.com/grafana/loki/commit/4d6992982d99a542f1e99af18b691830b71469e0)) +* add retries for s3 ObjectExists calls ([#14062](https://github.com/grafana/loki/issues/14062)) ([73cbbb0](https://github.com/grafana/loki/commit/73cbbb0f2257b9eb5a3bf5d2cf1f4d4d2490d47d)) +* add structured metadata to the promtail push API ([#14153](https://github.com/grafana/loki/issues/14153)) ([66cffcb](https://github.com/grafana/loki/commit/66cffcb427bda28af6fbcfcf85a34771db3787bc)) +* Add support for partition ingester in dashboards ([#14498](https://github.com/grafana/loki/issues/14498)) ([70deebf](https://github.com/grafana/loki/commit/70deebf26e88c6f2b10c78b3b8ce785c8a16e03b)) +* Allows to configure client_max_body_size ([#12924](https://github.com/grafana/loki/issues/12924)) ([809a024](https://github.com/grafana/loki/commit/809a024581c1f600744b9db0b2b2142234317082)) +* Apply patterns line length limit to json message key ([#14296](https://github.com/grafana/loki/issues/14296)) ([41fafd8](https://github.com/grafana/loki/commit/41fafd87933224d5d43592e91e339322fc90a466)) +* **blooms:** Add bloom planner and bloom builder to `backend` target ([#13997](https://github.com/grafana/loki/issues/13997)) ([bf60455](https://github.com/grafana/loki/commit/bf60455c8e52b87774df9ca90232b4c72d72e46b)) +* **blooms:** disk-backed queue for the bloom-planner (backport k227) ([#14927](https://github.com/grafana/loki/issues/14927)) ([1f6828b](https://github.com/grafana/loki/commit/1f6828b25c5c5d6ad5eda3be60a435db8ca55fc3)) +* **blooms:** Index structured metadata into blooms ([#14061](https://github.com/grafana/loki/issues/14061)) ([a2fbaa8](https://github.com/grafana/loki/commit/a2fbaa8e09b6eebff2f7c20746e84f1365bd7433)) +* **blooms:** Only write key and key=value to blooms ([#14686](https://github.com/grafana/loki/issues/14686)) ([3af0004](https://github.com/grafana/loki/commit/3af0004cb4d4dafbcbe099e4409edf6e6ff056a5)) +* Configurable list of json fields to mine patterns 
([#14528](https://github.com/grafana/loki/issues/14528)) ([7050897](https://github.com/grafana/loki/commit/70508975fd40d3e4dbb518d3f8c7bf96e37307b6)) +* detected field values ([#14350](https://github.com/grafana/loki/issues/14350)) ([7983f94](https://github.com/grafana/loki/commit/7983f94b15b422b94517641bd9cec5c9da6903e1)) +* **distributors:** Use a pool of worker to push to ingesters. ([#14245](https://github.com/grafana/loki/issues/14245)) ([f80d68a](https://github.com/grafana/loki/commit/f80d68a1edbd85a605be882eb0104b169343cf00)) +* Do not add empty blooms to offsets ([#14577](https://github.com/grafana/loki/issues/14577)) ([51c42e8](https://github.com/grafana/loki/commit/51c42e864563f2fa9ffc160cb13f6d6126ea5c6d)) +* Extract task computing into a strategy interface ([#13690](https://github.com/grafana/loki/issues/13690)) ([ab5e6ea](https://github.com/grafana/loki/commit/ab5e6eaaeea24f93f434dcece6ff5d9dc83e6d32)) +* **fluentd-plugin-datadog-loki:** support custom http headers ([#14299](https://github.com/grafana/loki/issues/14299)) ([e59035e](https://github.com/grafana/loki/commit/e59035e17315f453d4b2e2334330bc062d40f0fd)) +* **helm:** :sparkles: add additional service annotations for components in distributed mode ([#14131](https://github.com/grafana/loki/issues/14131)) ([5978f13](https://github.com/grafana/loki/commit/5978f1344c84525e6b8bda45869b867b7e878956)) +* **helm:** add configurable extraEnvFrom to admin-api and enterprisegw ([#14533](https://github.com/grafana/loki/issues/14533)) ([5d78a3a](https://github.com/grafana/loki/commit/5d78a3a3fd1f630d6b012a9240fa081e63bcb7ef)) +* **helm:** Add kubeVersionOverride for Helm chart ([#14434](https://github.com/grafana/loki/issues/14434)) ([0935d77](https://github.com/grafana/loki/commit/0935d77df08e6ad40a9f498f53e94e335b020ded)) +* **helm:** Add persistence option to memcached on Helm chart ([#13619](https://github.com/grafana/loki/issues/13619)) 
([ef1df0e](https://github.com/grafana/loki/commit/ef1df0e66fc8e2fe9327a66aea31279ca5c7307a)) +* **helm:** add tolerations to pattern-ingester statefulset ([#13605](https://github.com/grafana/loki/issues/13605)) ([09530c0](https://github.com/grafana/loki/commit/09530c0f4a1503713a76c68153b4da5287f9b79f)) +* **helm:** Allow setting node attributes to `tokengen` and `provisioner` ([#14311](https://github.com/grafana/loki/issues/14311)) ([c708ae6](https://github.com/grafana/loki/commit/c708ae691ca2d9a26b1c2a4591ed32dbfdd94619)) +* **helm:** Replace bloom compactor with bloom planner and builder ([#14003](https://github.com/grafana/loki/issues/14003)) ([08e61ca](https://github.com/grafana/loki/commit/08e61ca4db086b573ef636a156bfc624132515be)) +* **helm:** update chart with loki version 3.2.0 ([#14281](https://github.com/grafana/loki/issues/14281)) ([11b92ee](https://github.com/grafana/loki/commit/11b92eeb95612a2bb002ea22f048c55ae20557a2)) +* **Helm:** Update Loki Helm chart for restricted environments ([#14440](https://github.com/grafana/loki/issues/14440)) ([adc7538](https://github.com/grafana/loki/commit/adc75389a39e3aaad69303b82b0d68ec3d94485c)) +* implement IsRetryableErr for S3ObjectClient ([#14174](https://github.com/grafana/loki/issues/14174)) ([fc90a63](https://github.com/grafana/loki/commit/fc90a63636c689993bd9b568f9c54198bfb1f3ae)) +* Implement owned streams calculation using Partition Ring ([#14282](https://github.com/grafana/loki/issues/14282)) ([3c36ba9](https://github.com/grafana/loki/commit/3c36ba949d65e803cc6702b8664f87aca07ed052)) +* Implement WAL segment ingestion via Kafka with partition ring ([#14043](https://github.com/grafana/loki/issues/14043)) ([d178f4c](https://github.com/grafana/loki/commit/d178f4c7e2eadbd17ac82f8305782533c7308ba2)) +* Improve pattern ingester tracing ([#14707](https://github.com/grafana/loki/issues/14707)) ([80aec25](https://github.com/grafana/loki/commit/80aec2548203957dbb834ba69e6d734d9054416d)) +* **ingester:** implement 
partition shuffle sharding for ingester ([#14304](https://github.com/grafana/loki/issues/14304)) ([1a4436c](https://github.com/grafana/loki/commit/1a4436c41721e3e6aca82c26abaec8fe6f775d9f)) +* Introduce new `ObjectExistsWithSize` API to ([#14268](https://github.com/grafana/loki/issues/14268)) ([ac422b3](https://github.com/grafana/loki/commit/ac422b3bc3e822b4525401496a8b73e91d566128)) +* Introduce shardable probabilistic topk for instant queries. (backport k227) ([#14765](https://github.com/grafana/loki/issues/14765)) ([02eb024](https://github.com/grafana/loki/commit/02eb02458e99d4dcb2f734f6a8e83bbd76a8ea4f)) +* **jsonnet:** Allow to name prefix zoned ingesters ([#14260](https://github.com/grafana/loki/issues/14260)) ([fac3177](https://github.com/grafana/loki/commit/fac3177814b8d2914eb3af618d571104eba18934)) +* **kafka:** Add Ingestion from Kafka in Ingesters ([#14192](https://github.com/grafana/loki/issues/14192)) ([b6e9945](https://github.com/grafana/loki/commit/b6e9945f83991a01395df537a8e014585a57913b)) +* **kafka:** Add support for SASL auth to Kafka ([#14487](https://github.com/grafana/loki/issues/14487)) ([e2a209c](https://github.com/grafana/loki/commit/e2a209c076c9c9fd53732a0a7804acba3bff378e)) +* **kafka:** Enable querier to optionally query partition ingesters ([#14418](https://github.com/grafana/loki/issues/14418)) ([633bb5e](https://github.com/grafana/loki/commit/633bb5eb7e0717c3e1eafaab32f0ba2dacb4f5cd)) +* **kafka:** enqueue commit offset only once per batch process ([#14278](https://github.com/grafana/loki/issues/14278)) ([beca6f3](https://github.com/grafana/loki/commit/beca6f33662e8a43ea59943a4327a1c328960058)) +* **kafka:** Implement limiter using partition ring for Kafka ([#14359](https://github.com/grafana/loki/issues/14359)) ([5cbb239](https://github.com/grafana/loki/commit/5cbb23994beb3494e238fccecbb3f7c5ed5c1d0b)) +* **kafka:** Remove rate limits for kafka ingestion ([#14460](https://github.com/grafana/loki/issues/14460)) 
([83a8893](https://github.com/grafana/loki/commit/83a8893a3fbad3a87d7aea3a61e7dae2f6a34168)) +* **kafka:** Replay kafka from last commit before allowing ingesters to become ready ([#14330](https://github.com/grafana/loki/issues/14330)) ([39b57ec](https://github.com/grafana/loki/commit/39b57ec4eac3cbdc718aacae32ab8ff4e989709b)) +* **kafka:** Start ingester flush loop before trying to catch up from Kafka ([#14505](https://github.com/grafana/loki/issues/14505)) ([524ed81](https://github.com/grafana/loki/commit/524ed81395a0b2c6be86fc0fcd013393e555fd62)) +* **logcli:** add gzip compression option ([#14598](https://github.com/grafana/loki/issues/14598)) ([4d3f9f5](https://github.com/grafana/loki/commit/4d3f9f5a7b483b563348c322958486825d314526)) +* **loki:** include structured_metadata size while asserting rate limit ([#14571](https://github.com/grafana/loki/issues/14571)) ([a962edb](https://github.com/grafana/loki/commit/a962edba332f4fdfee29cf11e70019b1b498c258)) +* **max-allowed-line-length:** add config to set `max-allowed-line-length` in pattern ingester ([#14070](https://github.com/grafana/loki/issues/14070)) ([0780456](https://github.com/grafana/loki/commit/0780456662b67edde69004cf4ee3873c23d5094b)) +* mixin / add loki compaction not successfull alert ([#14239](https://github.com/grafana/loki/issues/14239)) ([da04f50](https://github.com/grafana/loki/commit/da04f5007edd85f35d1af5ba8c2c5a4eb96d2149)) +* mixin, allow overriding of some labels by parameterizing mixin recording/alert rules ([#11495](https://github.com/grafana/loki/issues/11495)) ([f1425b6](https://github.com/grafana/loki/commit/f1425b6c24e9d90c99477f67289c3aa34f69573d)) +* mixins / allow bloom dashboards disabling ([#14177](https://github.com/grafana/loki/issues/14177)) ([ce2e6d5](https://github.com/grafana/loki/commit/ce2e6d520b48fe9c5c7593ae2400a6983905782e)) +* **mixins:** Allow hiding useless rows in loki-operational ([#13646](https://github.com/grafana/loki/issues/13646)) 
([3aa4f22](https://github.com/grafana/loki/commit/3aa4f2227e4178f05e6b13cffc044989c7839372)) +* **mixins:** merge resources dashboards for ssd into one ([#13471](https://github.com/grafana/loki/issues/13471)) ([45b8719](https://github.com/grafana/loki/commit/45b8719aa768db35d4e7559fd87e22056248b912)) +* move detected field logic to query frontend ([#14212](https://github.com/grafana/loki/issues/14212)) ([36ace66](https://github.com/grafana/loki/commit/36ace66b73e9f9ad2a2d367fbc20803c0d9779c2)) +* move metric aggregation to a per-tenant config ([#14709](https://github.com/grafana/loki/issues/14709)) ([c1fde26](https://github.com/grafana/loki/commit/c1fde26730b4fc54e4bbc724d1b29f653541f720)) +* New bloom planning using chunk size TSDB stats ([#14547](https://github.com/grafana/loki/issues/14547)) ([673ede1](https://github.com/grafana/loki/commit/673ede16a5f675684f9e6a53903335af5075a507)) +* **operator:** Add support for Loki OTLP limits config ([#13446](https://github.com/grafana/loki/issues/13446)) ([d02f435](https://github.com/grafana/loki/commit/d02f435d3bf121b19e15de4f139c95a6d010b25c)) +* **operator:** Declare feature FIPS support for OpenShift only ([#14308](https://github.com/grafana/loki/issues/14308)) ([720c303](https://github.com/grafana/loki/commit/720c3037923c174e71a02d99d4bee6271428fbdb)) +* **operator:** introduce 1x.pico size ([#14407](https://github.com/grafana/loki/issues/14407)) ([57de81d](https://github.com/grafana/loki/commit/57de81d8c27e221832790443cebaf141353c3e3f)) +* **operator:** Provide default OTLP attribute configuration ([#14410](https://github.com/grafana/loki/issues/14410)) ([1b52387](https://github.com/grafana/loki/commit/1b5238721994c00764b6a7e7d63269c5b56d2480)) +* **operator:** Update Loki operand to v3.1.1 ([#14042](https://github.com/grafana/loki/issues/14042)) ([7ae1588](https://github.com/grafana/loki/commit/7ae1588200396b73a16fadd2610670a5ce5fd747)) +* **operator:** Update Loki operand to v3.2.1 
([#14526](https://github.com/grafana/loki/issues/14526)) ([5e970e5](https://github.com/grafana/loki/commit/5e970e50b166e73f5563e21c23db3ea99b24642e)) +* **operator:** User-guide for OTLP configuration ([#14620](https://github.com/grafana/loki/issues/14620)) ([27b4071](https://github.com/grafana/loki/commit/27b40713540bd60918780cdd4cb645e6761427cb)) +* Optionally require writes to kafka on Push requests ([#14186](https://github.com/grafana/loki/issues/14186)) ([7c78232](https://github.com/grafana/loki/commit/7c78232ad312d58ae00101a11e9d7c67f53f1361)) +* revert "feat: add functions to common.libsonnet for warpstream" ([#14129](https://github.com/grafana/loki/issues/14129)) ([18c27f9](https://github.com/grafana/loki/commit/18c27f9d4ec0c5fbd439972f9abb8bca0bdd6f9e)) +* **ruler:** enables ruler store that uses clients from thanos-io/objstore pkg ([#11713](https://github.com/grafana/loki/issues/11713)) ([8bca2e7](https://github.com/grafana/loki/commit/8bca2e76089e0b9894b7a4c18a950f4baaa5a412)) +* **storage:** AWS backend using thanos.io/objstore ([#11221](https://github.com/grafana/loki/issues/11221)) ([b872246](https://github.com/grafana/loki/commit/b87224647dc88901c61cb4bd571dfda9405a7826)) +* **storage:** Azure backend using thanos.io/objstore ([#11315](https://github.com/grafana/loki/issues/11315)) ([5824e3d](https://github.com/grafana/loki/commit/5824e3d35cd1273ccd1a63d7381098617a7697dd)) +* **storage:** GCS backend using thanos.io/objstore ([#11132](https://github.com/grafana/loki/issues/11132)) ([c059ace](https://github.com/grafana/loki/commit/c059ace53edba79864a567035b120db80addf23c)) +* support ruler sidecar in singleBinary mode ([#13572](https://github.com/grafana/loki/issues/13572)) ([684baf7](https://github.com/grafana/loki/commit/684baf7dbacef4b85a08db8de9934458745124d8)) +* track discarded data by usageTracker ([#14081](https://github.com/grafana/loki/issues/14081)) ([c65721e](https://github.com/grafana/loki/commit/c65721e7ade0ef89fd282d9f764fb2d05f6b9c42)) 
### Bug Fixes -* **promtail:** fix parser for azureeventhubs message without time field ([#14218](https://github.com/grafana/loki/pull/14218)) +* **`detected_fields`:** return parsed labels when parsers are passed ([#14047](https://github.com/grafana/loki/issues/14047)) ([aa1ac99](https://github.com/grafana/loki/commit/aa1ac99f4d369c87fd0db4fcf853ebce534e3500)) +* Add additional validation for timeout while retrieving headers ([#14217](https://github.com/grafana/loki/issues/14217)) ([8322e51](https://github.com/grafana/loki/commit/8322e518e68de286b2bc58cf15ea9fe947eeec86)) +* Add s3 principal to iam policy attached to sqs in lambda-promtail terraform code ([#14619](https://github.com/grafana/loki/issues/14619)) ([db0889e](https://github.com/grafana/loki/commit/db0889e2748b69a5c60d044dfab44bc652f1464d)) +* Add tenant limits as dependency to pattern ingester ([#14665](https://github.com/grafana/loki/issues/14665)) ([31eea90](https://github.com/grafana/loki/commit/31eea9042ada6650227eb281a36410ab521817a8)) +* **aggregated-metrics:** correctly create logfmt string ([#14124](https://github.com/grafana/loki/issues/14124)) ([63e84b4](https://github.com/grafana/loki/commit/63e84b476a9a7b97a121847659172fadbb8a1eee)) +* allow any level for aggregated metrics ([#14255](https://github.com/grafana/loki/issues/14255)) ([c001a1d](https://github.com/grafana/loki/commit/c001a1d93af5438fef521460dcba650b44629a93)) +* allow rename of structuremetadata labels ([#13955](https://github.com/grafana/loki/issues/13955)) ([2d4792a](https://github.com/grafana/loki/commit/2d4792a54fb52caa5cd904a17349b04410fae4c0)) +* always write detected_level when enabled, even if unknown ([#14464](https://github.com/grafana/loki/issues/14464)) ([41c6b6c](https://github.com/grafana/loki/commit/41c6b6c2c2f5f56ca76cf75ed05689564b9e9dcd)) +* **blooms:** Check length of tasks before accessing first element in slice ([#14634](https://github.com/grafana/loki/issues/14634)) 
([601f549](https://github.com/grafana/loki/commit/601f549656efa5ac769a685169d5bc84eff15a35)) +* **blooms:** Copy chunks from ForSeries (backport k227) ([#14864](https://github.com/grafana/loki/issues/14864)) ([d10f79c](https://github.com/grafana/loki/commit/d10f79c700c100d7333e682287aabbaa3c029768)) +* **blooms:** Do not restart builders when planner disconnects (backport k227) ([#14922](https://github.com/grafana/loki/issues/14922)) ([213e8ee](https://github.com/grafana/loki/commit/213e8eeba6e7fb138069e2858d62f1e3c4556a0e)) +* **blooms:** Exclude label filters where label name is part of the series labels. ([#14661](https://github.com/grafana/loki/issues/14661)) ([d1668f6](https://github.com/grafana/loki/commit/d1668f6a110f7119ebb1cc0e582be369b2af95b8)) +* **blooms:** Fix panic in initialisation of the bloom planner and builder ([#14110](https://github.com/grafana/loki/issues/14110)) ([8307c42](https://github.com/grafana/loki/commit/8307c42c541e769c9d0133df3856af049a815b73)) +* **blooms:** Fix strategy logger and add task test (backport k227) ([#14921](https://github.com/grafana/loki/issues/14921)) ([dc36a1e](https://github.com/grafana/loki/commit/dc36a1e1288a03b68d269ba261f41ac7c2942962)) +* **blooms:** Fix tenants slice on loadTenantTables (backport k227) ([#14901](https://github.com/grafana/loki/issues/14901)) ([540dd5a](https://github.com/grafana/loki/commit/540dd5a5ccb53bc2ee4236871632c7e1daa7f7e5)) +* **blooms:** Skip multi-tenant TSDBs during bloom planning (backport k227) ([#14888](https://github.com/grafana/loki/issues/14888)) ([631cff3](https://github.com/grafana/loki/commit/631cff345cdab110202d757572fbbf8088c0be87)) +* **build:** Use Debian Bullseye base image for build image ([#14368](https://github.com/grafana/loki/issues/14368)) ([3beb8ff](https://github.com/grafana/loki/commit/3beb8ff9cfe7f765b5d5db87892981a223d72f50)) +* **canary:** Reconnect immediately upon tail max duration ([#14287](https://github.com/grafana/loki/issues/14287)) 
([9267ee3](https://github.com/grafana/loki/commit/9267ee3561ccbb90589600d7b045f7e05b1b2ee0)) +* **ci:** fixed `Publish Rendered Helm Chart Diff` workflow ([#14365](https://github.com/grafana/loki/issues/14365)) ([6de6420](https://github.com/grafana/loki/commit/6de64209547ec970cb27564be87fe2085307e183)) +* **ci:** updated helm diff rendering workflow ([#14424](https://github.com/grafana/loki/issues/14424)) ([916e511](https://github.com/grafana/loki/commit/916e5115d9099e82834f0d8e123273c75c9cddec)) +* **config:** Copy Alibaba and IBM object storage configuration from common ([#14297](https://github.com/grafana/loki/issues/14297)) ([59ff1ec](https://github.com/grafana/loki/commit/59ff1ece1dacc461d03f71e41c0728396727eee6)) +* **config:** migrate renovate config ([#14646](https://github.com/grafana/loki/issues/14646)) ([a67d8ef](https://github.com/grafana/loki/commit/a67d8ef219aab80071e8256a6cbb18a47c7078e6)) +* correct _extracted logic in detected fields ([#14064](https://github.com/grafana/loki/issues/14064)) ([1b3ba53](https://github.com/grafana/loki/commit/1b3ba530b8fab9aac999387a135a76a62de3e000)) +* correct OTLP documentation typo ([#14602](https://github.com/grafana/loki/issues/14602)) ([063c590](https://github.com/grafana/loki/commit/063c590faa4aa30540572c5d6fdc1da8a6a25ee4)) +* **deps:** update aws-sdk-go-v2 monorepo ([#13986](https://github.com/grafana/loki/issues/13986)) ([6f49123](https://github.com/grafana/loki/commit/6f491233cae226d54d190521d2b935249d88ad05)) +* **deps:** update aws-sdk-go-v2 monorepo ([#14742](https://github.com/grafana/loki/issues/14742)) ([53a1ab7](https://github.com/grafana/loki/commit/53a1ab76257d900b80334d68439d7ff4bfcfd39b)) +* **deps:** update github.com/grafana/dskit digest to 687ec48 ([#14395](https://github.com/grafana/loki/issues/14395)) ([c2f38e1](https://github.com/grafana/loki/commit/c2f38e18c6b8dd134b8f3da164afc9c8625f2f2b)) +* **deps:** update github.com/grafana/dskit digest to 7c41a40 
([#14277](https://github.com/grafana/loki/issues/14277)) ([f39cdbd](https://github.com/grafana/loki/commit/f39cdbd541d85a961db655e70da713be04d9a294)) +* **deps:** update github.com/grafana/dskit digest to 931a021 ([#14032](https://github.com/grafana/loki/issues/14032)) ([7c18642](https://github.com/grafana/loki/commit/7c186425210f892d34a2ccf8ad23b475af8bf9b9)) +* **deps:** update github.com/grafana/dskit digest to b69ac1b ([#14355](https://github.com/grafana/loki/issues/14355)) ([9d7a6ea](https://github.com/grafana/loki/commit/9d7a6ea68053b576553e426d339961d50ee07080)) +* **deps:** update github.com/grafana/dskit digest to f52de24 ([#14319](https://github.com/grafana/loki/issues/14319)) ([a4f3edf](https://github.com/grafana/loki/commit/a4f3edfb52ad4a44a17aaeb753a780b08d6b552c)) +* **deps:** update github.com/twmb/franz-go/pkg/kfake digest to cea7aa5 ([#14590](https://github.com/grafana/loki/issues/14590)) ([688c42a](https://github.com/grafana/loki/commit/688c42a971589be96921ce362c7fc6792368c3da)) +* **deps:** update k8s.io/utils digest to 702e33f ([#14033](https://github.com/grafana/loki/issues/14033)) ([b7eecc7](https://github.com/grafana/loki/commit/b7eecc7a693e96f4d0fe0dcd7583ecdc4dd7283f)) +* **deps:** update module cloud.google.com/go/bigtable to v1.33.0 ([#14580](https://github.com/grafana/loki/issues/14580)) ([a0920ed](https://github.com/grafana/loki/commit/a0920ed9929080926f0f439182cb2428e938c208)) +* **deps:** update module cloud.google.com/go/pubsub to v1.45.0 ([#14361](https://github.com/grafana/loki/issues/14361)) ([4351238](https://github.com/grafana/loki/commit/4351238305a680852b6b29a7cdaef69e46042ee4)) +* **deps:** update module cloud.google.com/go/pubsub to v1.45.1 ([#14650](https://github.com/grafana/loki/issues/14650)) ([f173708](https://github.com/grafana/loki/commit/f17370867b70f65528d98fbfe751d079b5909be0)) +* **deps:** update module cloud.google.com/go/storage to v1.46.0 ([#14744](https://github.com/grafana/loki/issues/14744)) 
([8e45116](https://github.com/grafana/loki/commit/8e451165add426e480b2e691c7c69252d98a2d22)) +* **deps:** update module github.com/alicebob/miniredis/v2 to v2.33.0 ([#14721](https://github.com/grafana/loki/issues/14721)) ([7bfda25](https://github.com/grafana/loki/commit/7bfda259721c2b3858066ab71d9df09ad35895a6)) +* **deps:** update module github.com/aws/aws-sdk-go to v1.55.5 ([#14715](https://github.com/grafana/loki/issues/14715)) ([03f0f5a](https://github.com/grafana/loki/commit/03f0f5ab1691550eea59431c9c580530c13bf259)) +* **deps:** update module github.com/axiomhq/hyperloglog to v0.2.0 ([#14722](https://github.com/grafana/loki/issues/14722)) ([0167b22](https://github.com/grafana/loki/commit/0167b22ac6d4886a1c3157437a3c5b19e327723a)) +* **deps:** update module github.com/baidubce/bce-sdk-go to v0.9.189 ([#14044](https://github.com/grafana/loki/issues/14044)) ([7fb34b4](https://github.com/grafana/loki/commit/7fb34b4884269e7dad7cfa27969f470d9466279d)) +* **deps:** update module github.com/baidubce/bce-sdk-go to v0.9.192 ([#14337](https://github.com/grafana/loki/issues/14337)) ([6f7cae2](https://github.com/grafana/loki/commit/6f7cae2a7aae471c8161bd1e596a31fa89c48ae1)) +* **deps:** update module github.com/baidubce/bce-sdk-go to v0.9.196 ([#14651](https://github.com/grafana/loki/issues/14651)) ([478085a](https://github.com/grafana/loki/commit/478085ae02a0df3b2455211326519dd4aef26499)) +* **deps:** update module github.com/baidubce/bce-sdk-go to v0.9.197 ([#14682](https://github.com/grafana/loki/issues/14682)) ([b898294](https://github.com/grafana/loki/commit/b89829421ee3a4589efe34a4b1332fe659c9d8e7)) +* **deps:** update module github.com/coder/quartz to v0.1.2 ([#14652](https://github.com/grafana/loki/issues/14652)) ([7459e07](https://github.com/grafana/loki/commit/7459e07adb6aac48b305d50582eac915ea26528e)) +* **deps:** update module github.com/felixge/fgprof to v0.9.5 ([#14338](https://github.com/grafana/loki/issues/14338)) 
([a2ad3aa](https://github.com/grafana/loki/commit/a2ad3aa66940faae4fef7f92aab5a383f576190e)) +* **deps:** update module github.com/fsouza/fake-gcs-server to v1.50.2 ([#14313](https://github.com/grafana/loki/issues/14313)) ([275c97c](https://github.com/grafana/loki/commit/275c97cec7f70e68c56192c565d53a6c2a18ff78)) +* **deps:** update module github.com/hashicorp/raft to v1.7.1 ([#14005](https://github.com/grafana/loki/issues/14005)) ([e9cec1d](https://github.com/grafana/loki/commit/e9cec1d159b02977b6104e0006902e0d6b805527)) +* **deps:** update module github.com/ibm/go-sdk-core/v5 to v5.17.5 ([#14045](https://github.com/grafana/loki/issues/14045)) ([677d217](https://github.com/grafana/loki/commit/677d217533b7d2338e25a8b9b9e8a78045489e7c)) +* **deps:** update module github.com/ibm/go-sdk-core/v5 to v5.18.1 ([#14716](https://github.com/grafana/loki/issues/14716)) ([8395acd](https://github.com/grafana/loki/commit/8395acd0cbd3db9c6f330bd94a22b194fad35a93)) +* **deps:** update module github.com/ibm/ibm-cos-sdk-go to v1.11.1 ([#14342](https://github.com/grafana/loki/issues/14342)) ([aa82a7c](https://github.com/grafana/loki/commit/aa82a7c804edd6df99d3fddc581d02c3b7fa6774)) +* **deps:** update module github.com/klauspost/compress to v1.17.10 ([#14352](https://github.com/grafana/loki/issues/14352)) ([e23c5ed](https://github.com/grafana/loki/commit/e23c5ed9fa97010ef4c985afea25af3922ca215b)) +* **deps:** update module github.com/minio/minio-go/v7 to v7.0.76 ([#14006](https://github.com/grafana/loki/issues/14006)) ([51f9376](https://github.com/grafana/loki/commit/51f937684795982f0d234ab251017ce2c86c9e20)) +* **deps:** update module github.com/minio/minio-go/v7 to v7.0.77 ([#14353](https://github.com/grafana/loki/issues/14353)) ([d0e3ef7](https://github.com/grafana/loki/commit/d0e3ef709a222821fd764f6af72308c302faefb3)) +* **deps:** update module github.com/minio/minio-go/v7 to v7.0.80 ([#14654](https://github.com/grafana/loki/issues/14654)) 
([eec2513](https://github.com/grafana/loki/commit/eec25130468eb648c4667361cae7630449af7ef5)) +* **deps:** update module github.com/ncw/swift/v2 to v2.0.3 ([#14356](https://github.com/grafana/loki/issues/14356)) ([c843288](https://github.com/grafana/loki/commit/c8432887d3d4459ad4bc40deba3a3a3726a2f5eb)) +* **deps:** update module github.com/prometheus/client_golang to v1.20.5 ([#14655](https://github.com/grafana/loki/issues/14655)) ([e12f843](https://github.com/grafana/loki/commit/e12f8436b4080db54c6d31c6af38416c6fdd7eb4)) +* **deps:** update module github.com/schollz/progressbar/v3 to v3.17.0 ([#14720](https://github.com/grafana/loki/issues/14720)) ([4419d0f](https://github.com/grafana/loki/commit/4419d0f33e9f4f6f9305d89dd6f2ca47e3a18d8c)) +* **deps:** update module github.com/shirou/gopsutil/v4 to v4.24.10 ([#14719](https://github.com/grafana/loki/issues/14719)) ([3280376](https://github.com/grafana/loki/commit/32803762781c53ec3fe1bdb64841eb24aeed48f5)) +* **deps:** update module github.com/shirou/gopsutil/v4 to v4.24.9 ([#14357](https://github.com/grafana/loki/issues/14357)) ([c8e6a9d](https://github.com/grafana/loki/commit/c8e6a9d38f36ccf1f32e634765bb2363628f3710)) +* **deps:** update module github.com/shopify/sarama to v1.43.3 ([#14059](https://github.com/grafana/loki/issues/14059)) ([1cf4813](https://github.com/grafana/loki/commit/1cf48131d42db7302d6bcf980c355b018fcedb06)) +* **deps:** update module github.com/spf13/afero to v1.11.0 ([#14060](https://github.com/grafana/loki/issues/14060)) ([bbbd82b](https://github.com/grafana/loki/commit/bbbd82bc73322d662ba81efeda3884efcdc09708)) +* **deps:** update module go.etcd.io/bbolt to v1.3.11 ([#14358](https://github.com/grafana/loki/issues/14358)) ([b7bccfc](https://github.com/grafana/loki/commit/b7bccfcec3275b1d6d76c7450415ac8744e4d7b0)) +* **deps:** update module golang.org/x/net to v0.29.0 ([#14341](https://github.com/grafana/loki/issues/14341)) 
([1b6b9da](https://github.com/grafana/loki/commit/1b6b9da4e126738037e24d09309b62eac7d54a10)) +* **detected_fields:** always return empty array as `null` ([#14112](https://github.com/grafana/loki/issues/14112)) ([93009d4](https://github.com/grafana/loki/commit/93009d4e8ce520a3925bf5c0baff940db6c9caba)) +* **distributor:** validate partition ring if kafka is enabled ([#14303](https://github.com/grafana/loki/issues/14303)) ([8438d41](https://github.com/grafana/loki/commit/8438d415931f0a3763d551eb36c3d9f476f70713)) +* do not retain span logger created with index set initialized at query time ([#14027](https://github.com/grafana/loki/issues/14027)) ([4e41744](https://github.com/grafana/loki/commit/4e4174400fba410b9f32e0e43c1d866d283a9e62)) +* downgrade grpc to fix regression ([#14065](https://github.com/grafana/loki/issues/14065)) ([8c38d46](https://github.com/grafana/loki/commit/8c38d462f5a057497ab222d463223400f2e7b4ab)) +* enable service detection for otlp endpoint ([#14036](https://github.com/grafana/loki/issues/14036)) ([4f962ef](https://github.com/grafana/loki/commit/4f962ef7af250fc347dbed15583787d0238f6e9f)) +* Expand matching for additional variations ([#14221](https://github.com/grafana/loki/issues/14221)) ([71d7291](https://github.com/grafana/loki/commit/71d7291c9c00c3887d9a509991eb4d3e15ae8699)) +* fix bug in query result marshaling for invalid utf8 characters ([#14585](https://github.com/grafana/loki/issues/14585)) ([f411a07](https://github.com/grafana/loki/commit/f411a0795af67630a0a70a88ce64fa071de50a56)) +* **helm:** add missing `loki.storage.azure.chunkDelimiter` parameter to Helm chart ([#14011](https://github.com/grafana/loki/issues/14011)) ([08c70cc](https://github.com/grafana/loki/commit/08c70cca2e7b3a7444b0ec9822a6d5fd58ae70d5)) +* **helm:** Check for `rbac.namespaced` condition before creating roles ([#14201](https://github.com/grafana/loki/issues/14201)) ([3f47f09](https://github.com/grafana/loki/commit/3f47f09a6956719480677f6af02f58394d7f26bb)) +* 
**helm:** Fix persistence configuration for Memcached ([#14049](https://github.com/grafana/loki/issues/14049)) ([ee6e1cf](https://github.com/grafana/loki/commit/ee6e1cf78864ad3ed915056f695e1f556cc4a22e)) +* **helm:** Fix wrong port name referenced for ingress NetworkPolicy ([#12907](https://github.com/grafana/loki/issues/12907)) ([963a25b](https://github.com/grafana/loki/commit/963a25bf417bbd4171c4d9a2b501330fd663410f)) +* **helm:** Various fixes and enhancements for bloom components ([#14128](https://github.com/grafana/loki/issues/14128)) ([dc0cbd4](https://github.com/grafana/loki/commit/dc0cbd42dcb8e53152573f0baf03ad93aa0d3cd8)) +* Improve docs for min and max table offsets (backport k227) ([#14929](https://github.com/grafana/loki/issues/14929)) ([3161fdc](https://github.com/grafana/loki/commit/3161fdcc6dc1e80a86933a59e6af102c10336c39)) +* **kafka:** Fixes partition selection in distributors ([#14242](https://github.com/grafana/loki/issues/14242)) ([3f47233](https://github.com/grafana/loki/commit/3f472330790204e4d09b7a4e087be3ff0dc04eff)) +* **kafka:** Fixes writer initialization for arm32 ([#14115](https://github.com/grafana/loki/issues/14115)) ([4da035b](https://github.com/grafana/loki/commit/4da035b6b78f8bb3b9af28a82865ab543dd8e230)) +* **kafka:** Set namespace for Loki kafka metrics ([#14426](https://github.com/grafana/loki/issues/14426)) ([8aa8a2b](https://github.com/grafana/loki/commit/8aa8a2bb0e766da4d64313d17337fa54ab84f8a4)) +* **label_format:** renamed label should use ParsedLabel category ([#14515](https://github.com/grafana/loki/issues/14515)) ([82fb2f0](https://github.com/grafana/loki/commit/82fb2f0ae2403686b55fdb2fd5be248f706eddab)) +* level detection for warning level ([#14444](https://github.com/grafana/loki/issues/14444)) ([242a852](https://github.com/grafana/loki/commit/242a852d7d471351ea294fc09e2b5dc62eec0d03)) +* lint errors ([#14574](https://github.com/grafana/loki/issues/14574)) 
([99ef900](https://github.com/grafana/loki/commit/99ef9009e5e2e74f76c865fbb3feaf1559f4b47c)) +* **log-to-span:** timestamp.Time should be called with milliseconds ([#14196](https://github.com/grafana/loki/issues/14196)) ([f8d9143](https://github.com/grafana/loki/commit/f8d9143eead92d8727053e065c2d3403f689e4b5)) +* logcli: Check for errors before checking for `exists` when fetching data (backport k227) ([#14906](https://github.com/grafana/loki/issues/14906)) ([31b2a63](https://github.com/grafana/loki/commit/31b2a63ee23098fbd0151ef93020bd1cac093afe)) +* **logcli:** create new tail response for every line ([#14525](https://github.com/grafana/loki/issues/14525)) ([bcfd0d1](https://github.com/grafana/loki/commit/bcfd0d1ad1c72c6c3861c8263989f2ce683eee08)) +* **logql:** Fix panic in json parsing when using empty array index ([#14393](https://github.com/grafana/loki/issues/14393)) ([833bf0d](https://github.com/grafana/loki/commit/833bf0def6a07e2f58996f54b4b983858750e3e3)) +* **logql:** updated JSONExpressionParser not to unescape extracted values if it is JSON object. 
([#14499](https://github.com/grafana/loki/issues/14499)) ([08b1a90](https://github.com/grafana/loki/commit/08b1a9080b03bc041471f1ef72c4e3d7c6aea4f4)) +* missing dep PartitionRing for Ingester ([#14292](https://github.com/grafana/loki/issues/14292)) ([6354ded](https://github.com/grafana/loki/commit/6354deda90a9430856447e27123b3a33fd1b77a0)) +* **mixin:** Remove pod label from disk usage aggregation ([#14180](https://github.com/grafana/loki/issues/14180)) ([5d45c96](https://github.com/grafana/loki/commit/5d45c96ce12f7f16c21e61db1a78e94a09c16007)) +* mixins / loki-resources-overview panel layout ([#14178](https://github.com/grafana/loki/issues/14178)) ([8f54ec6](https://github.com/grafana/loki/commit/8f54ec65881bcad90078464d663af9110ef72603)) +* **mixins:** add backend path section in loki-operational for single scalable deployment ([#13023](https://github.com/grafana/loki/issues/13023)) ([16881ab](https://github.com/grafana/loki/commit/16881ab0d3b9e9e6bfc37f22ff69f5f1019a0df1)) +* **mixins:** disk space utilization panels with latest KSM versions ([#13486](https://github.com/grafana/loki/issues/13486)) ([0ea7431](https://github.com/grafana/loki/commit/0ea7431139ae0a18ef4e90bed836a7a6b92ab890)) +* **mixins:** retention dashboards fix metric name ([#14617](https://github.com/grafana/loki/issues/14617)) ([c762b9b](https://github.com/grafana/loki/commit/c762b9b5d3877e7cbfc41d8ab9a1a4287ebe97b2)) +* More correctly report starting phase during kafka-reader startup ([#14632](https://github.com/grafana/loki/issues/14632)) ([ea798e0](https://github.com/grafana/loki/commit/ea798e0f2a3364b4a76f153faf324b4a9ababc4d)) +* move partition_id into label to make PromQL easier ([#14714](https://github.com/grafana/loki/issues/14714)) ([e6cf423](https://github.com/grafana/loki/commit/e6cf42396f7554e46b6c331dd1938922806bcfc5)) +* nix build, downgrade toolchain to go1.23.1 ([#14442](https://github.com/grafana/loki/issues/14442)) 
([26dfd62](https://github.com/grafana/loki/commit/26dfd628f0effe2367420f591da36727ebe78806)) +* **operator:** add 1x.pico OpenShift UI dropdown menu ([#14660](https://github.com/grafana/loki/issues/14660)) ([4687f37](https://github.com/grafana/loki/commit/4687f377db0a7ae07ffdea354582c882c10b72c4)) +* **operator:** Add missing groupBy label for all rules on OpenShift ([#14279](https://github.com/grafana/loki/issues/14279)) ([ce7b2e8](https://github.com/grafana/loki/commit/ce7b2e89d9470e4e6a61a94f2b51ff8b938b5a5e)) +* **operator:** correctly ignore again BoltDB dashboards ([#14587](https://github.com/grafana/loki/issues/14587)) ([4879d10](https://github.com/grafana/loki/commit/4879d106bbeea29e331ddb7c9a49274600190032)) +* **operator:** Disable automatic discovery of service name ([#14506](https://github.com/grafana/loki/issues/14506)) ([3834c74](https://github.com/grafana/loki/commit/3834c74966b307411732cd3cbaf66305008b10eb)) +* **operator:** Disable log level discovery for OpenShift tenancy modes ([#14613](https://github.com/grafana/loki/issues/14613)) ([5034d34](https://github.com/grafana/loki/commit/5034d34ad23451954ea2459c341456da8d93d020)) +* **operator:** Fix building the size-calculator image ([#14573](https://github.com/grafana/loki/issues/14573)) ([a79b8fe](https://github.com/grafana/loki/commit/a79b8fe7802964cbb96bde75a7502a8b1e8a23ab)) +* **operator:** Fix make build target for size-calculator ([#14551](https://github.com/grafana/loki/issues/14551)) ([e727187](https://github.com/grafana/loki/commit/e727187ec3be2f10c80e984d00c40dad0308b036)) +* **operator:** Move OTLP attribute for statefulset name to stream labels ([#14630](https://github.com/grafana/loki/issues/14630)) ([5df3594](https://github.com/grafana/loki/commit/5df3594f791d77031c53d7b0f5b01191de8a23f2)) +* **operator:** Use empty initialized pod status map when no pods ([#14314](https://github.com/grafana/loki/issues/14314)) 
([6f533ed](https://github.com/grafana/loki/commit/6f533ed4386ee2db61680a9021934bfe9a9ba749)) +* **pattern:** Fixes latency metric namespace for tee to pattern ([#14241](https://github.com/grafana/loki/issues/14241)) ([ae955ed](https://github.com/grafana/loki/commit/ae955ed30d841675dbb9e30327b84728050e724a)) +* promtail config unmarshalling ([#14408](https://github.com/grafana/loki/issues/14408)) ([a05431f](https://github.com/grafana/loki/commit/a05431f879a8c29fac6356b6c46be62133c3e93c)) +* promtail parser for azureeventhubs message without time field ([#14218](https://github.com/grafana/loki/issues/14218)) ([2e62abb](https://github.com/grafana/loki/commit/2e62abbf47c47041027baf240722b3d76e7bd9a3)) +* **promtail:** validate scrape_config job name, do not allow duplicate job names ([#13719](https://github.com/grafana/loki/issues/13719)) ([f2d3499](https://github.com/grafana/loki/commit/f2d349924c2aa0453e49fc607603a189108666ec)) +* Propagate query stats from quantile & topk queries ([#13831](https://github.com/grafana/loki/issues/13831)) ([78b275b](https://github.com/grafana/loki/commit/78b275bf1092d834065315207666d6fd1c505f06)) +* remove usage of unsafe string in label adapter unmarshal ([#14216](https://github.com/grafana/loki/issues/14216)) ([758364c](https://github.com/grafana/loki/commit/758364c7775fba22a84498089a476c21f737d32f)) +* Rename misspelled filename ([#14237](https://github.com/grafana/loki/issues/14237)) ([cf1d4a3](https://github.com/grafana/loki/commit/cf1d4a31af5c376e82756eaaab267369f862265d)) +* report correct status code for metric and log queries in metrics.go ([#12102](https://github.com/grafana/loki/issues/12102)) ([900751c](https://github.com/grafana/loki/commit/900751c3bb008c50441c47eef3927a27201b1a11)) +* Report PSRL error message correctly ([#14187](https://github.com/grafana/loki/issues/14187)) ([a475153](https://github.com/grafana/loki/commit/a47515300a5cfac667eca1ca8e8d1a71e590b7d2)) +* Revert "fix(deps): update module 
github.com/shirou/gopsutil/v4 to v4.24.9 ([#14357](https://github.com/grafana/loki/issues/14357))" ([#14437](https://github.com/grafana/loki/issues/14437)) ([d53955b](https://github.com/grafana/loki/commit/d53955bbff5abae63a166099cef1f26b450a31f1)) +* **s3:** disable client retries when congestion control is enabled ([#14588](https://github.com/grafana/loki/issues/14588)) ([cff9f43](https://github.com/grafana/loki/commit/cff9f43dd6fb5e90c875c14c138ea39b58202dff)) +* **sharding:** apply offset to both `from` and `through` in shard request ([#14256](https://github.com/grafana/loki/issues/14256)) ([17c472d](https://github.com/grafana/loki/commit/17c472d9abea6b1cae21de5fe2af8b365bdaf137)) +* skipping label if it contains special symbol ([#14068](https://github.com/grafana/loki/issues/14068)) ([55e374e](https://github.com/grafana/loki/commit/55e374e85e7275da8f40d1149defd88f31856f25)) +* **storage/chunk/client/aws:** have GetObject check for canceled context ([#14420](https://github.com/grafana/loki/issues/14420)) ([5f325aa](https://github.com/grafana/loki/commit/5f325aac56e41848979e9e33a4a443e31ea525d0)) +* Transform `ObjectExistsWithSize` into `GetAttributes` ([#14329](https://github.com/grafana/loki/issues/14329)) ([2f56f50](https://github.com/grafana/loki/commit/2f56f50cc6591ca482358933c719d005446d0c01)) +* Update AWS storage timeout error for Go 1.23 behavior ([#14226](https://github.com/grafana/loki/issues/14226)) ([a4642b5](https://github.com/grafana/loki/commit/a4642b55e9b374ccd974b662e7b17a2389c3dcbd)) +* Update renovate ignore for operator API with new module path ([#14581](https://github.com/grafana/loki/issues/14581)) ([c9b2907](https://github.com/grafana/loki/commit/c9b2907f3c97cf0a14837c0b27cad7a06d84f447)) +* Wait for OwnedStreams service in Ingester startup ([#14208](https://github.com/grafana/loki/issues/14208)) ([a4aee4f](https://github.com/grafana/loki/commit/a4aee4f4ff494b525f68c9c6c1ae3417a8e61ebe)) + +### Performance Improvements + +* **blooms:** 
Remove compression of `.tar` archived bloom blocks ([#14159](https://github.com/grafana/loki/issues/14159)) ([cdf084f](https://github.com/grafana/loki/commit/cdf084fdaeaf632e7c078022c6ad4322bfef2989)) +* **logql:** Micro-optimizations for IP filter ([#14072](https://github.com/grafana/loki/issues/14072)) ([c5083c7](https://github.com/grafana/loki/commit/c5083c7f1ff2f86c74b96c9a87cead78ee6fb3cd)) + +### Miscellaneous Chores + +* **blooms:** Introduce a new block schema (V3) ([#14038](https://github.com/grafana/loki/issues/14038)) ([5395daf](https://github.com/grafana/loki/commit/5395daf898c2d0bbc4756ab6260c54feda960911)) + +### Code Refactoring + +* **operator:** Migrate project layout to kubebuilder go/v4 ([#14447](https://github.com/grafana/loki/issues/14447)) ([dbb3b6e](https://github.com/grafana/loki/commit/dbb3b6edc96f3545a946319c0324518800d286cf)) +* **operator:** Rename loki api go module ([#14568](https://github.com/grafana/loki/issues/14568)) ([976d8ab](https://github.com/grafana/loki/commit/976d8ab81c1a79f35d7cec96f6a9c35a9947fa48)) + ## [3.2.1](https://github.com/grafana/loki/compare/v3.2.0...v3.2.1) (2024-10-17) diff --git a/_shared-workflows-dockerhub-login b/_shared-workflows-dockerhub-login new file mode 160000 index 000000000000..70026bc71241 --- /dev/null +++ b/_shared-workflows-dockerhub-login @@ -0,0 +1 @@ +Subproject commit 70026bc712419d9e31fb2a1572f8a9032b8dbbfb diff --git a/clients/cmd/fluent-bit/Dockerfile b/clients/cmd/fluent-bit/Dockerfile index ae361b864429..04f279572d02 100644 --- a/clients/cmd/fluent-bit/Dockerfile +++ b/clients/cmd/fluent-bit/Dockerfile @@ -14,7 +14,7 @@ RUN go build \ -o clients/cmd/fluent-bit/out_grafana_loki.so \ /src/clients/cmd/fluent-bit -FROM fluent/fluent-bit:1.9.10@sha256:b33d4bf7f7b870777c1f596bc33d6d347167d460bc8cc6aa50fddcbedf7bede5 +FROM fluent/fluent-bit:3.2.1@sha256:6820e86902908933c63f1260d36f1bed0fed0c805518667054cf08890dbf06d5 COPY --from=builder /src/clients/cmd/fluent-bit/out_grafana_loki.so 
/fluent-bit/bin COPY clients/cmd/fluent-bit/fluent-bit.conf /fluent-bit/etc/fluent-bit.conf diff --git a/clients/cmd/fluentd/docker/docker-compose.yml b/clients/cmd/fluentd/docker/docker-compose.yml index 3a5fcfbf76fd..92f814b8126c 100644 --- a/clients/cmd/fluentd/docker/docker-compose.yml +++ b/clients/cmd/fluentd/docker/docker-compose.yml @@ -24,7 +24,7 @@ services: # Read /var/log/syslog and send it to fluentd fluentbit: - image: fluent/fluent-bit:1.9 + image: fluent/fluent-bit:3.2 command: "/fluent-bit/bin/fluent-bit -c /srv/fluent-bit.conf" user: root volumes: diff --git a/clients/cmd/logstash/Dockerfile b/clients/cmd/logstash/Dockerfile index f9845e87bd17..5f7662f6ae49 100644 --- a/clients/cmd/logstash/Dockerfile +++ b/clients/cmd/logstash/Dockerfile @@ -1,4 +1,4 @@ -FROM logstash:7.17.25 +FROM logstash:8.16.0 USER logstash ENV PATH /usr/share/logstash/vendor/jruby/bin:/usr/share/logstash/vendor/bundle/jruby/2.5.0/bin:/usr/share/logstash/jdk/bin:$PATH diff --git a/clients/cmd/promtail/main.go b/clients/cmd/promtail/main.go index 7e00e7ff35db..0fe973969008 100644 --- a/clients/cmd/promtail/main.go +++ b/clients/cmd/promtail/main.go @@ -10,7 +10,7 @@ import ( // embed time zone data _ "time/tzdata" - "k8s.io/klog" + "k8s.io/klog/v2" "github.com/go-kit/log/level" "github.com/grafana/dskit/flagext" diff --git a/clients/pkg/logentry/stages/timestamp.go b/clients/pkg/logentry/stages/timestamp.go index fb1fb8a27c3b..3e2ce9d730a0 100644 --- a/clients/pkg/logentry/stages/timestamp.go +++ b/clients/pkg/logentry/stages/timestamp.go @@ -8,7 +8,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - lru "github.com/hashicorp/golang-lru" + lru "github.com/hashicorp/golang-lru/v2" "github.com/mitchellh/mapstructure" "github.com/prometheus/common/model" @@ -114,9 +114,9 @@ func newTimestampStage(logger log.Logger, config interface{}) (Stage, error) { return nil, err } - var lastKnownTimestamps *lru.Cache + var lastKnownTimestamps *lru.Cache[string, time.Time] 
if *cfg.ActionOnFailure == TimestampActionOnFailureFudge { - lastKnownTimestamps, err = lru.New(maxLastKnownTimestampsCacheSize) + lastKnownTimestamps, err = lru.New[string, time.Time](maxLastKnownTimestampsCacheSize) if err != nil { return nil, err } @@ -138,7 +138,7 @@ type timestampStage struct { // Stores the last known timestamp for a given "stream id" (guessed, since at this stage // there's no reliable way to know it). - lastKnownTimestamps *lru.Cache + lastKnownTimestamps *lru.Cache[string, time.Time] } // Name implements Stage @@ -222,7 +222,7 @@ func (ts *timestampStage) processActionOnFailureFudge(labels model.LabelSet, t * } // Fudge the timestamp - *t = lastTimestamp.(time.Time).Add(1 * time.Nanosecond) + *t = lastTimestamp.Add(1 * time.Nanosecond) // Store the fudged timestamp, so that a subsequent fudged timestamp will be 1ns after it ts.lastKnownTimestamps.Add(labelsStr, *t) diff --git a/clients/pkg/promtail/targets/file/filetarget.go b/clients/pkg/promtail/targets/file/filetarget.go index ffa168fde43d..b7b5f4e7c686 100644 --- a/clients/pkg/promtail/targets/file/filetarget.go +++ b/clients/pkg/promtail/targets/file/filetarget.go @@ -7,7 +7,7 @@ import ( "sync" "time" - "github.com/bmatcuk/doublestar" + "github.com/bmatcuk/doublestar/v4" "github.com/fsnotify/fsnotify" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -246,7 +246,8 @@ func (t *FileTarget) sync() error { matches = []string{t.path} } else { // Gets current list of files to tail. 
- matches, err = doublestar.Glob(t.path) + matches, err = doublestar.FilepathGlob(t.path) + if err != nil { return errors.Wrap(err, "filetarget.sync.filepath.Glob") } @@ -255,7 +256,8 @@ func (t *FileTarget) sync() error { if fi, err := os.Stat(t.pathExclude); err == nil && !fi.IsDir() { matchesExcluded = []string{t.pathExclude} } else { - matchesExcluded, err = doublestar.Glob(t.pathExclude) + matchesExcluded, err = doublestar.FilepathGlob(t.pathExclude) + if err != nil { return errors.Wrap(err, "filetarget.sync.filepathexclude.Glob") } diff --git a/clients/pkg/promtail/targets/file/filetargetmanager.go b/clients/pkg/promtail/targets/file/filetargetmanager.go index a02d0295d2bd..4cd7474c919b 100644 --- a/clients/pkg/promtail/targets/file/filetargetmanager.go +++ b/clients/pkg/promtail/targets/file/filetargetmanager.go @@ -7,7 +7,7 @@ import ( "strings" "sync" - "github.com/bmatcuk/doublestar" + "github.com/bmatcuk/doublestar/v4" "github.com/fsnotify/fsnotify" "github.com/go-kit/log" "github.com/go-kit/log/level" diff --git a/clients/pkg/promtail/targets/serverutils/config.go b/clients/pkg/promtail/targets/serverutils/config.go index 451a3d0953c7..add94dc8157f 100644 --- a/clients/pkg/promtail/targets/serverutils/config.go +++ b/clients/pkg/promtail/targets/serverutils/config.go @@ -3,8 +3,8 @@ package serverutils import ( "flag" + "dario.cat/mergo" "github.com/grafana/dskit/server" - "github.com/imdario/mergo" ) // MergeWithDefaults applies server.Config defaults to a given and different server.Config. 
diff --git a/cmd/logcli/main.go b/cmd/logcli/main.go index 0cf8763682ee..835216c2bb83 100644 --- a/cmd/logcli/main.go +++ b/cmd/logcli/main.go @@ -11,9 +11,9 @@ import ( "strings" "time" + "github.com/alecthomas/kingpin/v2" "github.com/prometheus/common/config" "github.com/prometheus/common/version" - "gopkg.in/alecthomas/kingpin.v2" "github.com/grafana/loki/v3/pkg/logcli/client" "github.com/grafana/loki/v3/pkg/logcli/detected" diff --git a/cmd/lokitool/main.go b/cmd/lokitool/main.go index 9989a1f03a18..76b9e0ca2ff8 100644 --- a/cmd/lokitool/main.go +++ b/cmd/lokitool/main.go @@ -4,7 +4,7 @@ import ( "fmt" "os" - "gopkg.in/alecthomas/kingpin.v2" + "github.com/alecthomas/kingpin/v2" "github.com/prometheus/common/version" diff --git a/docs/sources/get-started/labels/structured-metadata.md b/docs/sources/get-started/labels/structured-metadata.md index f1877139fef7..43de657337f5 100644 --- a/docs/sources/get-started/labels/structured-metadata.md +++ b/docs/sources/get-started/labels/structured-metadata.md @@ -23,8 +23,10 @@ You should only use structured metadata in the following situations: - If you are ingesting data in OpenTelemetry format, using Grafana Alloy or an OpenTelemetry Collector. Structured metadata was designed to support native ingestion of OpenTelemetry data. - If you have high cardinality metadata that should not be used as a label and does not exist in the log line. Some examples might include `process_id` or `thread_id` or Kubernetes pod names. - -It is an antipattern to extract information that already exists in your log lines and put it into structured metadata. +- If you are using [Explore Logs](https://grafana.com/docs/grafana-cloud/visualizations/simplified-exploration/logs/) to visualize and explore your Loki logs. You must set `discover_log_levels` and `allow_structured_metadata` to `true` in your Loki configuration. 
+- If you are a large-scale customer, who is ingesting more than 75TB of logs a month and are using [Bloom filters](https://grafana.com/docs/loki//operations/bloom-filters/) (Experimental), starting in [Loki 3.3](https://grafana.com/docs/loki//release-notes/v3-3/) Bloom filters now utilize structured metadata. + +We do not recommend extracting information that already exists in your log lines and putting it into structured metadata. ## Attaching structured metadata to log lines @@ -37,9 +39,10 @@ See the [Promtail: Structured metadata stage](https://grafana.com/docs/loki//send-data/logstash/). {{< admonition type="warning" >}} -Structured metadata size is taken into account while asserting ingestion rate limiting. +Structured metadata size is taken into account while asserting ingestion rate limiting. Along with that, there are separate limits on how much structured metadata can be attached per log line. -``` + +```yaml # Maximum size accepted for structured metadata per log line. # CLI flag: -limits.max-structured-metadata-size [max_structured_metadata_size: | default = 64KB] @@ -48,6 +51,7 @@ Along with that, there are separate limits on how much structured metadata can b # CLI flag: -limits.max-structured-metadata-entries-count [max_structured_metadata_entries_count: | default = 128] ``` + {{< /admonition >}} ## Querying structured metadata diff --git a/docs/sources/operations/bloom-filters.md b/docs/sources/operations/bloom-filters.md index 7394e26ec4f0..4704adc8e1d9 100644 --- a/docs/sources/operations/bloom-filters.md +++ b/docs/sources/operations/bloom-filters.md @@ -13,7 +13,8 @@ aliases: # Bloom filters (Experimental) {{< admonition type="warning" >}} -This feature is an [experimental feature](/docs/release-life-cycle/). Engineering and on-call support is not available. No SLA is provided. +This feature is an [experimental feature](/docs/release-life-cycle/). Engineering and on-call support is not available. No SLA is provided. 
+Note that this feature is intended for users who are ingesting more than 75TB of logs a month, as it is designed to accelerate queries against large volumes of logs. {{< /admonition >}} Loki leverages [bloom filters](https://en.wikipedia.org/wiki/Bloom_filter) to speed up queries by reducing the amount of data Loki needs to load from the store and iterate through. @@ -110,7 +111,7 @@ overrides: period: 40d ``` -### Sizing and configuration +### Planner and Builder sizing and configuration The single planner instance runs the planning phase for bloom blocks for each tenant in the given interval and puts the created tasks to an internal task queue. Builders process tasks sequentially by pulling them from the queue. The amount of builder replicas required to complete all pending tasks before the next planning iteration depends on the value of `-bloom-build.planner.bloom_split_series_keyspace_by`, the number of tenants, and the log volume of the streams. @@ -131,7 +132,7 @@ The sharding of the data is performed on the client side using DNS discovery of You can find all the configuration options for this component in the Configure section for the [Bloom Gateways][bloom-gateway-cfg]. Refer to the [Enable bloom filters](#enable-bloom-filters) section above for a configuration snippet enabling this feature. -### Sizing and configuration +### Gateway sizing and configuration Bloom Gateways use their local file system as a Least Recently Used (LRU) cache for blooms that are downloaded from object storage. The size of the blooms depend on the ingest volume and number of unique structured metadata key-value pairs, as well as on build settings of the blooms, namely false-positive-rate. @@ -140,7 +141,7 @@ With default settings, bloom filters make up <1% of the raw structured metadata Since reading blooms depends heavily on disk IOPS, Bloom Gateways should make use of multiple, locally attached SSD disks (NVMe) to increase I/O throughput. 
Multiple directories on different disk mounts can be specified using the `-bloom.shipper.working-directory` [setting][storage-config-cfg] when using a comma separated list of mount points, for example: -``` +```bash -bloom.shipper.working-directory="/mnt/data0,/mnt/data1,/mnt/data2,/mnt/data3" ``` @@ -150,7 +151,7 @@ The product of three settings control the maximum amount of bloom data in memory Example, assuming 4 CPU cores: -``` +```bash -bloom-gateway.worker-concurrency=4 // 1x NUM_CORES -bloom-gateway.block-query-concurrency=8 // 2x NUM_CORES -bloom.max-query-page-size=64MiB 
@@ -26,19 +27,26 @@ If [bloom filters][] are enabled, you can write LogQL queries using [structured Queries will be accelerated for any [label filter expression][] that satisfies _all_ of the following criteria: * The label filter expression using **string equality**, such as `| key="value"`. - * `or` and `and` operators can be used to match multiple values, such as `| detected_level="error" or detected_level="warn"`. - * _Basic_ regular expressions are automatically simplified into a supported expression: - * `| key=~"value"` is converted to `| key="value"`. - * `| key=~"value1|value2"` is converted to `| key="value1" or key="value2"`. - * `| key=~".+"` checks for existence of `key`. `.*` is not supported. + * `or` and `and` operators can be used to match multiple values, such as `| detected_level="error" or detected_level="warn"`. + * _Basic_ regular expressions are automatically simplified into a supported expression: + * `| key=~"value"` is converted to `| key="value"`. + * `| key=~"value1|value2"` is converted to `| key="value1" or key="value2"`. + * `| key=~".+"` checks for existence of `key`. `.*` is not supported. * The label filter expression is querying for structured metadata and not a stream label. * The label filter expression is placed before any [parser expression][], [labels format expression][], [drop labels expression][], or [keep labels expression][]. -To take full advantage of query acceleration with blooms, ensure that filtering structured metadata is done before any parse expression: +To take full advantage of query acceleration with blooms, ensure that filtering structured metadata is done before any parser expression: + +In the following example, the query is not accelerated because the structured metadata filter, `detected_level="error"`, is after a parser stage, `json`. 
+ +```logql +{cluster="prod"} | logfmt | json | detected_level="error" +``` + +In the following example, the query is accelerated because the structured metadata filter is before any parser stage. ```logql -{cluster="prod"} | logfmt | json | detected_level="error" # NOT ACCELERATED: structured metadata filter is after a parse stage -{cluster="prod"} | detected_level="error" | logfmt | json # ACCELERATED: structured metadata filter is before any parse stage +{cluster="prod"} | detected_level="error" | logfmt | json ``` [bloom filters]: https://grafana.com/docs/loki//operations/bloom-filters/ diff --git a/docs/sources/release-notes/_index.md b/docs/sources/release-notes/_index.md index d8c5f26f24a4..1c59760c62c9 100644 --- a/docs/sources/release-notes/_index.md +++ b/docs/sources/release-notes/_index.md @@ -8,6 +8,7 @@ weight: 100 Release notes for Loki are in the CHANGELOG for the release and listed here by version number. +- [V3.3 release notes](https://grafana.com/docs/loki//release-notes/v3-3/) - [V3.2 release notes](https://grafana.com/docs/loki//release-notes/v3-2/) - [V3.1 release notes](https://grafana.com/docs/loki//release-notes/v3-1/) - [V3.0 release notes](https://grafana.com/docs/loki//release-notes/v3-0/) diff --git a/docs/sources/release-notes/v2-3.md b/docs/sources/release-notes/v2-3.md index 87aff23ed0e9..3883a2493c10 100644 --- a/docs/sources/release-notes/v2-3.md +++ b/docs/sources/release-notes/v2-3.md @@ -1,7 +1,7 @@ --- title: V2.3 description: Version 2.3 release notes -weight: 99 +weight: 900 --- # V2.3 diff --git a/docs/sources/release-notes/v2-4.md b/docs/sources/release-notes/v2-4.md index 6465503e3df3..aa824fbe6e13 100644 --- a/docs/sources/release-notes/v2-4.md +++ b/docs/sources/release-notes/v2-4.md @@ -1,7 +1,7 @@ --- title: V2.4 description: Version 2.4 release notes -weight: 88 +weight: 850 --- # V2.4 diff --git a/docs/sources/release-notes/v2-5.md b/docs/sources/release-notes/v2-5.md index 9ffc947349a7..9aa37d983ecf 100644 --- 
a/docs/sources/release-notes/v2-5.md +++ b/docs/sources/release-notes/v2-5.md @@ -1,7 +1,7 @@ --- title: V2.5 description: Version 2.5 release notes -weight: 77 +weight: 800 --- # V2.5 diff --git a/docs/sources/release-notes/v2-6.md b/docs/sources/release-notes/v2-6.md index 8c873fa9520b..da0904a47803 100644 --- a/docs/sources/release-notes/v2-6.md +++ b/docs/sources/release-notes/v2-6.md @@ -1,7 +1,7 @@ --- title: V2.6 description: Version 2.6 release notes -weight: 66 +weight: 750 --- # V2.6 diff --git a/docs/sources/release-notes/v2-7.md b/docs/sources/release-notes/v2-7.md index 416ee8e03e7c..6efed071d2b5 100644 --- a/docs/sources/release-notes/v2-7.md +++ b/docs/sources/release-notes/v2-7.md @@ -1,7 +1,7 @@ --- title: V2.7 description: Version 2.7 release notes -weight: 60 +weight: 700 --- # V2.7 diff --git a/docs/sources/release-notes/v2-8.md b/docs/sources/release-notes/v2-8.md index 75150ad67cd6..e791470c1281 100644 --- a/docs/sources/release-notes/v2-8.md +++ b/docs/sources/release-notes/v2-8.md @@ -1,7 +1,7 @@ --- title: V2.8 description: Version 2.8 release notes -weight: 55 +weight: 650 --- # V2.8 diff --git a/docs/sources/release-notes/v2-9.md b/docs/sources/release-notes/v2-9.md index c1d72e622144..3c113c53b3f3 100644 --- a/docs/sources/release-notes/v2-9.md +++ b/docs/sources/release-notes/v2-9.md @@ -1,7 +1,7 @@ --- title: V2.9 description: Version 2.9 release notes -weight: 50 +weight: 600 --- # V2.9 diff --git a/docs/sources/release-notes/v3-0.md b/docs/sources/release-notes/v3-0.md index c04dcda82638..5b382e2f1017 100644 --- a/docs/sources/release-notes/v3-0.md +++ b/docs/sources/release-notes/v3-0.md @@ -1,7 +1,7 @@ --- title: v3.0 description: Version 3.0 release notes. 
-weight: 30 +weight: 550 --- # v3.0 diff --git a/docs/sources/release-notes/v3-1.md b/docs/sources/release-notes/v3-1.md index f623caad2b46..168c7de810d5 100644 --- a/docs/sources/release-notes/v3-1.md +++ b/docs/sources/release-notes/v3-1.md @@ -1,7 +1,7 @@ --- title: v3.1 description: Version 3.1 release notes. -weight: 20 +weight: 500 --- # v3.1 diff --git a/docs/sources/release-notes/v3-2.md b/docs/sources/release-notes/v3-2.md index b18b38ab7bb8..89b0b48d2674 100644 --- a/docs/sources/release-notes/v3-2.md +++ b/docs/sources/release-notes/v3-2.md @@ -1,7 +1,7 @@ --- title: v3.2 description: Version 3.2 release notes. -weight: 10 +weight: 450 --- # v3.2 diff --git a/docs/sources/release-notes/v3-3.md b/docs/sources/release-notes/v3-3.md new file mode 100644 index 000000000000..d5c43e32355f --- /dev/null +++ b/docs/sources/release-notes/v3-3.md @@ -0,0 +1,208 @@ +--- +title: v3.3 +description: Version 3.3 release notes. +weight: 400 +--- + +# v3.3 + +Grafana Labs and the Loki team are excited to announce the release of Loki 3.3. Here's a summary of new enhancements and important fixes. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-3.3.x/CHANGELOG.md). + +## Features and enhancements + +Key features in Loki 3.3.0 include the following: + +- **Query acceleration with Bloom filters** (experimental): The Bloom filters feature introduced in Loki 3.0 remains experimental. There has been a major shift in how Loki leverages bloom filters — pivoting from free-text search to harnessing the power of structured metadata ([#14061](https://github.com/grafana/loki/issues/14061)) ([a2fbaa8](https://github.com/grafana/loki/commit/a2fbaa8e09b6eebff2f7c20746e84f1365bd7433)). This change leverages the growing adoption of OpenTelemetry in observability stacks. 
The Bloom Compactor component has been removed and replaced by the Bloom Planner and Bloom Builder components ([#13997](https://github.com/grafana/loki/issues/13997)) ([bf60455](https://github.com/grafana/loki/commit/bf60455c8e52b87774df9ca90232b4c72d72e46b)) and blooms has a new block schema (V3) ([#14038](https://github.com/grafana/loki/issues/14038)) ([5395daf](https://github.com/grafana/loki/commit/5395daf898c2d0bbc4756ab6260c54feda960911)) which is a breaking change. + +- **Explore Logs:** Starting with Grafana v11.3, the plugin for the Explore Logs app is installed in Grafana by default. This release includes enhancements to improve pattern ingester tracing ([#14707](https://github.com/grafana/loki/issues/14707)) ([80aec25](https://github.com/grafana/loki/commit/80aec2548203957dbb834ba69e6d734d9054416d)), to apply patterns line length limit to json message keys ([#14296](https://github.com/grafana/loki/issues/14296)) ([41fafd8](https://github.com/grafana/loki/commit/41fafd87933224d5d43592e91e339322fc90a466)), add configuration to set `max-allowed-line-length` in the pattern ingester ([#14070](https://github.com/grafana/loki/issues/14070)) ([0780456](https://github.com/grafana/loki/commit/0780456662b67edde69004cf4ee3873c23d5094b)) and move metric aggregation to a per-tenant configuration ([#14709](https://github.com/grafana/loki/issues/14709)) ([c1fde26](https://github.com/grafana/loki/commit/c1fde26730b4fc54e4bbc724d1b29f653541f720)). 
+ +- **helm:** Multiple improvements to the Helm charts, including: Updating the chart with Loki version 3.2.0 ([#14281](https://github.com/grafana/loki/issues/14281)) ([11b92ee](https://github.com/grafana/loki/commit/11b92eeb95612a2bb002ea22f048c55ae20557a2)), replacing bloom compactor with bloom planner and builder ([#14003](https://github.com/grafana/loki/issues/14003)) ([08e61ca](https://github.com/grafana/loki/commit/08e61ca4db086b573ef636a156bfc624132515be)), adding the ability to configure `client_max_body_size` ([#12924](https://github.com/grafana/loki/issues/12924)) ([809a024](https://github.com/grafana/loki/commit/809a024581c1f600744b9db0b2b2142234317082)), adding additional service annotations for components in distributed mode ([#14131](https://github.com/grafana/loki/issues/14131)) ([5978f13](https://github.com/grafana/loki/commit/5978f1344c84525e6b8bda45869b867b7e878956)), adding configurable `extraEnvFrom` to admin-api and enterprise-gateway ([#14533](https://github.com/grafana/loki/issues/14533)) ([5d78a3a](https://github.com/grafana/loki/commit/5d78a3a3fd1f630d6b012a9240fa081e63bcb7ef)), adding `kubeVersionOverride` for Helm chart ([#14434](https://github.com/grafana/loki/issues/14434)) ([0935d77](https://github.com/grafana/loki/commit/0935d77df08e6ad40a9f498f53e94e335b020ded)), adding persistence option to memcached on Helm chart ([#13619](https://github.com/grafana/loki/issues/13619)) ([ef1df0e](https://github.com/grafana/loki/commit/ef1df0e66fc8e2fe9327a66aea31279ca5c7307a)), adding tolerations to pattern-ingester statefulset ([#13605](https://github.com/grafana/loki/issues/13605)) ([09530c0](https://github.com/grafana/loki/commit/09530c0f4a1503713a76c68153b4da5287f9b79f)), allowing setting node attributes to `tokengen` and `provisioner` ([#14311](https://github.com/grafana/loki/issues/14311)) ([c708ae6](https://github.com/grafana/loki/commit/c708ae691ca2d9a26b1c2a4591ed32dbfdd94619)), updating Loki Helm chart for restricted environments 
([#14440](https://github.com/grafana/loki/issues/14440)) ([adc7538](https://github.com/grafana/loki/commit/adc75389a39e3aaad69303b82b0d68ec3d94485c)), and supporting ruler sidecar in singleBinary mode ([#13572](https://github.com/grafana/loki/issues/13572)) ([684baf7](https://github.com/grafana/loki/commit/684baf7dbacef4b85a08db8de9934458745124d8)). + +- **mixins:** Multiple improvements to the Loki mixins, including: adding a missing cluster label to mixins ([#12870](https://github.com/grafana/loki/issues/12870)) ([547ca70](https://github.com/grafana/loki/commit/547ca708b9b56e2761bd19ebfcfc9f8571d9af2a)), adding support for partition ingester in dashboards ([#14498](https://github.com/grafana/loki/issues/14498)) ([70deebf](https://github.com/grafana/loki/commit/70deebf26e88c6f2b10c78b3b8ce785c8a16e03b)), adding a Loki compaction not successful alert ([#14239](https://github.com/grafana/loki/issues/14239)) ([da04f50](https://github.com/grafana/loki/commit/da04f5007edd85f35d1af5ba8c2c5a4eb96d2149)), allowing overriding of some labels by parameterizing mixin recording/alert rules ([#11495](https://github.com/grafana/loki/issues/11495)) ([f1425b6](https://github.com/grafana/loki/commit/f1425b6c24e9d90c99477f67289c3aa34f69573d)), allowing disabling of bloom dashboards ([#14177](https://github.com/grafana/loki/issues/14177)) ([ce2e6d5](https://github.com/grafana/loki/commit/ce2e6d520b48fe9c5c7593ae2400a6983905782e)), allowing hiding of unused rows in loki-operational ([#13646](https://github.com/grafana/loki/issues/13646)) ([3aa4f22](https://github.com/grafana/loki/commit/3aa4f2227e4178f05e6b13cffc044989c7839372)), and merging read-resources and write-resources dashboards for SSD into one dashboard that also includes the backend-resources([#13471](https://github.com/grafana/loki/issues/13471)) ([45b8719](https://github.com/grafana/loki/commit/45b8719aa768db35d4e7559fd87e22056248b912)). 
+ +- **operator:** Multiple updates to the Operator, including: Adding support for Loki OTLP limits configuration ([#13446](https://github.com/grafana/loki/issues/13446)) ([d02f435](https://github.com/grafana/loki/commit/d02f435d3bf121b19e15de4f139c95a6d010b25c)), providing default OTLP attribute configuration ([#14410](https://github.com/grafana/loki/issues/14410)) ([1b52387](https://github.com/grafana/loki/commit/1b5238721994c00764b6a7e7d63269c5b56d2480)), adding a User-guide for OTLP configuration ([#14620](https://github.com/grafana/loki/issues/14620)) ([27b4071](https://github.com/grafana/loki/commit/27b40713540bd60918780cdd4cb645e6761427cb)), updating the Loki operand to v3.1.1 ([#14042](https://github.com/grafana/loki/issues/14042)) ([7ae1588](https://github.com/grafana/loki/commit/7ae1588200396b73a16fadd2610670a5ce5fd747)), updating the Loki operand to v3.2.1 ([#14526](https://github.com/grafana/loki/issues/14526)) ([5e970e5](https://github.com/grafana/loki/commit/5e970e50b166e73f5563e21c23db3ea99b24642e)), renaming the Loki API go module ([#14568](https://github.com/grafana/loki/issues/14568)) ([976d8ab](https://github.com/grafana/loki/commit/976d8ab81c1a79f35d7cec96f6a9c35a9947fa48)), introducing 1x.pico size ([#14407](https://github.com/grafana/loki/issues/14407)) ([57de81d](https://github.com/grafana/loki/commit/57de81d8c27e221832790443cebaf141353c3e3f)), migrating the project layout to kubebuilder go/v4 ([#14447](https://github.com/grafana/loki/issues/14447)) ([dbb3b6e](https://github.com/grafana/loki/commit/dbb3b6edc96f3545a946319c0324518800d286cf)), declaring feature FIPS support for OpenShift only ([#14308](https://github.com/grafana/loki/issues/14308)) ([720c303](https://github.com/grafana/loki/commit/720c3037923c174e71a02d99d4bee6271428fbdb)). 
+ +- **docs:** New videos and tutorials, including a New Cloud deployment guide for [Deploying Loki on AWS](https://grafana.com/docs/loki//setup/install/helm/deployment-guides/aws/), a new [Fluent Bit tutorial](https://grafana.com/docs/loki//send-data/fluentbit/fluent-bit-loki-tutorial/) and a [Getting Started with OpenTelemetry Collector tutorial](https://grafana.com/docs/loki//send-data/otel/otel-collector-getting-started/). Also there are improvements to the Query documentation, including a new [Query Best Practices topic](https://grafana.com/docs/loki//query/bp-query/), a reorganized [Query Template](https://grafana.com/docs/loki//query/template_functions/), and a revised [LogQL Simulator](https://grafana.com/docs/loki//query/analyzer/) topic. + +Other improvements include the following: + +- **api:** Added endpoint to return detected field values ([#14350](https://github.com/grafana/loki/issues/14350)) ([7983f94](https://github.com/grafana/loki/commit/7983f94b15b422b94517641bd9cec5c9da6903e1)). +- **blooms:** Add disk-backed queue for the bloom-planner (backport k227) ([#14927](https://github.com/grafana/loki/issues/14927)) ([1f6828b](https://github.com/grafana/loki/commit/1f6828b25c5c5d6ad5eda3be60a435db8ca55fc3)). +- **blooms:** Only write key and key=value to blooms ([#14686](https://github.com/grafana/loki/issues/14686)) ([3af0004](https://github.com/grafana/loki/commit/3af0004cb4d4dafbcbe099e4409edf6e6ff056a5)). +- **blooms:** Do not add empty blooms to offsets ([#14577](https://github.com/grafana/loki/issues/14577)) ([51c42e8](https://github.com/grafana/loki/commit/51c42e864563f2fa9ffc160cb13f6d6126ea5c6d)). +- **blooms:** Extract task computing into a strategy interface ([#13690](https://github.com/grafana/loki/issues/13690)) ([ab5e6ea](https://github.com/grafana/loki/commit/ab5e6eaaeea24f93f434dcece6ff5d9dc83e6d32)). 
+- **blooms:** New bloom planning using chunk size TSDB stats ([#14547](https://github.com/grafana/loki/issues/14547)) ([673ede1](https://github.com/grafana/loki/commit/673ede16a5f675684f9e6a53903335af5075a507)). +- **blooms:** Remove compression of `.tar` archived bloom blocks ([#14159](https://github.com/grafana/loki/issues/14159)) ([cdf084f](https://github.com/grafana/loki/commit/cdf084fdaeaf632e7c078022c6ad4322bfef2989)). +- **compactor:** Add backoff mechanism to the retention process ([#14182](https://github.com/grafana/loki/issues/14182)) ([3136880](https://github.com/grafana/loki/commit/31368806a9c5e0ff6c43045e008861f26ed61af3)). +- **distributor:** Track discarded data by usageTracker ([#14081](https://github.com/grafana/loki/issues/14081)) ([c65721e](https://github.com/grafana/loki/commit/c65721e7ade0ef89fd282d9f764fb2d05f6b9c42)). +- **distributor:** Add ability to log stream selectors before service name detection ([#14154](https://github.com/grafana/loki/issues/14154)) ([d7ff426](https://github.com/grafana/loki/commit/d7ff42664681794b9ef5026ac3758cdd9569ac1a)). +- **distributors:** Use a pool of workers to push to ingesters. ([#14245](https://github.com/grafana/loki/issues/14245)) ([f80d68a](https://github.com/grafana/loki/commit/f80d68a1edbd85a605be882eb0104b169343cf00)). +- **fluentd:** Support custom http headers ([#14299](https://github.com/grafana/loki/issues/14299)) ([e59035e](https://github.com/grafana/loki/commit/e59035e17315f453d4b2e2334330bc062d40f0fd)). +- **ingester:** Implement owned streams calculation using Partition Ring ([#14282](https://github.com/grafana/loki/issues/14282)) ([3c36ba9](https://github.com/grafana/loki/commit/3c36ba949d65e803cc6702b8664f87aca07ed052)). +- **ingester:** Implement partition shuffle sharding for ingester ([#14304](https://github.com/grafana/loki/issues/14304)) ([1a4436c](https://github.com/grafana/loki/commit/1a4436c41721e3e6aca82c26abaec8fe6f775d9f)). 
+- **jsonnet:** Allow to name prefix zoned ingesters ([#14260](https://github.com/grafana/loki/issues/14260)) ([fac3177](https://github.com/grafana/loki/commit/fac3177814b8d2914eb3af618d571104eba18934)). +- **labels:** Add app_name as a service label ([#13660](https://github.com/grafana/loki/issues/13660)) ([f2a16f4](https://github.com/grafana/loki/commit/f2a16f43b27503ba9ee76bac2b44d825ce030e0f)). +- **logcli:** Add gzip compression option ([#14598](https://github.com/grafana/loki/issues/14598)) ([4d3f9f5](https://github.com/grafana/loki/commit/4d3f9f5a7b483b563348c322958486825d314526)). +- **logql:** Introduce shardable probabilistic topk for instant queries. (backport k227) ([#14765](https://github.com/grafana/loki/issues/14765)) ([02eb024](https://github.com/grafana/loki/commit/02eb02458e99d4dcb2f734f6a8e83bbd76a8ea4f)). +- **logql:** Micro-optimizations for IP filter ([#14072](https://github.com/grafana/loki/issues/14072)) ([c5083c7](https://github.com/grafana/loki/commit/c5083c7f1ff2f86c74b96c9a87cead78ee6fb3cd)). +- **logql:** Add query user and query source to "executing query" log lines ([#14320](https://github.com/grafana/loki/issues/14320)) ([4d69929](https://github.com/grafana/loki/commit/4d6992982d99a542f1e99af18b691830b71469e0)). +- **promtail:** Add structured metadata to the promtail push API ([#14153](https://github.com/grafana/loki/issues/14153)) ([66cffcb](https://github.com/grafana/loki/commit/66cffcb427bda28af6fbcfcf85a34771db3787bc)). +- **querier:** Move detected field logic to query frontend ([#14212](https://github.com/grafana/loki/issues/14212)) ([36ace66](https://github.com/grafana/loki/commit/36ace66b73e9f9ad2a2d367fbc20803c0d9779c2)). +- **ruler:** Enable ruler store that uses clients from thanos-io/objstore pkg ([#11713](https://github.com/grafana/loki/issues/11713)) ([8bca2e7](https://github.com/grafana/loki/commit/8bca2e76089e0b9894b7a4c18a950f4baaa5a412)). 
+- **storage:** Add retries for s3 ObjectExists calls ([#14062](https://github.com/grafana/loki/issues/14062)) ([73cbbb0](https://github.com/grafana/loki/commit/73cbbb0f2257b9eb5a3bf5d2cf1f4d4d2490d47d)). +- **storage:** Implement IsRetryableErr for S3ObjectClient ([#14174](https://github.com/grafana/loki/issues/14174)) ([fc90a63](https://github.com/grafana/loki/commit/fc90a63636c689993bd9b568f9c54198bfb1f3ae)). +- **storage:** Introduce new `ObjectExistsWithSize` API to ([#14268](https://github.com/grafana/loki/issues/14268)) ([ac422b3](https://github.com/grafana/loki/commit/ac422b3bc3e822b4525401496a8b73e91d566128)). +- **storage:** AWS backend using thanos.io/objstore ([#11221](https://github.com/grafana/loki/issues/11221)) ([b872246](https://github.com/grafana/loki/commit/b87224647dc88901c61cb4bd571dfda9405a7826)). +- **storage:** Azure backend using thanos.io/objstore ([#11315](https://github.com/grafana/loki/issues/11315)) ([5824e3d](https://github.com/grafana/loki/commit/5824e3d35cd1273ccd1a63d7381098617a7697dd)). +- **storage:** GCS backend using thanos.io/objstore ([#11132](https://github.com/grafana/loki/issues/11132)) ([c059ace](https://github.com/grafana/loki/commit/c059ace53edba79864a567035b120db80addf23c)). +- **structured metadata:** Include structured_metadata size while asserting rate limit ([#14571](https://github.com/grafana/loki/issues/14571)) ([a962edb](https://github.com/grafana/loki/commit/a962edba332f4fdfee29cf11e70019b1b498c258)). + +## Deprecations + +One of the focuses of Loki 3.0 was cleaning up unused code and old features that had been previously deprecated but not removed. Loki 3.0 removed a number of previous deprecations and introduced some new deprecations. Some of the main areas with changes include: + +- [Deprecated storage options](https://grafana.com/docs/loki//storage/) including the deprecation of the BoltDB store. + +- [Deprecated configuration options](https://grafana.com/docs/loki//configure/). 
+ +- [API endpoint deprecations](https://grafana.com/docs/loki//reference/api/#deprecated-endpoints). + +To learn more about breaking changes in this release, refer to the [Upgrade guide](https://grafana.com/docs/loki//setup/upgrade/). + +{{< docs/shared source="alloy" lookup="agent-deprecation.md" version="next" >}} + +## Upgrade Considerations + +For important upgrade guidance, refer to the [Upgrade Guide](https://grafana.com/docs/loki//setup/upgrade/). + +- **BREAKING CHANGE - blooms:** Introduce a new block schema (V3) ([#14038](https://github.com/grafana/loki/issues/14038)). +- **BREAKING CHANGE - blooms:** Index structured metadata into blooms ([#14061](https://github.com/grafana/loki/issues/14061)). +- **BREAKING CHANGE - operator:** Migrate project layout to kubebuilder go/v4 ([#14447](https://github.com/grafana/loki/issues/14447)). +- **BREAKING CHANGE - operator:** Rename Loki API go module ([#14568](https://github.com/grafana/loki/issues/14568)). +- **BREAKING CHANGE - operator:** Provide default OTLP attribute configuration ([#14410](https://github.com/grafana/loki/issues/14410)). + +## Bug fixes + +### 3.3.0 (2024-11-19) + +- **blooms:** Add tenant limits as dependency to pattern ingester ([#14665](https://github.com/grafana/loki/issues/14665)) ([31eea90](https://github.com/grafana/loki/commit/31eea9042ada6650227eb281a36410ab521817a8)). +- **blooms:** Check length of tasks before accessing first element in slice ([#14634](https://github.com/grafana/loki/issues/14634)) ([601f549](https://github.com/grafana/loki/commit/601f549656efa5ac769a685169d5bc84eff15a35)). +- **blooms:** Copy chunks from ForSeries (backport k227) ([#14864](https://github.com/grafana/loki/issues/14864)) ([d10f79c](https://github.com/grafana/loki/commit/d10f79c700c100d7333e682287aabbaa3c029768)). 
+- **blooms:** Do not restart builders when planner disconnects (backport k227) ([#14922](https://github.com/grafana/loki/issues/14922)) ([213e8ee](https://github.com/grafana/loki/commit/213e8eeba6e7fb138069e2858d62f1e3c4556a0e)). +- **blooms:** Exclude label filters where label name is part of the series labels. ([#14661](https://github.com/grafana/loki/issues/14661)) ([d1668f6](https://github.com/grafana/loki/commit/d1668f6a110f7119ebb1cc0e582be369b2af95b8)). +- **blooms:** Fix panic in initialization of the bloom planner and builder ([#14110](https://github.com/grafana/loki/issues/14110)) ([8307c42](https://github.com/grafana/loki/commit/8307c42c541e769c9d0133df3856af049a815b73)). +- **blooms:** Fix strategy logger and add task test (backport k227) ([#14921](https://github.com/grafana/loki/issues/14921)) ([dc36a1e](https://github.com/grafana/loki/commit/dc36a1e1288a03b68d269ba261f41ac7c2942962)). +- **blooms:** Fix tenants slice on loadTenantTables (backport k227) ([#14901](https://github.com/grafana/loki/issues/14901)) ([540dd5a](https://github.com/grafana/loki/commit/540dd5a5ccb53bc2ee4236871632c7e1daa7f7e5)). +- **blooms:** Skip multi-tenant TSDBs during bloom planning (backport k227) ([#14888](https://github.com/grafana/loki/issues/14888)) ([631cff3](https://github.com/grafana/loki/commit/631cff345cdab110202d757572fbbf8088c0be87)). +- **blooms:** Improve docs for min and max table offsets (backport k227) ([#14929](https://github.com/grafana/loki/issues/14929)) ([3161fdc](https://github.com/grafana/loki/commit/3161fdcc6dc1e80a86933a59e6af102c10336c39)). +- **blooms:** Fix lint errors ([#14574](https://github.com/grafana/loki/issues/14574)) ([99ef900](https://github.com/grafana/loki/commit/99ef9009e5e2e74f76c865fbb3feaf1559f4b47c)). +- **build:** Use Debian Bullseye base image for build image ([#14368](https://github.com/grafana/loki/issues/14368)) ([3beb8ff](https://github.com/grafana/loki/commit/3beb8ff9cfe7f765b5d5db87892981a223d72f50)). 
+- **canary:** Reconnect immediately upon tail max duration ([#14287](https://github.com/grafana/loki/issues/14287)) ([9267ee3](https://github.com/grafana/loki/commit/9267ee3561ccbb90589600d7b045f7e05b1b2ee0)). +- **ci:** Fix `Publish Rendered Helm Chart Diff` workflow ([#14365](https://github.com/grafana/loki/issues/14365)) ([6de6420](https://github.com/grafana/loki/commit/6de64209547ec970cb27564be87fe2085307e183)). +- **ci:** Update helm diff rendering workflow ([#14424](https://github.com/grafana/loki/issues/14424)) ([916e511](https://github.com/grafana/loki/commit/916e5115d9099e82834f0d8e123273c75c9cddec)). +- **config:** Copy Alibaba and IBM object storage configuration from common ([#14297](https://github.com/grafana/loki/issues/14297)) ([59ff1ec](https://github.com/grafana/loki/commit/59ff1ece1dacc461d03f71e41c0728396727eee6)). +- **config:** Migrate renovate configuration ([#14646](https://github.com/grafana/loki/issues/14646)) ([a67d8ef](https://github.com/grafana/loki/commit/a67d8ef219aab80071e8256a6cbb18a47c7078e6)). +- **deps:** Update aws-sdk-go-v2 monorepo ([#13986](https://github.com/grafana/loki/issues/13986)) ([6f49123](https://github.com/grafana/loki/commit/6f491233cae226d54d190521d2b935249d88ad05)). +- **deps:** update aws-sdk-go-v2 monorepo ([#14742](https://github.com/grafana/loki/issues/14742)) ([53a1ab7](https://github.com/grafana/loki/commit/53a1ab76257d900b80334d68439d7ff4bfcfd39b)). +- **deps:** update github.com/grafana/dskit digest to 687ec48 ([#14395](https://github.com/grafana/loki/issues/14395)) ([c2f38e1](https://github.com/grafana/loki/commit/c2f38e18c6b8dd134b8f3da164afc9c8625f2f2b)). +- **deps:** update github.com/grafana/dskit digest to 7c41a40 ([#14277](https://github.com/grafana/loki/issues/14277)) ([f39cdbd](https://github.com/grafana/loki/commit/f39cdbd541d85a961db655e70da713be04d9a294)). 
+- **deps:** update github.com/grafana/dskit digest to 931a021 ([#14032](https://github.com/grafana/loki/issues/14032)) ([7c18642](https://github.com/grafana/loki/commit/7c186425210f892d34a2ccf8ad23b475af8bf9b9)). +- **deps:** update github.com/grafana/dskit digest to b69ac1b ([#14355](https://github.com/grafana/loki/issues/14355)) ([9d7a6ea](https://github.com/grafana/loki/commit/9d7a6ea68053b576553e426d339961d50ee07080)). +- **deps:** update github.com/grafana/dskit digest to f52de24 ([#14319](https://github.com/grafana/loki/issues/14319)) ([a4f3edf](https://github.com/grafana/loki/commit/a4f3edfb52ad4a44a17aaeb753a780b08d6b552c)). +- **deps:** update github.com/twmb/franz-go/pkg/kfake digest to cea7aa5 ([#14590](https://github.com/grafana/loki/issues/14590)) ([688c42a](https://github.com/grafana/loki/commit/688c42a971589be96921ce362c7fc6792368c3da)). +- **deps:** update k8s.io/utils digest to 702e33f ([#14033](https://github.com/grafana/loki/issues/14033)) ([b7eecc7](https://github.com/grafana/loki/commit/b7eecc7a693e96f4d0fe0dcd7583ecdc4dd7283f)). +- **deps:** Update module cloud.google.com/go/bigtable to v1.33.0 ([#14580](https://github.com/grafana/loki/issues/14580)) ([a0920ed](https://github.com/grafana/loki/commit/a0920ed9929080926f0f439182cb2428e938c208)). +- **deps:** Update module cloud.google.com/go/pubsub to v1.45.0 ([#14361](https://github.com/grafana/loki/issues/14361)) ([4351238](https://github.com/grafana/loki/commit/4351238305a680852b6b29a7cdaef69e46042ee4)). +- **deps:** Update module cloud.google.com/go/pubsub to v1.45.1 ([#14650](https://github.com/grafana/loki/issues/14650)) ([f173708](https://github.com/grafana/loki/commit/f17370867b70f65528d98fbfe751d079b5909be0)). +- **deps:** Update module cloud.google.com/go/storage to v1.46.0 ([#14744](https://github.com/grafana/loki/issues/14744)) ([8e45116](https://github.com/grafana/loki/commit/8e451165add426e480b2e691c7c69252d98a2d22)). 
+- **deps:** Update module github.com/alicebob/miniredis/v2 to v2.33.0 ([#14721](https://github.com/grafana/loki/issues/14721)) ([7bfda25](https://github.com/grafana/loki/commit/7bfda259721c2b3858066ab71d9df09ad35895a6)). +- **deps:** Update module github.com/aws/aws-sdk-go to v1.55.5 ([#14715](https://github.com/grafana/loki/issues/14715)) ([03f0f5a](https://github.com/grafana/loki/commit/03f0f5ab1691550eea59431c9c580530c13bf259)). +- **deps:** Update module github.com/axiomhq/hyperloglog to v0.2.0 ([#14722](https://github.com/grafana/loki/issues/14722)) ([0167b22](https://github.com/grafana/loki/commit/0167b22ac6d4886a1c3157437a3c5b19e327723a)). +- **deps:** Update module github.com/baidubce/bce-sdk-go to v0.9.189 ([#14044](https://github.com/grafana/loki/issues/14044)) ([7fb34b4](https://github.com/grafana/loki/commit/7fb34b4884269e7dad7cfa27969f470d9466279d)). +- **deps:** Update module github.com/baidubce/bce-sdk-go to v0.9.192 ([#14337](https://github.com/grafana/loki/issues/14337)) ([6f7cae2](https://github.com/grafana/loki/commit/6f7cae2a7aae471c8161bd1e596a31fa89c48ae1)). +- **deps:** Update module github.com/baidubce/bce-sdk-go to v0.9.196 ([#14651](https://github.com/grafana/loki/issues/14651)) ([478085a](https://github.com/grafana/loki/commit/478085ae02a0df3b2455211326519dd4aef26499)). +- **deps:** Update module github.com/baidubce/bce-sdk-go to v0.9.197 ([#14682](https://github.com/grafana/loki/issues/14682)) ([b898294](https://github.com/grafana/loki/commit/b89829421ee3a4589efe34a4b1332fe659c9d8e7)). +- **deps:** Update module github.com/coder/quartz to v0.1.2 ([#14652](https://github.com/grafana/loki/issues/14652)) ([7459e07](https://github.com/grafana/loki/commit/7459e07adb6aac48b305d50582eac915ea26528e)). +- **deps:** Update module github.com/felixge/fgprof to v0.9.5 ([#14338](https://github.com/grafana/loki/issues/14338)) ([a2ad3aa](https://github.com/grafana/loki/commit/a2ad3aa66940faae4fef7f92aab5a383f576190e)). 
+- **deps:** Update module github.com/fsouza/fake-gcs-server to v1.50.2 ([#14313](https://github.com/grafana/loki/issues/14313)) ([275c97c](https://github.com/grafana/loki/commit/275c97cec7f70e68c56192c565d53a6c2a18ff78)). +- **deps:** Update module github.com/hashicorp/raft to v1.7.1 ([#14005](https://github.com/grafana/loki/issues/14005)) ([e9cec1d](https://github.com/grafana/loki/commit/e9cec1d159b02977b6104e0006902e0d6b805527)). +- **deps:** Update module github.com/ibm/go-sdk-core/v5 to v5.17.5 ([#14045](https://github.com/grafana/loki/issues/14045)) ([677d217](https://github.com/grafana/loki/commit/677d217533b7d2338e25a8b9b9e8a78045489e7c)). +- **deps:** Update module github.com/ibm/go-sdk-core/v5 to v5.18.1 ([#14716](https://github.com/grafana/loki/issues/14716)) ([8395acd](https://github.com/grafana/loki/commit/8395acd0cbd3db9c6f330bd94a22b194fad35a93)). +- **deps:** Update module github.com/ibm/ibm-cos-sdk-go to v1.11.1 ([#14342](https://github.com/grafana/loki/issues/14342)) ([aa82a7c](https://github.com/grafana/loki/commit/aa82a7c804edd6df99d3fddc581d02c3b7fa6774)). +- **deps:** Update module github.com/klauspost/compress to v1.17.10 ([#14352](https://github.com/grafana/loki/issues/14352)) ([e23c5ed](https://github.com/grafana/loki/commit/e23c5ed9fa97010ef4c985afea25af3922ca215b)). +- **deps:** Update module github.com/minio/minio-go/v7 to v7.0.76 ([#14006](https://github.com/grafana/loki/issues/14006)) ([51f9376](https://github.com/grafana/loki/commit/51f937684795982f0d234ab251017ce2c86c9e20)). +- **deps:** Update module github.com/minio/minio-go/v7 to v7.0.77 ([#14353](https://github.com/grafana/loki/issues/14353)) ([d0e3ef7](https://github.com/grafana/loki/commit/d0e3ef709a222821fd764f6af72308c302faefb3)). +- **deps:** Update module github.com/minio/minio-go/v7 to v7.0.80 ([#14654](https://github.com/grafana/loki/issues/14654)) ([eec2513](https://github.com/grafana/loki/commit/eec25130468eb648c4667361cae7630449af7ef5)). 
+- **deps:** Update module github.com/ncw/swift/v2 to v2.0.3 ([#14356](https://github.com/grafana/loki/issues/14356)) ([c843288](https://github.com/grafana/loki/commit/c8432887d3d4459ad4bc40deba3a3a3726a2f5eb)). +- **deps:** Update module github.com/prometheus/client_golang to v1.20.5 ([#14655](https://github.com/grafana/loki/issues/14655)) ([e12f843](https://github.com/grafana/loki/commit/e12f8436b4080db54c6d31c6af38416c6fdd7eb4)). +- **deps:** Update module github.com/schollz/progressbar/v3 to v3.17.0 ([#14720](https://github.com/grafana/loki/issues/14720)) ([4419d0f](https://github.com/grafana/loki/commit/4419d0f33e9f4f6f9305d89dd6f2ca47e3a18d8c)). +- **deps:** Update module github.com/shirou/gopsutil/v4 to v4.24.10 ([#14719](https://github.com/grafana/loki/issues/14719)) ([3280376](https://github.com/grafana/loki/commit/32803762781c53ec3fe1bdb64841eb24aeed48f5)). +- **deps:** Update module github.com/shirou/gopsutil/v4 to v4.24.9 ([#14357](https://github.com/grafana/loki/issues/14357)) ([c8e6a9d](https://github.com/grafana/loki/commit/c8e6a9d38f36ccf1f32e634765bb2363628f3710)). +- **deps:** Update module github.com/shopify/sarama to v1.43.3 ([#14059](https://github.com/grafana/loki/issues/14059)) ([1cf4813](https://github.com/grafana/loki/commit/1cf48131d42db7302d6bcf980c355b018fcedb06)). +- **deps:** Update module github.com/spf13/afero to v1.11.0 ([#14060](https://github.com/grafana/loki/issues/14060)) ([bbbd82b](https://github.com/grafana/loki/commit/bbbd82bc73322d662ba81efeda3884efcdc09708)). +- **deps:** Update module go.etcd.io/bbolt to v1.3.11 ([#14358](https://github.com/grafana/loki/issues/14358)) ([b7bccfc](https://github.com/grafana/loki/commit/b7bccfcec3275b1d6d76c7450415ac8744e4d7b0)). +- **deps:** Update module golang.org/x/net to v0.29.0 ([#14341](https://github.com/grafana/loki/issues/14341)) ([1b6b9da](https://github.com/grafana/loki/commit/1b6b9da4e126738037e24d09309b62eac7d54a10)). 
+- **deps:** Downgrade grpc to fix regression ([#14065](https://github.com/grafana/loki/issues/14065)) ([8c38d46](https://github.com/grafana/loki/commit/8c38d462f5a057497ab222d463223400f2e7b4ab)).
+- **deps:** Fix missing dep PartitionRing for Ingester ([#14292](https://github.com/grafana/loki/issues/14292)) ([6354ded](https://github.com/grafana/loki/commit/6354deda90a9430856447e27123b3a33fd1b77a0)).
+- **deps:** nix build, downgrade toolchain to go1.23.1 ([#14442](https://github.com/grafana/loki/issues/14442)) ([26dfd62](https://github.com/grafana/loki/commit/26dfd628f0effe2367420f591da36727ebe78806)).
+- **deps:** Revert "fix(deps): Update module github.com/shirou/gopsutil/v4 to v4.24.9 ([#14357](https://github.com/grafana/loki/issues/14357))" ([#14437](https://github.com/grafana/loki/issues/14437)) ([d53955b](https://github.com/grafana/loki/commit/d53955bbff5abae63a166099cef1f26b450a31f1)).
+- **detected_fields:** Return parsed labels when parsers are passed ([#14047](https://github.com/grafana/loki/issues/14047)) ([aa1ac99](https://github.com/grafana/loki/commit/aa1ac99f4d369c87fd0db4fcf853ebce534e3500)).
+- **detected_fields:** Always return empty array as `null` ([#14112](https://github.com/grafana/loki/issues/14112)) ([93009d4](https://github.com/grafana/loki/commit/93009d4e8ce520a3925bf5c0baff940db6c9caba)).
+- **docs:** Correct OTLP documentation typo ([#14602](https://github.com/grafana/loki/issues/14602)) ([063c590](https://github.com/grafana/loki/commit/063c590faa4aa30540572c5d6fdc1da8a6a25ee4)).
+- **distributor:** Always write detected_level when enabled, even if unknown ([#14464](https://github.com/grafana/loki/issues/14464)) ([41c6b6c](https://github.com/grafana/loki/commit/41c6b6c2c2f5f56ca76cf75ed05689564b9e9dcd)).
+- **distributor:** Validate partition ring if kafka is enabled ([#14303](https://github.com/grafana/loki/issues/14303)) ([8438d41](https://github.com/grafana/loki/commit/8438d415931f0a3763d551eb36c3d9f476f70713)).
+- **distributor:** Level detection for warning level ([#14444](https://github.com/grafana/loki/issues/14444)) ([242a852](https://github.com/grafana/loki/commit/242a852d7d471351ea294fc09e2b5dc62eec0d03)). +- **explore logs:** Correctly create logfmt string ([#14124](https://github.com/grafana/loki/issues/14124)) ([63e84b4](https://github.com/grafana/loki/commit/63e84b476a9a7b97a121847659172fadbb8a1eee)). +- **explore logs:** Allow any level for aggregated metrics ([#14255](https://github.com/grafana/loki/issues/14255)) ([c001a1d](https://github.com/grafana/loki/commit/c001a1d93af5438fef521460dcba650b44629a93)). +- **helm:** Add missing `loki.storage.azure.chunkDelimiter` parameter to Helm chart ([#14011](https://github.com/grafana/loki/issues/14011)) ([08c70cc](https://github.com/grafana/loki/commit/08c70cca2e7b3a7444b0ec9822a6d5fd58ae70d5)). +- **helm:** Check for `rbac.namespaced` condition before creating roles ([#14201](https://github.com/grafana/loki/issues/14201)) ([3f47f09](https://github.com/grafana/loki/commit/3f47f09a6956719480677f6af02f58394d7f26bb)). +- **helm:** Fix persistence configuration for Memcached ([#14049](https://github.com/grafana/loki/issues/14049)) ([ee6e1cf](https://github.com/grafana/loki/commit/ee6e1cf78864ad3ed915056f695e1f556cc4a22e)). +- **helm:** Fix wrong port name referenced for ingress NetworkPolicy ([#12907](https://github.com/grafana/loki/issues/12907)) ([963a25b](https://github.com/grafana/loki/commit/963a25bf417bbd4171c4d9a2b501330fd663410f)). +- **helm:** Various fixes and enhancements for bloom components ([#14128](https://github.com/grafana/loki/issues/14128)) ([dc0cbd4](https://github.com/grafana/loki/commit/dc0cbd42dcb8e53152573f0baf03ad93aa0d3cd8)). +- **ingester:** Report PSRL error message correctly ([#14187](https://github.com/grafana/loki/issues/14187)) ([a475153](https://github.com/grafana/loki/commit/a47515300a5cfac667eca1ca8e8d1a71e590b7d2)). 
+- **ingester:** Wait for OwnedStreams service in Ingester startup ([#14208](https://github.com/grafana/loki/issues/14208)) ([a4aee4f](https://github.com/grafana/loki/commit/a4aee4f4ff494b525f68c9c6c1ae3417a8e61ebe)). +- **lambda-promtail:** Add s3 principal to iam policy attached to sqs in lambda-promtail terraform code ([#14619](https://github.com/grafana/loki/issues/14619)) ([db0889e](https://github.com/grafana/loki/commit/db0889e2748b69a5c60d044dfab44bc652f1464d)). +- **logcli:** Check for errors before checking for `exists` when fetching data (backport k227) ([#14906](https://github.com/grafana/loki/issues/14906)) ([31b2a63](https://github.com/grafana/loki/commit/31b2a63ee23098fbd0151ef93020bd1cac093afe)). +- **logcli:** Create new tail response for every line ([#14525](https://github.com/grafana/loki/issues/14525)) ([bcfd0d1](https://github.com/grafana/loki/commit/bcfd0d1ad1c72c6c3861c8263989f2ce683eee08)). +- **logql:** Fix panic in json parsing when using empty array index ([#14393](https://github.com/grafana/loki/issues/14393)) ([833bf0d](https://github.com/grafana/loki/commit/833bf0def6a07e2f58996f54b4b983858750e3e3)). +- **logql:** Update JSONExpressionParser to not unescape extracted values if it is JSON object. ([#14499](https://github.com/grafana/loki/issues/14499)) ([08b1a90](https://github.com/grafana/loki/commit/08b1a9080b03bc041471f1ef72c4e3d7c6aea4f4)). +- **logql:** Allow rename of structured metadata labels ([#13955](https://github.com/grafana/loki/issues/13955)) ([2d4792a](https://github.com/grafana/loki/commit/2d4792a54fb52caa5cd904a17349b04410fae4c0)). +- **logql:** Renamed label should use ParsedLabel category ([#14515](https://github.com/grafana/loki/issues/14515)) ([82fb2f0](https://github.com/grafana/loki/commit/82fb2f0ae2403686b55fdb2fd5be248f706eddab)). 
+- **logql:** Skipping label if it contains special symbol ([#14068](https://github.com/grafana/loki/issues/14068)) ([55e374e](https://github.com/grafana/loki/commit/55e374e85e7275da8f40d1149defd88f31856f25)).
+- **mixin:** Remove pod label from disk usage aggregation ([#14180](https://github.com/grafana/loki/issues/14180)) ([5d45c96](https://github.com/grafana/loki/commit/5d45c96ce12f7f16c21e61db1a78e94a09c16007)).
+- **mixins:** Fix loki-resources-overview panel layout ([#14178](https://github.com/grafana/loki/issues/14178)) ([8f54ec6](https://github.com/grafana/loki/commit/8f54ec65881bcad90078464d663af9110ef72603)).
+- **mixins:** Add backend path section in loki-operational for single scalable deployment ([#13023](https://github.com/grafana/loki/issues/13023)) ([16881ab](https://github.com/grafana/loki/commit/16881ab0d3b9e9e6bfc37f22ff69f5f1019a0df1)).
+- **mixins:** Disk space utilization panels with latest KSM versions ([#13486](https://github.com/grafana/loki/issues/13486)) ([0ea7431](https://github.com/grafana/loki/commit/0ea7431139ae0a18ef4e90bed836a7a6b92ab890)).
+- **mixins:** Retention dashboards fix metric name ([#14617](https://github.com/grafana/loki/issues/14617)) ([c762b9b](https://github.com/grafana/loki/commit/c762b9b5d3877e7cbfc41d8ab9a1a4287ebe97b2)).
+- **otlp:** Enable service detection for otlp endpoint ([#14036](https://github.com/grafana/loki/issues/14036)) ([4f962ef](https://github.com/grafana/loki/commit/4f962ef7af250fc347dbed15583787d0238f6e9f)).
+- **operator:** Add 1x.pico OpenShift UI dropdown menu ([#14660](https://github.com/grafana/loki/issues/14660)) ([4687f37](https://github.com/grafana/loki/commit/4687f377db0a7ae07ffdea354582c882c10b72c4)).
+- **operator:** Add missing groupBy label for all rules on OpenShift ([#14279](https://github.com/grafana/loki/issues/14279)) ([ce7b2e8](https://github.com/grafana/loki/commit/ce7b2e89d9470e4e6a61a94f2b51ff8b938b5a5e)).
+- **operator:** Correctly ignore again BlotDB dashboards ([#14587](https://github.com/grafana/loki/issues/14587)) ([4879d10](https://github.com/grafana/loki/commit/4879d106bbeea29e331ddb7c9a49274600190032)). +- **operator:** Disable automatic discovery of service name ([#14506](https://github.com/grafana/loki/issues/14506)) ([3834c74](https://github.com/grafana/loki/commit/3834c74966b307411732cd3cbaf66305008b10eb)). +- **operator:** Disable log level discovery for OpenShift tenancy modes ([#14613](https://github.com/grafana/loki/issues/14613)) ([5034d34](https://github.com/grafana/loki/commit/5034d34ad23451954ea2459c341456da8d93d020)). +- **operator:** Fix building the size-calculator image ([#14573](https://github.com/grafana/loki/issues/14573)) ([a79b8fe](https://github.com/grafana/loki/commit/a79b8fe7802964cbb96bde75a7502a8b1e8a23ab)). +- **operator:** Fix make build target for size-calculator ([#14551](https://github.com/grafana/loki/issues/14551)) ([e727187](https://github.com/grafana/loki/commit/e727187ec3be2f10c80e984d00c40dad0308b036)). +- **operator:** Move OTLP attribute for statefulset name to stream labels ([#14630](https://github.com/grafana/loki/issues/14630)) ([5df3594](https://github.com/grafana/loki/commit/5df3594f791d77031c53d7b0f5b01191de8a23f2)). +- **operator:** Use empty initialized pod status map when no pods ([#14314](https://github.com/grafana/loki/issues/14314)) ([6f533ed](https://github.com/grafana/loki/commit/6f533ed4386ee2db61680a9021934bfe9a9ba749)). +- **operator:** Update renovate ignore for operator API with new module path ([#14581](https://github.com/grafana/loki/issues/14581)) ([c9b2907](https://github.com/grafana/loki/commit/c9b2907f3c97cf0a14837c0b27cad7a06d84f447)). +- **pattern:** Fixes latency metric namespace for tee to pattern ([#14241](https://github.com/grafana/loki/issues/14241)) ([ae955ed](https://github.com/grafana/loki/commit/ae955ed30d841675dbb9e30327b84728050e724a)). 
+- **promtail:** Fix configuration unmarshalling ([#14408](https://github.com/grafana/loki/issues/14408)) ([a05431f](https://github.com/grafana/loki/commit/a05431f879a8c29fac6356b6c46be62133c3e93c)).
+- **promtail:** Fix parser for azureeventhubs message without time field ([#14218](https://github.com/grafana/loki/issues/14218)) ([2e62abb](https://github.com/grafana/loki/commit/2e62abbf47c47041027baf240722b3d76e7bd9a3)).
+- **promtail:** Validate scrape_config job name, do not allow duplicate job names ([#13719](https://github.com/grafana/loki/issues/13719)) ([f2d3499](https://github.com/grafana/loki/commit/f2d349924c2aa0453e49fc607603a189108666ec)).
+- **querier:** Propagate query stats from quantile & topk queries ([#13831](https://github.com/grafana/loki/issues/13831)) ([78b275b](https://github.com/grafana/loki/commit/78b275bf1092d834065315207666d6fd1c505f06)).
+- **querier:** Correct _extracted logic in detected fields ([#14064](https://github.com/grafana/loki/issues/14064)) ([1b3ba53](https://github.com/grafana/loki/commit/1b3ba530b8fab9aac999387a135a76a62de3e000)).
+- **querier:** Timestamp.Time should be called with milliseconds ([#14196](https://github.com/grafana/loki/issues/14196)) ([f8d9143](https://github.com/grafana/loki/commit/f8d9143eead92d8727053e065c2d3403f689e4b5)).
+- **querier:** Report correct status code for metric and log queries in metrics.go ([#12102](https://github.com/grafana/loki/issues/12102)) ([900751c](https://github.com/grafana/loki/commit/900751c3bb008c50441c47eef3927a27201b1a11)).
+- **query:** Fix bug in query result marshaling for invalid utf8 characters ([#14585](https://github.com/grafana/loki/issues/14585)) ([f411a07](https://github.com/grafana/loki/commit/f411a0795af67630a0a70a88ce64fa071de50a56)).
+- Remove usage of unsafe string in label adapter unmarshall ([#14216](https://github.com/grafana/loki/issues/14216)) ([758364c](https://github.com/grafana/loki/commit/758364c7775fba22a84498089a476c21f737d32f)).
+- Rename misspelled filename ([#14237](https://github.com/grafana/loki/issues/14237)) ([cf1d4a3](https://github.com/grafana/loki/commit/cf1d4a31af5c376e82756eaaab267369f862265d)). +- **sharding:** Apply offset to both `from` and `through` in shard request ([#14256](https://github.com/grafana/loki/issues/14256)) ([17c472d](https://github.com/grafana/loki/commit/17c472d9abea6b1cae21de5fe2af8b365bdaf137)). +- **storage:** Add additional validation for timeout while retrieving headers ([#14217](https://github.com/grafana/loki/issues/14217)) ([8322e51](https://github.com/grafana/loki/commit/8322e518e68de286b2bc58cf15ea9fe947eeec86)). +- **storage:** Do not retain span logger created with index set initialized at query time ([#14027](https://github.com/grafana/loki/issues/14027)) ([4e41744](https://github.com/grafana/loki/commit/4e4174400fba410b9f32e0e43c1d866d283a9e62)). +- **storage:** Expand matching for additional variations ([#14221](https://github.com/grafana/loki/issues/14221)) ([71d7291](https://github.com/grafana/loki/commit/71d7291c9c00c3887d9a509991eb4d3e15ae8699)). +- **storage:** Disable client retries when congestion control is enabled in S3([#14588](https://github.com/grafana/loki/issues/14588)) ([cff9f43](https://github.com/grafana/loki/commit/cff9f43dd6fb5e90c875c14c138ea39b58202dff)). +- **storage:** Have GetObject check for canceled context ([#14420](https://github.com/grafana/loki/issues/14420)) ([5f325aa](https://github.com/grafana/loki/commit/5f325aac56e41848979e9e33a4a443e31ea525d0)). +- **storage:** Transform `ObjectExistsWithSize` into `GetAttributes` ([#14329](https://github.com/grafana/loki/issues/14329)) ([2f56f50](https://github.com/grafana/loki/commit/2f56f50cc6591ca482358933c719d005446d0c01)). +- **storage:** Update AWS storage timeout error for Go 1.23 behavior ([#14226](https://github.com/grafana/loki/issues/14226)) ([a4642b5](https://github.com/grafana/loki/commit/a4642b55e9b374ccd974b662e7b17a2389c3dcbd)). 
diff --git a/docs/sources/send-data/otel/_index.md b/docs/sources/send-data/otel/_index.md index 176cd0afd410..ce0b1de64483 100644 --- a/docs/sources/send-data/otel/_index.md +++ b/docs/sources/send-data/otel/_index.md @@ -79,6 +79,7 @@ Since the OpenTelemetry protocol differs from the Loki storage model, here is ho - cloud.region - container.name - deployment.environment + - deployment.environment.name - k8s.cluster.name - k8s.container.name - k8s.cronjob.name diff --git a/docs/sources/send-data/promtail/cloud/eks/values.yaml b/docs/sources/send-data/promtail/cloud/eks/values.yaml index 5d10d87dedca..8d8fcb7b87d2 100644 --- a/docs/sources/send-data/promtail/cloud/eks/values.yaml +++ b/docs/sources/send-data/promtail/cloud/eks/values.yaml @@ -17,7 +17,7 @@ initContainer: image: repository: grafana/promtail - tag: 1.6.1 + tag: 3.3.0 pullPolicy: IfNotPresent ## Optionally specify an array of imagePullSecrets. ## Secrets must be manually created in the namespace. diff --git a/docs/sources/setup/install/helm/reference.md b/docs/sources/setup/install/helm/reference.md index 028843f283dc..5dfd74ea279d 100644 --- a/docs/sources/setup/install/helm/reference.md +++ b/docs/sources/setup/install/helm/reference.md @@ -82,7 +82,8 @@ This is the generated reference for the Loki Helm Chart values. "type": "RollingUpdate" }, "terminationGracePeriodSeconds": 60, - "tolerations": [] + "tolerations": [], + "topologySpreadConstraints": [] } @@ -280,6 +281,15 @@ This is the generated reference for the Loki Helm Chart values.
 []
 
+ + + + adminApi.topologySpreadConstraints + list + Topology Spread Constraints for admin-api pods +
+[]
+
@@ -3052,7 +3062,7 @@ null "pullPolicy": "IfNotPresent", "registry": "docker.io", "repository": "grafana/enterprise-logs", - "tag": null + "tag": "3.3.0" }, "license": { "contents": "NOTAVALIDLICENSE" @@ -3225,7 +3235,7 @@ null string Docker image tag
-null
+"3.3.0"
 
@@ -6080,7 +6090,7 @@ null string Overrides the image tag whose default is the chart's appVersion
-null
+"3.3.0"
 
diff --git a/docs/sources/setup/upgrade/_index.md b/docs/sources/setup/upgrade/_index.md index ae4a80e8909a..dbfb98dad624 100644 --- a/docs/sources/setup/upgrade/_index.md +++ b/docs/sources/setup/upgrade/_index.md @@ -76,6 +76,18 @@ Their YAML counterparts in the `limits_config` block are kept identical. All other CLI arguments (and their YAML counterparts) prefixed with `-bloom-compactor.` have been removed. + +## 3.3.0 + +### Loki + +#### Experimental Bloom Filters + +With Loki 3.3.0, the bloom block format changed and any previously created block is incompatible with the new format. +Before upgrading, we recommend deleting all the existing bloom blocks in the object store. We store bloom blocks and +metas inside the `bloom` path in the configured object store. To get rid of all the bloom blocks, delete all the objects +inside the `bloom` path in the object store. + ## 3.0.0 {{< admonition type="note" >}} diff --git a/docs/sources/shared/configuration.md b/docs/sources/shared/configuration.md index ec2fc098afba..40ee8f681fc1 100644 --- a/docs/sources/shared/configuration.md +++ b/docs/sources/shared/configuration.md @@ -137,6 +137,57 @@ Pass the `-config.expand-env` flag at the command line to enable this way of set # itself to a key value store. [ingester: ] +block_builder: + # How many flushes can happen concurrently + # CLI flag: -blockbuilder.concurrent-flushes + [concurrent_flushes: | default = 1] + + # How many workers to process writes, defaults to number of available cpus + # CLI flag: -blockbuilder.concurrent-writers + [concurrent_writers: | default = 1] + + # The targeted _uncompressed_ size in bytes of a chunk block When this + # threshold is exceeded the head block will be cut and compressed inside the + # chunk. + # CLI flag: -blockbuilder.chunks-block-size + [chunk_block_size: | default = 256KB] + + # A target _compressed_ size in bytes for chunks. 
This is a desired size not + # an exact size, chunks may be slightly bigger or significantly smaller if + # they get flushed for other reasons (e.g. chunk_idle_period). A value of 0 + # creates chunks with a fixed 10 blocks, a non zero value will create chunks + # with a variable number of blocks to meet the target size. + # CLI flag: -blockbuilder.chunk-target-size + [chunk_target_size: | default = 1536KB] + + # The algorithm to use for compressing chunk. (none, gzip, lz4-64k, snappy, + # lz4-256k, lz4-1M, lz4, flate, zstd) + # CLI flag: -blockbuilder.chunk-encoding + [chunk_encoding: | default = "snappy"] + + # The maximum duration of a timeseries chunk in memory. If a timeseries runs + # for longer than this, the current chunk will be flushed to the store and a + # new chunk created. + # CLI flag: -blockbuilder.max-chunk-age + [max_chunk_age: | default = 2h] + + # The interval at which to run. + # CLI flag: -blockbuilder.interval + [interval: | default = 10m] + + backoff_config: + # Minimum delay when backing off. + # CLI flag: -blockbuilder.backoff..backoff-min-period + [min_period: | default = 100ms] + + # Maximum delay when backing off. + # CLI flag: -blockbuilder.backoff..backoff-max-period + [max_period: | default = 10s] + + # Number of times to backoff and retry before failing. + # CLI flag: -blockbuilder.backoff..backoff-retries + [max_retries: | default = 10] + pattern_ingester: # Whether the pattern ingester is enabled. 
# CLI flag: -pattern-ingester.enabled @@ -2264,7 +2315,7 @@ write_failures_logging: otlp_config: # List of default otlp resource attributes to be picked as index labels # CLI flag: -distributor.otlp.default_resource_attributes_as_index_labels - [default_resource_attributes_as_index_labels: | default = [service.name service.namespace service.instance.id deployment.environment cloud.region cloud.availability_zone k8s.cluster.name k8s.namespace.name k8s.pod.name k8s.container.name container.name k8s.replicaset.name k8s.deployment.name k8s.statefulset.name k8s.daemonset.name k8s.cronjob.name k8s.job.name]] + [default_resource_attributes_as_index_labels: | default = [service.name service.namespace service.instance.id deployment.environment deployment.environment.name cloud.region cloud.availability_zone k8s.cluster.name k8s.namespace.name k8s.pod.name k8s.container.name container.name k8s.replicaset.name k8s.deployment.name k8s.statefulset.name k8s.daemonset.name k8s.cronjob.name k8s.job.name]] # Enable writes to Kafka during Push requests. # CLI flag: -distributor.kafka-writes-enabled @@ -3765,6 +3816,10 @@ shard_streams: # CLI flag: -bloom-build.block-encoding [bloom_block_encoding: | default = "none"] +# Experimental. Prefetch blocks on bloom gateways as soon as they are built. +# CLI flag: -bloom-build.prefetch-blocks +[bloom_prefetch_blocks: | default = false] + # Experimental. The maximum bloom block size. A value of 0 sets an unlimited # size. Default is 200MB. The actual block size might exceed this limit since # blooms will be added to blocks until the block exceeds the maximum block size. 
diff --git a/go.mod b/go.mod index bab9fd32ca61..21e9f82c1917 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,7 @@ require ( cloud.google.com/go/bigtable v1.33.0 cloud.google.com/go/pubsub v1.45.1 cloud.google.com/go/storage v1.47.0 + dario.cat/mergo v1.0.1 github.com/Azure/azure-pipeline-go v0.2.3 github.com/Azure/azure-storage-blob-go v0.15.0 github.com/Azure/go-autorest/autorest/adal v0.9.24 @@ -16,11 +17,12 @@ require ( github.com/Masterminds/sprig/v3 v3.3.0 github.com/NYTimes/gziphandler v1.1.1 github.com/Workiva/go-datastructures v1.1.5 + github.com/alecthomas/kingpin/v2 v2.4.0 github.com/alicebob/miniredis/v2 v2.33.0 github.com/aliyun/aliyun-oss-go-sdk v3.0.2+incompatible github.com/aws/aws-sdk-go v1.55.5 github.com/baidubce/bce-sdk-go v0.9.200 - github.com/bmatcuk/doublestar v1.3.4 + github.com/bmatcuk/doublestar/v4 v4.7.1 github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 github.com/cespare/xxhash/v2 v2.3.0 github.com/containerd/fifo v1.1.0 @@ -38,8 +40,7 @@ require ( github.com/fsouza/fake-gcs-server v1.50.2 github.com/go-kit/log v0.2.1 github.com/go-logfmt/logfmt v0.6.0 - github.com/go-redis/redis/v8 v8.11.5 - github.com/gocql/gocql v0.0.0-20200526081602-cd04bd7f22a7 + github.com/gocql/gocql v1.7.0 github.com/gogo/protobuf v1.3.2 // remember to update loki-build-image/Dockerfile too github.com/gogo/status v1.1.1 github.com/golang/protobuf v1.5.4 @@ -55,11 +56,10 @@ require ( github.com/grafana/gomemcache v0.0.0-20240229205252-cd6a66d6fb56 github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc github.com/grafana/tail v0.0.0-20230510142333-77b18831edf0 - github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 + github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0 github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 github.com/hashicorp/consul/api v1.30.0 - github.com/hashicorp/golang-lru v0.6.0 - github.com/imdario/mergo v0.3.16 + github.com/hashicorp/golang-lru/v2 v2.0.7 github.com/influxdata/telegraf v1.16.3 
github.com/jmespath/go-jmespath v0.4.0 github.com/joncrlsn/dque v0.0.0-20211108142734-c2ef48c5192a @@ -86,10 +86,11 @@ require ( github.com/prometheus/client_model v0.6.1 github.com/prometheus/common v0.60.1 github.com/prometheus/prometheus v0.53.2-0.20240726125539-d4f098ae80fb + github.com/redis/go-redis/v9 v9.7.0 github.com/segmentio/fasthash v1.0.3 github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 - github.com/sony/gobreaker v0.5.0 + github.com/sony/gobreaker/v2 v2.0.0 github.com/spf13/afero v1.11.0 github.com/stretchr/testify v1.9.0 github.com/uber/jaeger-client-go v2.30.0+incompatible @@ -102,12 +103,11 @@ require ( golang.org/x/sync v0.9.0 golang.org/x/sys v0.27.0 golang.org/x/time v0.8.0 - google.golang.org/api v0.206.0 + google.golang.org/api v0.208.0 google.golang.org/grpc v1.68.0 - gopkg.in/alecthomas/kingpin.v2 v2.2.6 gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 - k8s.io/klog v1.0.0 + k8s.io/klog/v2 v2.130.1 ) require ( @@ -118,7 +118,6 @@ require ( github.com/IBM/ibm-cos-sdk-go v1.11.1 github.com/axiomhq/hyperloglog v0.2.0 github.com/buger/jsonparser v1.1.1 - github.com/coder/quartz v0.1.2 github.com/d4l3k/messagediff v1.2.1 github.com/dolthub/swiss v0.2.1 github.com/efficientgo/core v1.0.0-rc.3 @@ -126,7 +125,6 @@ require ( github.com/gogo/googleapis v1.4.1 github.com/grafana/jsonparser v0.0.0-20241004153430-023329977675 github.com/grafana/loki/pkg/push v0.0.0-20240924133635-758364c7775f - github.com/hashicorp/golang-lru/v2 v2.0.7 github.com/heroku/x v0.4.0 github.com/influxdata/tdigest v0.0.2-0.20210216194612-fc98d27c9e8b github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db @@ -144,15 +142,15 @@ require ( github.com/twmb/franz-go/plugin/kotel v1.5.0 github.com/twmb/franz-go/plugin/kprom v1.1.0 github.com/willf/bloom v2.0.3+incompatible - go.opentelemetry.io/collector/pdata v1.19.0 + go.opentelemetry.io/collector/pdata v1.20.0 go4.org/netipx 
v0.0.0-20230125063823-8449b0a6169f golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8 golang.org/x/oauth2 v0.24.0 golang.org/x/text v0.20.0 google.golang.org/protobuf v1.35.2 gotest.tools v2.2.0+incompatible - k8s.io/apimachinery v0.31.2 - k8s.io/utils v0.0.0-20240902221715-702e33fdd3c3 + k8s.io/apimachinery v0.31.3 + k8s.io/utils v0.0.0-20241104163129-6fe5fd82f078 ) require ( @@ -160,12 +158,11 @@ require ( cloud.google.com/go/auth v0.10.2 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.5 // indirect cloud.google.com/go/monitoring v1.21.2 // indirect - dario.cat/mergo v1.0.1 // indirect github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.24.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.48.1 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.48.1 // indirect - github.com/dlclark/regexp2 v1.4.0 // indirect + github.com/dlclark/regexp2 v1.11.0 // indirect github.com/ebitengine/purego v0.8.1 // indirect github.com/fxamacker/cbor/v2 v2.7.0 // indirect github.com/gabriel-vasile/mimetype v1.4.3 // indirect @@ -173,6 +170,8 @@ require ( github.com/go-ole/go-ole v1.2.6 // indirect github.com/goccy/go-json v0.10.3 // indirect github.com/gorilla/handlers v1.5.2 // indirect + github.com/hashicorp/golang-lru v0.6.0 // indirect + github.com/imdario/mergo v0.3.16 // indirect github.com/lufia/plan9stats v0.0.0-20220913051719-115f729f3c8c // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/sys/userns v0.1.0 // indirect @@ -185,6 +184,7 @@ require ( github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/x448/float16 v0.8.4 // indirect + github.com/xhit/go-str2duration/v2 v2.1.0 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.29.0 // 
indirect go.opentelemetry.io/otel/sdk v1.29.0 // indirect @@ -214,8 +214,7 @@ require ( github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.3.0 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect - github.com/alecthomas/chroma v0.10.0 - github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect + github.com/alecthomas/chroma/v2 v2.14.0 github.com/alecthomas/units v0.0.0-20240626203959-61d1e3462e30 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect github.com/armon/go-metrics v0.4.1 // indirect @@ -363,15 +362,14 @@ require ( golang.org/x/mod v0.19.0 // indirect golang.org/x/term v0.26.0 // indirect golang.org/x/tools v0.23.0 // indirect - google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28 // indirect + google.golang.org/genproto v0.0.0-20241113202542-65e8d215514f // indirect google.golang.org/genproto/googleapis/api v0.0.0-20241104194629-dd2ea8efbc28 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241113202542-65e8d215514f // indirect gopkg.in/fsnotify/fsnotify.v1 v1.4.7 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect k8s.io/api v0.29.3 // indirect k8s.io/client-go v0.29.3 // indirect - k8s.io/klog/v2 v2.130.1 // indirect k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 // indirect rsc.io/binaryregexp v0.2.0 // indirect sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect @@ -379,7 +377,7 @@ require ( sigs.k8s.io/yaml v1.4.0 // indirect ) -replace github.com/Azure/azure-sdk-for-go => github.com/Azure/azure-sdk-for-go v36.2.0+incompatible +replace github.com/Azure/azure-sdk-for-go => github.com/Azure/azure-sdk-for-go v68.0.0+incompatible replace github.com/Azure/azure-storage-blob-go => github.com/MasslessParticle/azure-storage-blob-go 
v0.14.1-0.20240322194317-344980fda573 diff --git a/go.sum b/go.sum index e84a71327fa2..5bb9b958b969 100644 --- a/go.sum +++ b/go.sum @@ -760,8 +760,8 @@ github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9a github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= github.com/Azure/azure-pipeline-go v0.2.3 h1:7U9HBg1JFK3jHl5qmo4CTZKFTVgMwdFHMVtCdfBE21U= github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= -github.com/Azure/azure-sdk-for-go v36.2.0+incompatible h1:09cv2WoH0g6jl6m2iT+R9qcIPZKhXEL0sbmLhxP895s= -github.com/Azure/azure-sdk-for-go v36.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= +github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0 h1:GJHeeA2N7xrG3q30L2UXDyuWRzDM900/65j70wcM4Ww= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0/go.mod h1:l38EPgmsp71HHLq9j7De57JcKOWPyhrsW1Awm1JS6K0= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc= @@ -915,10 +915,15 @@ github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= -github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek= -github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s= +github.com/alecthomas/assert/v2 v2.7.0 h1:QtqSACNS3tF7oasA8CU6A6sXZSBDqnm7RfpLl9bZqbE= 
+github.com/alecthomas/assert/v2 v2.7.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/chroma/v2 v2.14.0 h1:R3+wzpnUArGcQz7fCETQBzO5n9IMNi13iIs46aU4V9E= +github.com/alecthomas/chroma/v2 v2.14.0/go.mod h1:QolEbTfmUHIMVpBqxeDnNBj2uoeI4EbYP4i6n68SG4I= +github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY= +github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE= +github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc= +github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -1025,13 +1030,17 @@ github.com/bitly/go-hostpool v0.1.0 h1:XKmsF6k5el6xHG3WPJ8U0Ku/ye7njX7W81Ng7O2io github.com/bitly/go-hostpool v0.1.0/go.mod h1:4gOCgp6+NZnVqlKyZ/iBZFTAJKembaVENUpMkpg42fw= github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw= github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0= -github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0= -github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= +github.com/bmatcuk/doublestar/v4 v4.7.1 h1:fdDeAqgT47acgwd9bd9HxJRDmc9UAmPpc+2m0CXv75Q= +github.com/bmatcuk/doublestar/v4 v4.7.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bmizerany/assert 
v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 h1:6lhrsTEnloDPXyeZBvSYvQf8u86jbKehZPVDDlkgDl4= @@ -1092,8 +1101,6 @@ github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78/go.mod h1:W+zGtBO5Y1Ig github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= -github.com/coder/quartz v0.1.2 h1:PVhc9sJimTdKd3VbygXtS4826EOCpB1fXoRlLnCrE+s= -github.com/coder/quartz v0.1.2/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/fifo v1.1.0 h1:4I2mbh5stb1u6ycIABlBw9zgtlK8viPI9QkQNRQEEmY= github.com/containerd/fifo v1.1.0/go.mod h1:bmC4NWMbXlt2EZ0Hc7Fx7QzTFxgPID13eH0Qu+MAb2o= @@ -1158,8 +1165,8 @@ 
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= -github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= +github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dnaeon/go-vcr v0.0.0-20180814043457-aafff18a5cc2/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= github.com/docker/distribution v2.6.0-rc.1.0.20170726174610-edc3ab29cdff+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= @@ -1494,8 +1501,6 @@ github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91 github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= -github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= -github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= github.com/go-resty/resty/v2 v2.13.1 h1:x+LHXBI2nMB1vqndymf26quycC4aggYJ7DECYbiz03g= github.com/go-resty/resty/v2 v2.13.1/go.mod h1:GznXlLxkq6Nh4sU59rPmUw3VtgpO3aS96ORAI6Q7d+0= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -1769,8 +1774,8 @@ github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod 
h1:Fecb github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= -github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0 h1:pRhl55Yx1eC7BZ1N+BBWwnKaMyD8uC+34TLdndZMAKk= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0/go.mod h1:XKMd7iuf/RGPSMJ/U4HP0zS2Z9Fh8Ps9a+6X26m/tmI= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= @@ -1931,6 +1936,8 @@ github.com/heroku/x v0.4.0 h1:Bd8DERAUIrxqQ8p2TAXEoUYIFHCvpP4UJYp1+o7KoPQ= github.com/heroku/x v0.4.0/go.mod h1:3Ji2zMA37qO4BK/4yzXvjlDIUdeXJvArUm2PB0ZEW5g= github.com/hetznercloud/hcloud-go/v2 v2.10.2 h1:9gyTUPhfNbfbS40Spgij5mV5k37bOZgt8iHKCbfGs5I= github.com/hetznercloud/hcloud-go/v2 v2.10.2/go.mod h1:xQ+8KhIS62W0D78Dpi57jsufWh844gUw1az5OUvaeq8= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= @@ -2448,6 +2455,8 @@ github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod 
h1:bCqn github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= +github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/renier/xmlrpc v0.0.0-20170708154548-ce4a1a486c03/go.mod h1:gRAiPF5C5Nd0eyyRdqIu9qTiFSoZzpTq727b5B8fkkU= @@ -2529,6 +2538,8 @@ github.com/soniah/gosnmp v1.25.0/go.mod h1:8YvfZxH388NIIw2A+X5z2Oh97VcNhtmxDLt5Q github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/sony/gobreaker v0.5.0 h1:dRCvqm0P490vZPmy7ppEk2qCnCieBooFJ+YoXGYB+yg= github.com/sony/gobreaker v0.5.0/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/sony/gobreaker/v2 v2.0.0 h1:23AaR4JQ65y4rz8JWMzgXw2gKOykZ/qfqYunll4OwJ4= +github.com/sony/gobreaker/v2 v2.0.0/go.mod h1:8JnRUz80DJ1/ne8M8v7nmTs2713i58nIt4s7XcGe/DI= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= @@ -2671,6 +2682,8 @@ github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2 github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference 
v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc= +github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= @@ -2736,8 +2749,8 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/collector/pdata v1.19.0 h1:jmnU5R8TOCbwRr4B8sjdRxM7L5WnEKlQWX1dtLYxIbE= -go.opentelemetry.io/collector/pdata v1.19.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= +go.opentelemetry.io/collector/pdata v1.20.0 h1:ePcwt4bdtISP0loHaE+C9xYoU2ZkIvWv89Fob16o9SM= +go.opentelemetry.io/collector/pdata v1.20.0/go.mod h1:Ox1YVLe87cZDB/TL30i4SUz1cA5s6AM6SpFMfY61ICs= go.opentelemetry.io/collector/semconv v0.105.0 h1:8p6dZ3JfxFTjbY38d8xlQGB1TQ3nPUvs+D0RERniZ1g= go.opentelemetry.io/collector/semconv v0.105.0/go.mod h1:yMVUCNoQPZVq/IPfrHrnntZTWsLf5YGZ7qwKulIl5hw= go.opentelemetry.io/contrib/detectors/gcp v1.29.0 h1:TiaiXB4DpGD3sdzNlYQxruQngn5Apwzi1X0DRhuGvDQ= @@ -2798,7 +2811,6 @@ go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9E go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= 
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8= go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= go4.org/netipx v0.0.0-20230125063823-8449b0a6169f h1:ketMxHg+vWm3yccyYiq+uK8D3fRmna2Fcj+awpQp84s= @@ -3193,7 +3205,6 @@ golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -3476,8 +3487,8 @@ google.golang.org/api v0.122.0/go.mod h1:gcitW0lvnyWjSp9nKxAbdHKIZ6vF4aajGueeslZ google.golang.org/api v0.124.0/go.mod h1:xu2HQurE5gi/3t1aFCvhPD781p0a3p11sdunTJ2BlP4= google.golang.org/api v0.125.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= google.golang.org/api v0.126.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= -google.golang.org/api v0.206.0 h1:A27GClesCSheW5P2BymVHjpEeQ2XHH8DI8Srs2HI2L8= -google.golang.org/api v0.206.0/go.mod h1:BtB8bfjTYIrai3d8UyvPmV9REGgox7coh+ZRwm0b+W8= +google.golang.org/api v0.208.0 h1:8Y62MUGRviQnnP9/41/bYAGySPKAN9iwzV96ZvhwyVE= +google.golang.org/api v0.208.0/go.mod h1:I53S168Yr/PNDNMi5yPnDc0/LGRZO6o7PoEbl/HY3CM= google.golang.org/appengine v1.1.0/go.mod 
h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -3635,8 +3646,8 @@ google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:xZnkP7mR google.golang.org/genproto v0.0.0-20230629202037-9506855d4529/go.mod h1:xZnkP7mREFX5MORlOPEzLMr+90PPZQ2QWzrVTWfAq64= google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:O9kGHb51iE/nOGvQaDUuadVYqovW56s5emA88lQnj6Y= google.golang.org/genproto v0.0.0-20230711160842-782d3b101e98/go.mod h1:S7mY02OqCJTD0E1OiQy1F72PWFB4bZJ87cAtLPYgDR0= -google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28 h1:KJjNNclfpIkVqrZlTWcgOOaVQ00LdBnoEaRfkUx760s= -google.golang.org/genproto v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:mt9/MofW7AWQ+Gy179ChOnvmJatV8YHUmrcedo9CIFI= +google.golang.org/genproto v0.0.0-20241113202542-65e8d215514f h1:zDoHYmMzMacIdjNe+P2XiTmPsLawi/pCbSPfxt6lTfw= +google.golang.org/genproto v0.0.0-20241113202542-65e8d215514f/go.mod h1:Q5m6g8b5KaFFzsQFIGdJkSJDGeJiybVenoYFMMa3ohI= google.golang.org/genproto/googleapis/api v0.0.0-20230525234020-1aefcd67740a/go.mod h1:ts19tUU+Z0ZShN1y3aPyq2+O3d5FUNNgT6FtOzmrNn8= google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= google.golang.org/genproto/googleapis/api v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= @@ -3654,8 +3665,8 @@ google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go. 
google.golang.org/genproto/googleapis/rpc v0.0.0-20230629202037-9506855d4529/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:8mL13HKkDa+IuJ8yruA3ci0q+0vsUz4m//+ottjwS5o= google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 h1:XVhgTWWV3kGQlwJHR3upFWZeTsei6Oks1apkZSeonIE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241113202542-65e8d215514f h1:C1QccEa9kUwvMgEUORqQD9S17QesQijxjZ84sO82mfo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241113202542-65e8d215514f/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= @@ -3737,7 +3748,6 @@ google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHh google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= -gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -3818,8 +3828,8 @@ k8s.io/api v0.29.3/go.mod 
h1:y2yg2NTyHUUkIoTC+phinTnEa3KFM6RZ3szxt014a80= k8s.io/apimachinery v0.17.1/go.mod h1:b9qmWdKlLuU9EBh+06BtLcSf/Mu89rWL33naRxs1uZg= k8s.io/apimachinery v0.22.2/go.mod h1:O3oNtNadZdeOMxHFVxOreoznohCpy0z6mocxbZr7oJ0= k8s.io/apimachinery v0.26.2/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= -k8s.io/apimachinery v0.31.2 h1:i4vUt2hPK56W6mlT7Ry+AO8eEsyxMD1U44NR22CLTYw= -k8s.io/apimachinery v0.31.2/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo= +k8s.io/apimachinery v0.31.3 h1:6l0WhcYgasZ/wk9ktLq5vLaoXJJr5ts6lkaQzgeYPq4= +k8s.io/apimachinery v0.31.3/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo= k8s.io/client-go v0.22.2/go.mod h1:sAlhrkVDf50ZHx6z4K0S40wISNTarf1r800F+RlCF6U= k8s.io/client-go v0.26.2/go.mod h1:u5EjOuSyBa09yqqyY7m3abZeovO/7D/WehVVlZ2qcqU= k8s.io/client-go v0.29.3 h1:R/zaZbEAxqComZ9FHeQwOh3Y1ZUs7FaHKZdQtIc2WZg= @@ -3828,7 +3838,6 @@ k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8 k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20210813121822-485abfe95c7c/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= @@ -3846,8 +3855,8 @@ k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/ k8s.io/utils v0.0.0-20210819203725-bdf08cb9a70a/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20221107191617-1a15be271d1d/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= k8s.io/utils v0.0.0-20230406110748-d93618cff8a2/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -k8s.io/utils 
v0.0.0-20240902221715-702e33fdd3c3 h1:b2FmK8YH+QEwq/Sy2uAEhmqL5nPfGYbJOcaqjeYYZoA= -k8s.io/utils v0.0.0-20240902221715-702e33fdd3c3/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +k8s.io/utils v0.0.0-20241104163129-6fe5fd82f078 h1:jGnCPejIetjiy2gqaJ5V0NLwTpF4wbQ6cZIItJCSHno= +k8s.io/utils v0.0.0-20241104163129-6fe5fd82f078/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= diff --git a/integration/util/merger.go b/integration/util/merger.go index dbd88a4205fc..5458c1ce3f12 100644 --- a/integration/util/merger.go +++ b/integration/util/merger.go @@ -3,7 +3,7 @@ package util import ( "fmt" - "github.com/imdario/mergo" + "dario.cat/mergo" "gopkg.in/yaml.v2" ) diff --git a/nix/packages/loki.nix b/nix/packages/loki.nix index 6c4b7300fc82..ee161437786e 100644 --- a/nix/packages/loki.nix +++ b/nix/packages/loki.nix @@ -5,7 +5,7 @@ let pname = "lambda-promtail"; src = ./../../tools/lambda-promtail; - vendorHash = "sha256-m9nuCliQBhUo+B3K+x6c/9hOkr/iTR9kOLhiFcc1P30="; + vendorHash = "sha256-6soQ9GaQSvxg3ivJHFZbqye7+TF8XLJuylOZz+Zjal0="; doCheck = false; diff --git a/operator/jsonnet/jsonnetfile.json b/operator/jsonnet/jsonnetfile.json index 6456bf5997c1..6064cf7d1f8d 100644 --- a/operator/jsonnet/jsonnetfile.json +++ b/operator/jsonnet/jsonnetfile.json @@ -8,7 +8,7 @@ "subdir": "production/loki-mixin" } }, - "version": "v3.2.1" + "version": "v3.3.0" } ], "legacyImports": true diff --git a/operator/jsonnet/jsonnetfile.lock.json b/operator/jsonnet/jsonnetfile.lock.json index 2ba0c39bea39..31f064d701f5 100644 --- a/operator/jsonnet/jsonnetfile.lock.json +++ b/operator/jsonnet/jsonnetfile.lock.json @@ -38,8 +38,8 @@ "subdir": "production/loki-mixin" } }, - "version": "3c386cc5d13629a74cddb43c429ec290ba2e4a0a", - 
"sum": "mVKuwcuL1Wm+JMgt2MiwOAtKkmuClzE75+liMe0AGek=" + "version": "19bbc44845e8c90bd9a867348624967f62363a82", + "sum": "4DCfOphB9C37hqxU8fh8EPIfn1BXGiZnJaFR+EpjFCA=" }, { "source": { diff --git a/pkg/blockbuilder/controller.go b/pkg/blockbuilder/controller.go new file mode 100644 index 000000000000..f252b3d65744 --- /dev/null +++ b/pkg/blockbuilder/controller.go @@ -0,0 +1,305 @@ +package blockbuilder + +import ( + "context" + "fmt" + "time" + + "github.com/prometheus/prometheus/model/labels" + + "github.com/grafana/dskit/backoff" + + "github.com/grafana/loki/v3/pkg/kafka" + "github.com/grafana/loki/v3/pkg/kafka/partition" + + "github.com/grafana/loki/pkg/push" +) + +// [min,max) +type Offsets struct { + Min, Max int64 +} + +type Job struct { + Partition int32 + Offsets Offsets +} + +// Interface required for interacting with queue partitions. +type PartitionController interface { + Topic() string + Partition() int32 + // Returns the highest committed offset from the consumer group + HighestCommittedOffset(ctx context.Context) (int64, error) + // Returns the highest available offset in the partition + HighestPartitionOffset(ctx context.Context) (int64, error) + // Returns the earliest available offset in the partition + EarliestPartitionOffset(ctx context.Context) (int64, error) + // Commits the offset to the consumer group. + Commit(context.Context, int64) error + // Process will run load batches at a time and send them to channel, + // so it's advised to not buffer the channel for natural backpressure. + // As a convenience, it returns the last seen offset, which matches + // the final record sent on the channel. + Process(context.Context, Offsets, chan<- []AppendInput) (int64, error) + + Close() error +} + +// PartitionJobController loads a single job a time, bound to a given +// * topic +// * partition +// * offset_step_len: the number of offsets each job to contain. e.g. 
"10" could yield a job w / min=15, max=25 +// +// At a high level, it watches a source topic/partition (where log data is ingested) and a "committed" topic/partition. +// The "committed" partition corresponds to the offsets from the source partition which have been committed to object storage. +// In essence, the following loop is performed +// 1. load the most recent record from the "committed" partition. This contains the highest msg offset in the "source" partition +// that has been committed to object storage. We'll call that $START_POS. +// 2. Create a job with `min=$START_POS+1,end=$START_POS+1+$STEP_LEN` +// 3. Sometime later when the job has been processed, we'll commit the final processed offset from the "source" partition (which +// will be <= $END_POS) to the "committed" partition. +// +// NB(owen-d): In our case, "source" is the partition +// +// containing log data and "committed" is the consumer group +type PartitionJobController struct { + stepLen int64 + part partition.ReaderIfc + backoff backoff.Config + decoder *kafka.Decoder +} + +func NewPartitionJobController( + controller partition.ReaderIfc, + backoff backoff.Config, +) (*PartitionJobController, error) { + decoder, err := kafka.NewDecoder() + if err != nil { + return nil, err + } + return &PartitionJobController{ + stepLen: 1000, // Default step length of 1000 offsets per job + part: controller, + backoff: backoff, + decoder: decoder, + }, nil +} + +func (l *PartitionJobController) HighestCommittedOffset(ctx context.Context) (int64, error) { + return withBackoff( + ctx, + l.backoff, + func() (int64, error) { + return l.part.FetchLastCommittedOffset(ctx) + }, + ) +} + +func (l *PartitionJobController) HighestPartitionOffset(ctx context.Context) (int64, error) { + return withBackoff( + ctx, + l.backoff, + func() (int64, error) { + return l.part.FetchPartitionOffset(ctx, partition.KafkaEndOffset) + }, + ) +} + +func (l *PartitionJobController) EarliestPartitionOffset(ctx context.Context) (int64, 
error) { + return withBackoff( + ctx, + l.backoff, + func() (int64, error) { + return l.part.FetchPartitionOffset(ctx, partition.KafkaStartOffset) + }, + ) +} + +func (l *PartitionJobController) Process(ctx context.Context, offsets Offsets, ch chan<- []AppendInput) (int64, error) { + l.part.SetOffsetForConsumption(offsets.Min) + + var ( + lastOffset = offsets.Min - 1 + boff = backoff.New(ctx, l.backoff) + err error + ) + + for boff.Ongoing() { + var records []partition.Record + records, err = l.part.Poll(ctx) + if err != nil { + boff.Wait() + continue + } + + if len(records) == 0 { + // No more records available + break + } + + // Reset backoff on successful poll + boff.Reset() + + converted := make([]AppendInput, 0, len(records)) + for _, record := range records { + offset := records[len(records)-1].Offset + if offset >= offsets.Max { + break + } + lastOffset = offset + + stream, labels, err := l.decoder.Decode(record.Content) + if err != nil { + return 0, fmt.Errorf("failed to decode record: %w", err) + } + if len(stream.Entries) == 0 { + continue + } + + converted = append(converted, AppendInput{ + tenant: record.TenantID, + labels: labels, + labelsStr: stream.Labels, + entries: stream.Entries, + }) + + select { + case ch <- converted: + case <-ctx.Done(): + return 0, ctx.Err() + } + } + } + + return lastOffset, err +} + +// LoadJob(ctx) returns the next job by finding the most recent unconsumed offset in the partition +// Returns whether an applicable job exists, the job, and an error +func (l *PartitionJobController) LoadJob(ctx context.Context) (bool, Job, error) { + // Read the most recent committed offset + committedOffset, err := l.HighestCommittedOffset(ctx) + if err != nil { + return false, Job{}, err + } + + earliestOffset, err := l.EarliestPartitionOffset(ctx) + if err != nil { + return false, Job{}, err + } + + startOffset := committedOffset + 1 + if startOffset < earliestOffset { + startOffset = earliestOffset + } + + highestOffset, err := 
l.HighestPartitionOffset(ctx) + if err != nil { + return false, Job{}, err + } + if highestOffset == committedOffset { + return false, Job{}, nil + } + + // Create the job with the calculated offsets + job := Job{ + Partition: l.part.Partition(), + Offsets: Offsets{ + Min: startOffset, + Max: min(startOffset+l.stepLen, highestOffset), + }, + } + + return true, job, nil +} + +// implement a dummy controller which can be parameterized to +// deterministically simulate partitions +type dummyPartitionController struct { + topic string + partition int32 + committed int64 + highest int64 + numTenants int // number of unique tenants to simulate + streamsPerTenant int // number of streams per tenant + entriesPerOffset int // coefficient for entries per offset +} + +// used in testing +// nolint:revive +func NewDummyPartitionController(topic string, partition int32, highest int64) *dummyPartitionController { + return &dummyPartitionController{ + topic: topic, + partition: partition, + committed: 0, // always starts at zero + highest: highest, + numTenants: 2, // default number of tenants + streamsPerTenant: 2, // default streams per tenant + entriesPerOffset: 1, // default entries per offset coefficient + } +} + +func (d *dummyPartitionController) Topic() string { + return d.topic +} + +func (d *dummyPartitionController) Partition() int32 { + return d.partition +} + +func (d *dummyPartitionController) HighestCommittedOffset(_ context.Context) (int64, error) { + return d.committed, nil +} + +func (d *dummyPartitionController) HighestPartitionOffset(_ context.Context) (int64, error) { + return d.highest, nil +} + +func (d *dummyPartitionController) Commit(_ context.Context, offset int64) error { + d.committed = offset + return nil +} + +func (d *dummyPartitionController) Process(ctx context.Context, offsets Offsets, ch chan<- []AppendInput) (int64, error) { + for i := int(offsets.Min); i < int(offsets.Max); i++ { + batch := d.createBatch(i) + select { + case <-ctx.Done(): + 
return int64(i - 1), ctx.Err() + case ch <- batch: + } + } + return offsets.Max - 1, nil +} + +// creates (tenants*streams) inputs +func (d *dummyPartitionController) createBatch(offset int) []AppendInput { + result := make([]AppendInput, 0, d.numTenants*d.streamsPerTenant) + for i := 0; i < d.numTenants; i++ { + tenant := fmt.Sprintf("tenant-%d", i) + for j := 0; j < d.streamsPerTenant; j++ { + lbls := labels.Labels{ + {Name: "stream", Value: fmt.Sprintf("stream-%d", j)}, + } + entries := make([]push.Entry, d.entriesPerOffset) + for k := 0; k < d.entriesPerOffset; k++ { + entries[k] = push.Entry{ + Timestamp: time.Now(), + Line: fmt.Sprintf("tenant=%d stream=%d line=%d offset=%d", i, j, k, offset), + } + } + result = append(result, AppendInput{ + tenant: tenant, + labels: lbls, + labelsStr: lbls.String(), + entries: entries, + }) + } + } + return result +} + +func (d *dummyPartitionController) Close() error { + return nil +} diff --git a/pkg/blockbuilder/metrics.go b/pkg/blockbuilder/metrics.go new file mode 100644 index 000000000000..31679e34f446 --- /dev/null +++ b/pkg/blockbuilder/metrics.go @@ -0,0 +1,152 @@ +package blockbuilder + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/util/constants" +) + +type SlimgesterMetrics struct { + chunkUtilization prometheus.Histogram + chunkEntries prometheus.Histogram + chunkSize prometheus.Histogram + chunkCompressionRatio prometheus.Histogram + chunksPerTenant *prometheus.CounterVec + chunkSizePerTenant *prometheus.CounterVec + chunkAge prometheus.Histogram + chunkEncodeTime prometheus.Histogram + chunksFlushFailures prometheus.Counter + chunksFlushedPerReason *prometheus.CounterVec + chunkLifespan prometheus.Histogram + chunksEncoded *prometheus.CounterVec + chunkDecodeFailures *prometheus.CounterVec + flushedChunksStats *analytics.Counter + flushedChunksBytesStats 
*analytics.Statistics + flushedChunksLinesStats *analytics.Statistics + flushedChunksAgeStats *analytics.Statistics + flushedChunksLifespanStats *analytics.Statistics + flushedChunksUtilizationStats *analytics.Statistics + + chunksCreatedTotal prometheus.Counter + samplesPerChunk prometheus.Histogram + blocksPerChunk prometheus.Histogram + chunkCreatedStats *analytics.Counter +} + +func NewSlimgesterMetrics(r prometheus.Registerer) *SlimgesterMetrics { + return &SlimgesterMetrics{ + chunkUtilization: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_utilization", + Help: "Distribution of stored chunk utilization (when stored).", + Buckets: prometheus.LinearBuckets(0, 0.2, 6), + }), + chunkEntries: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_entries", + Help: "Distribution of stored lines per chunk (when stored).", + Buckets: prometheus.ExponentialBuckets(200, 2, 9), // biggest bucket is 200*2^(9-1) = 51200 + }), + chunkSize: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_size_bytes", + Help: "Distribution of stored chunk sizes (when stored).", + Buckets: prometheus.ExponentialBuckets(20000, 2, 10), // biggest bucket is 20000*2^(10-1) = 10,240,000 (~10.2MB) + }), + chunkCompressionRatio: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_compression_ratio", + Help: "Compression ratio of chunks (when stored).", + Buckets: prometheus.LinearBuckets(.75, 2, 10), + }), + chunksPerTenant: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunks_stored_total", + Help: "Total stored chunks per tenant.", + }, []string{"tenant"}), + chunkSizePerTenant: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: 
"slimgester_chunk_stored_bytes_total", + Help: "Total bytes stored in chunks per tenant.", + }, []string{"tenant"}), + chunkAge: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_age_seconds", + Help: "Distribution of chunk ages (when stored).", + // with default settings chunks should flush between 5 min and 12 hours + // so buckets at 1min, 5min, 10min, 30min, 1hr, 2hr, 4hr, 10hr, 12hr, 16hr + Buckets: []float64{60, 300, 600, 1800, 3600, 7200, 14400, 36000, 43200, 57600}, + }), + chunkEncodeTime: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_encode_time_seconds", + Help: "Distribution of chunk encode times.", + // 10ms to 10s. + Buckets: prometheus.ExponentialBuckets(0.01, 4, 6), + }), + chunksFlushFailures: promauto.With(r).NewCounter(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunks_flush_failures_total", + Help: "Total number of flush failures.", + }), + chunksFlushedPerReason: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunks_flushed_total", + Help: "Total flushed chunks per reason.", + }, []string{"reason"}), + chunkLifespan: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_bounds_hours", + Help: "Distribution of chunk end-start durations.", + // 1h -> 8hr + Buckets: prometheus.LinearBuckets(1, 1, 8), + }), + chunksEncoded: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunks_encoded_total", + Help: "The total number of chunks encoded in the ingester.", + }, []string{"user"}), + chunkDecodeFailures: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunk_decode_failures_total", + Help: "The number of freshly encoded chunks that failed to decode.", + }, []string{"user"}), + 
flushedChunksStats: analytics.NewCounter("slimgester_flushed_chunks"), + flushedChunksBytesStats: analytics.NewStatistics("slimgester_flushed_chunks_bytes"), + flushedChunksLinesStats: analytics.NewStatistics("slimgester_flushed_chunks_lines"), + flushedChunksAgeStats: analytics.NewStatistics( + "slimgester_flushed_chunks_age_seconds", + ), + flushedChunksLifespanStats: analytics.NewStatistics( + "slimgester_flushed_chunks_lifespan_seconds", + ), + flushedChunksUtilizationStats: analytics.NewStatistics( + "slimgester_flushed_chunks_utilization", + ), + chunksCreatedTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "slimgester_chunks_created_total", + Help: "The total number of chunks created in the ingester.", + }), + samplesPerChunk: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "slimgester", + Name: "samples_per_chunk", + Help: "The number of samples in a chunk.", + + Buckets: prometheus.LinearBuckets(4096, 2048, 6), + }), + blocksPerChunk: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "slimgester", + Name: "blocks_per_chunk", + Help: "The number of blocks in a chunk.", + + Buckets: prometheus.ExponentialBuckets(5, 2, 6), + }), + + chunkCreatedStats: analytics.NewCounter("slimgester_chunk_created"), + } +} diff --git a/pkg/blockbuilder/pipeline.go b/pkg/blockbuilder/pipeline.go new file mode 100644 index 000000000000..494763d8c83f --- /dev/null +++ b/pkg/blockbuilder/pipeline.go @@ -0,0 +1,99 @@ +package blockbuilder + +import ( + "context" + + "github.com/grafana/dskit/multierror" + "golang.org/x/sync/errgroup" +) + +type stage struct { + name string + parallelism int + grp *errgroup.Group + ctx context.Context + fn func(context.Context) error + cleanup func(context.Context) error // optional; will be called once the underlying group returns +} + +// pipeline is a sequence of n different stages. 
+type pipeline struct { + ctx context.Context // base context + // we use a separate errgroup for stage dispatch/collection + // and inherit stage-specific groups from this ctx to + // propagate cancellation + grp *errgroup.Group + stages []stage +} + +func newPipeline(ctx context.Context) *pipeline { + stagesGrp, ctx := errgroup.WithContext(ctx) + return &pipeline{ + ctx: ctx, + grp: stagesGrp, + } +} + +func (p *pipeline) AddStageWithCleanup( + name string, + parallelism int, + fn func(context.Context) error, + cleanup func(context.Context) error, +) { + grp, ctx := errgroup.WithContext(p.ctx) + p.stages = append(p.stages, stage{ + name: name, + parallelism: parallelism, + fn: fn, + cleanup: cleanup, + ctx: ctx, + grp: grp, + }) +} + +func (p *pipeline) AddStage( + name string, + parallelism int, + fn func(context.Context) error, +) { + p.AddStageWithCleanup(name, parallelism, fn, nil) +} + +func (p *pipeline) Run() error { + + for i := range p.stages { + // we're using this in subsequent async closures; + // assign it directly in-loop + s := p.stages[i] + + // spin up n workers for each stage using that stage's + // error group. + for j := 0; j < s.parallelism; j++ { + s.grp.Go(func() error { + return s.fn(s.ctx) + }) + } + + // Using the pipeline's err group, await the stage finish, + // calling any necessary cleanup fn + // NB: by using the pipeline's errgroup here, we propagate + // failures to downstream stage contexts, so once a single stage + // fails, the others will be notified. + p.grp.Go(func() error { + var errs multierror.MultiError + errs.Add(s.grp.Wait()) + if s.cleanup != nil { + // NB: we use the pipeline's context for the cleanup call b/c + // the stage's context is cancelled once `Wait` returns. + // That's ok. 
cleanup is always called for a relevant stage + // and just needs to know if _other_ stages failed at this point + errs.Add(s.cleanup(p.ctx)) + } + + return errs.Err() + }) + } + + // finish all stages + return p.grp.Wait() +} diff --git a/pkg/blockbuilder/pipeline_test.go b/pkg/blockbuilder/pipeline_test.go new file mode 100644 index 000000000000..9ec69d2006eb --- /dev/null +++ b/pkg/blockbuilder/pipeline_test.go @@ -0,0 +1,87 @@ +package blockbuilder + +import ( + "context" + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +type testStage struct { + parallelism int + fn func(context.Context) error + cleanup func(context.Context) error +} + +func TestPipeline(t *testing.T) { + tests := []struct { + name string + stages []testStage + expectedErr error + }{ + { + name: "single stage success", + stages: []testStage{ + { + parallelism: 1, + fn: func(_ context.Context) error { + return nil + }, + }, + }, + }, + { + name: "multiple stages success", + stages: []testStage{ + { + parallelism: 2, + fn: func(_ context.Context) error { + return nil + }, + }, + { + parallelism: 1, + fn: func(_ context.Context) error { + return nil + }, + }, + }, + }, + { + name: "stage error propagates", + stages: []testStage{ + { + parallelism: 1, + fn: func(_ context.Context) error { + return errors.New("stage error") + }, + }, + }, + expectedErr: errors.New("stage error"), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := newPipeline(context.Background()) + + for i, stage := range tt.stages { + if stage.cleanup != nil { + p.AddStageWithCleanup(fmt.Sprint(i), stage.parallelism, stage.fn, stage.cleanup) + } else { + p.AddStage(fmt.Sprint(i), stage.parallelism, stage.fn) + } + } + + err := p.Run() + if tt.expectedErr != nil { + require.Error(t, err) + require.Equal(t, tt.expectedErr.Error(), err.Error()) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/pkg/blockbuilder/plan.txt b/pkg/blockbuilder/plan.txt new file 
mode 100644 index 000000000000..a3a80a735fa4 --- /dev/null +++ b/pkg/blockbuilder/plan.txt @@ -0,0 +1,34 @@ +# Purpose +blockbuilder is responsible for consuming ingested data in the queue (kafka, etc) and writing it in an optimized form to long term storage. While this should always remain true, it can be built and iterated upon in phases. First, let's look at the simplest possible architecture: + +* [interface] loads "jobs": partitions+offset ranges in kafka +* For each job, process data, building the storage format +* [interface] Upon completion (inc flushing to storage), commit work + * e.g. update consumer group processed offset in kafka +* consumes + +# First Impl: Alongside existing multi-zone ingester writers +Goal: modify ingester architecture towards RF1, but don't actually write to storage yet, b/c we haven't solved coordinating interim reads/writes. +Deliverable: RF1 metrics proof +* run replicas==partitions (from ingesters) +* run every $INTERVAL (5m?), +* slim down ingester write path + * remove disk (all WALs). + * ignore limits if too complex (for now) + * /dev/null backend + + +# TODO improvements +* metadata store + * include offsets committed for coordination b/w ingester-readers & long term storage +* planner/scheduler+worker architecture +* shuffle sharding + + +# Things to solve +* limits application +* job sizing -- coordinate kafka offsets w/ underlying bytes added? + * ideally we can ask kafka for "the next 1GB" in a partition, but to do this we'd need the kafka offsets (auto-incremented integers for messages within a partition) to be derived from the message size. Right now, different batch sizes can cause kafka msgs to have very different sizes. + * idea: another set of partitions to store offsets->datasize? Sounds shitty tbh & breaks the consistency bound on writes (what if kafka acks first write but doesnt ack the second?) 
+ * what if we stored byte counter metadata in kafka records so we could O(log(n)) seek an offset range w/ the closest $SIZE + * Likely a reasonable perf tradeoff as this isn't called often (only in job planner in the future). \ No newline at end of file diff --git a/pkg/blockbuilder/slimgester.go b/pkg/blockbuilder/slimgester.go new file mode 100644 index 000000000000..705b47444a6f --- /dev/null +++ b/pkg/blockbuilder/slimgester.go @@ -0,0 +1,811 @@ +package blockbuilder + +import ( + "bytes" + "context" + "flag" + "fmt" + "math" + "sync" + "time" + + "github.com/go-kit/log" + "github.com/go-kit/log/level" + "github.com/grafana/dskit/backoff" + "github.com/grafana/dskit/services" + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" + + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compression" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/types" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" + + "github.com/grafana/loki/pkg/push" +) + +const ( + flushReasonFull = "full" + flushReasonMaxAge = "max_age" + onePointFiveMB = 3 << 19 +) + +type Config struct { + ConcurrentFlushes int `yaml:"concurrent_flushes"` + ConcurrentWriters int `yaml:"concurrent_writers"` + BlockSize flagext.ByteSize `yaml:"chunk_block_size"` + TargetChunkSize flagext.ByteSize `yaml:"chunk_target_size"` + ChunkEncoding string `yaml:"chunk_encoding"` + parsedEncoding compression.Codec `yaml:"-"` // placeholder for validated encoding + MaxChunkAge time.Duration `yaml:"max_chunk_age"` + Interval 
time.Duration `yaml:"interval"` + Backoff backoff.Config `yaml:"backoff_config"` +} + +func (cfg *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { + f.IntVar(&cfg.ConcurrentFlushes, prefix+"concurrent-flushes", 1, "How many flushes can happen concurrently") + f.IntVar(&cfg.ConcurrentWriters, prefix+"concurrent-writers", 1, "How many workers to process writes, defaults to number of available cpus") + _ = cfg.BlockSize.Set("256KB") + f.Var(&cfg.BlockSize, prefix+"chunks-block-size", "The targeted _uncompressed_ size in bytes of a chunk block When this threshold is exceeded the head block will be cut and compressed inside the chunk.") + _ = cfg.TargetChunkSize.Set(fmt.Sprint(onePointFiveMB)) + f.Var(&cfg.TargetChunkSize, prefix+"chunk-target-size", "A target _compressed_ size in bytes for chunks. This is a desired size not an exact size, chunks may be slightly bigger or significantly smaller if they get flushed for other reasons (e.g. chunk_idle_period). A value of 0 creates chunks with a fixed 10 blocks, a non zero value will create chunks with a variable number of blocks to meet the target size.") + f.StringVar(&cfg.ChunkEncoding, prefix+"chunk-encoding", compression.Snappy.String(), fmt.Sprintf("The algorithm to use for compressing chunk. (%s)", compression.SupportedCodecs())) + f.DurationVar(&cfg.MaxChunkAge, prefix+"max-chunk-age", 2*time.Hour, "The maximum duration of a timeseries chunk in memory. If a timeseries runs for longer than this, the current chunk will be flushed to the store and a new chunk created.") + f.DurationVar(&cfg.Interval, prefix+"interval", 10*time.Minute, "The interval at which to run.") + cfg.Backoff.RegisterFlagsWithPrefix(prefix+"backoff.", f) +} + +// RegisterFlags registers flags. 
+func (cfg *Config) RegisterFlags(flags *flag.FlagSet) { + cfg.RegisterFlagsWithPrefix("blockbuilder.", flags) +} + +func (cfg *Config) Validate() error { + enc, err := compression.ParseCodec(cfg.ChunkEncoding) + if err != nil { + return err + } + cfg.parsedEncoding = enc + return nil +} + +// BlockBuilder is a slimmed-down version of the ingester, intended to +// ingest logs without WALs. Broadly, it accumulates logs into per-tenant chunks in the same way the existing ingester does, +// without a WAL. Index (TSDB) creation is also not an out-of-band procedure and must be called directly. In essence, this +// allows us to buffer data, flushing chunks to storage as necessary, and then when ready to commit this, relevant TSDBs (one per period) are created and flushed to storage. This allows an external caller to prepare a batch of data, build relevant chunks+indices, ensure they're flushed, and then return. As long as chunk+index creation is deterministic, this operation is also +// idempotent, making retries simple and impossible to introduce duplicate data. +// It contains the following methods: +// - `Append(context.Context, logproto.PushRequest) error` +// Adds a push request to ingested data. May flush existing chunks when they're full/etc. +// - `Commit(context.Context) error` +// Serializes (cuts) any buffered data into chunks, flushes them to storage, then creates + flushes TSDB indices +// containing all chunk references. Finally, clears internal state. 
+type BlockBuilder struct { + services.Service + + id string + cfg Config + periodicConfigs []config.PeriodConfig + + metrics *SlimgesterMetrics + logger log.Logger + + store stores.ChunkWriter + objStore *MultiStore + jobController *PartitionJobController +} + +func NewBlockBuilder( + id string, + cfg Config, + periodicConfigs []config.PeriodConfig, + store stores.ChunkWriter, + objStore *MultiStore, + logger log.Logger, + reg prometheus.Registerer, + jobController *PartitionJobController, +) (*BlockBuilder, + error) { + i := &BlockBuilder{ + id: id, + cfg: cfg, + periodicConfigs: periodicConfigs, + metrics: NewSlimgesterMetrics(reg), + logger: logger, + store: store, + objStore: objStore, + jobController: jobController, + } + + i.Service = services.NewBasicService(nil, i.running, nil) + return i, nil +} + +func (i *BlockBuilder) running(ctx context.Context) error { + ticker := time.NewTicker(i.cfg.Interval) + defer ticker.Stop() + + // run once in beginning + select { + case <-ctx.Done(): + return nil + default: + _, err := i.runOne(ctx) + if err != nil { + return err + } + } + + for { + select { + case <-ctx.Done(): + return nil + case <-ticker.C: + skipped, err := i.runOne(ctx) + level.Info(i.logger).Log( + "msg", "completed block builder run", "skipped", + "skipped", skipped, + "err", err, + ) + if err != nil { + return err + } + } + } +} + +// runOne performs a single +func (i *BlockBuilder) runOne(ctx context.Context) (skipped bool, err error) { + + exists, job, err := i.jobController.LoadJob(ctx) + if err != nil { + return false, err + } + + if !exists { + level.Info(i.logger).Log("msg", "no available job to process") + return true, nil + } + + logger := log.With( + i.logger, + "partition", job.Partition, + "job_min_offset", job.Offsets.Min, + "job_max_offset", job.Offsets.Max, + ) + + level.Debug(logger).Log("msg", "beginning job") + + indexer := newTsdbCreator() + appender := newAppender(i.id, + i.cfg, + i.periodicConfigs, + i.store, + i.objStore, + 
logger, + i.metrics, + ) + + var lastOffset int64 + p := newPipeline(ctx) + + // Pipeline stage 1: Process the job offsets and write records to inputCh + // This stage reads from the partition and feeds records into the input channel + // When complete, it stores the last processed offset and closes the channel + inputCh := make(chan []AppendInput) + p.AddStageWithCleanup( + "load records", + 1, + func(ctx context.Context) error { + lastOffset, err = i.jobController.Process(ctx, job.Offsets, inputCh) + return err + }, + func(ctx context.Context) error { + level.Debug(logger).Log( + "msg", "finished loading records", + "ctx_error", ctx.Err(), + ) + close(inputCh) + return nil + }, + ) + + // Stage 2: Process input records and generate chunks + // This stage receives AppendInput batches, appends them to appropriate instances, + // and forwards any cut chunks to the chunks channel for flushing. + // ConcurrentWriters workers process inputs in parallel to maximize throughput. + flush := make(chan *chunk.Chunk) + p.AddStageWithCleanup( + "appender", + i.cfg.ConcurrentWriters, + func(ctx context.Context) error { + + for { + select { + case <-ctx.Done(): + return ctx.Err() + case inputs, ok := <-inputCh: + // inputs are finished; we're done + if !ok { + return nil + } + + for _, input := range inputs { + cut, err := appender.Append(ctx, input) + if err != nil { + level.Error(logger).Log("msg", "failed to append records", "err", err) + return err + } + + for _, chk := range cut { + select { + case <-ctx.Done(): + return ctx.Err() + case flush <- chk: + } + } + } + } + } + }, + func(ctx context.Context) (err error) { + defer func() { + level.Debug(logger).Log( + "msg", "finished appender", + "err", err, + "ctx_error", ctx.Err(), + ) + }() + defer close(flush) + + // once we're done appending, cut all remaining chunks. 
+ chks, err := appender.CutRemainingChunks(ctx) + if err != nil { + return err + } + + for _, chk := range chks { + select { + case <-ctx.Done(): + return ctx.Err() + case flush <- chk: + } + } + return nil + }, + ) + + // Stage 3: Flush chunks to storage + // This stage receives chunks from the chunks channel and flushes them to storage + // using ConcurrentFlushes workers for parallel processing + p.AddStage( + "flusher", + i.cfg.ConcurrentFlushes, + func(ctx context.Context) error { + for { + select { + case <-ctx.Done(): + return ctx.Err() + case chk, ok := <-flush: + if !ok { + return nil + } + if _, err := withBackoff( + ctx, + i.cfg.Backoff, // retry forever + func() (res struct{}, err error) { + err = i.store.PutOne(ctx, chk.From, chk.Through, *chk) + if err != nil { + i.metrics.chunksFlushFailures.Inc() + return + } + appender.reportFlushedChunkStatistics(chk) + + // write flushed chunk to index + approxKB := math.Round(float64(chk.Data.UncompressedSize()) / float64(1<<10)) + meta := index.ChunkMeta{ + Checksum: chk.ChunkRef.Checksum, + MinTime: int64(chk.ChunkRef.From), + MaxTime: int64(chk.ChunkRef.Through), + KB: uint32(approxKB), + Entries: uint32(chk.Data.Entries()), + } + err = indexer.Append(chk.UserID, chk.Metric, chk.ChunkRef.Fingerprint, index.ChunkMetas{meta}) + return + }, + ); err != nil { + return err + } + } + } + }, + ) + + err = p.Run() + level.Debug(logger).Log( + "msg", "finished chunk creation", + "err", err, + ) + if err != nil { + return false, err + } + + var ( + nodeName = i.id + tableRanges = config.GetIndexStoreTableRanges(types.TSDBType, i.periodicConfigs) + ) + + built, err := indexer.create(ctx, nodeName, tableRanges) + if err != nil { + return false, err + } + + for _, db := range built { + u := newUploader(i.objStore) + if err := u.Put(ctx, db); err != nil { + level.Error(util_log.Logger).Log( + "msg", "failed to upload tsdb", + "path", db.id.Path(), + ) + + return false, err + } + + level.Debug(logger).Log( + "msg", 
"uploaded tsdb", + "name", db.id.Name(), + ) + } + + if lastOffset <= job.Offsets.Min { + return false, nil + } + + if err = i.jobController.part.Commit(ctx, lastOffset); err != nil { + level.Error(logger).Log( + "msg", "failed to commit offset", + "last_offset", lastOffset, + "err", err, + ) + return false, err + } + + // log success + level.Info(logger).Log( + "msg", "successfully processed and committed batch", + "last_offset", lastOffset, + ) + + return false, nil +} + +type Appender struct { + id string + cfg Config + periodicConfigs []config.PeriodConfig + + metrics *SlimgesterMetrics + logger log.Logger + + instances map[string]*instance + instancesMtx sync.RWMutex + + store stores.ChunkWriter + objStore *MultiStore +} + +// Writer is a single use construct for building chunks +// for from a set of records. It's an independent struct to ensure its +// state is not reused across jobs. +func newAppender( + id string, + cfg Config, + periodicConfigs []config.PeriodConfig, + store stores.ChunkWriter, + objStore *MultiStore, + logger log.Logger, + metrics *SlimgesterMetrics, +) *Appender { + return &Appender{ + id: id, + cfg: cfg, + periodicConfigs: periodicConfigs, + metrics: metrics, + logger: logger, + instances: make(map[string]*instance), + store: store, + objStore: objStore, + } +} + +// reportFlushedChunkStatistics calculate overall statistics of flushed chunks without compromising the flush process. 
+func (w *Appender) reportFlushedChunkStatistics( + ch *chunk.Chunk, +) { + byt, err := ch.Encoded() + if err != nil { + level.Error(w.logger).Log("msg", "failed to encode flushed wire chunk", "err", err) + return + } + sizePerTenant := w.metrics.chunkSizePerTenant.WithLabelValues(ch.UserID) + countPerTenant := w.metrics.chunksPerTenant.WithLabelValues(ch.UserID) + + reason := flushReasonFull + from, through := ch.From.Time(), ch.Through.Time() + if through.Sub(from) > w.cfg.MaxChunkAge { + reason = flushReasonMaxAge + } + + w.metrics.chunksFlushedPerReason.WithLabelValues(reason).Add(1) + + compressedSize := float64(len(byt)) + uncompressedSize, ok := chunkenc.UncompressedSize(ch.Data) + + if ok && compressedSize > 0 { + w.metrics.chunkCompressionRatio.Observe(float64(uncompressedSize) / compressedSize) + } + + utilization := ch.Data.Utilization() + w.metrics.chunkUtilization.Observe(utilization) + + numEntries := ch.Data.Entries() + w.metrics.chunkEntries.Observe(float64(numEntries)) + w.metrics.chunkSize.Observe(compressedSize) + sizePerTenant.Add(compressedSize) + countPerTenant.Inc() + + w.metrics.chunkAge.Observe(time.Since(from).Seconds()) + w.metrics.chunkLifespan.Observe(through.Sub(from).Hours()) + + w.metrics.flushedChunksBytesStats.Record(compressedSize) + w.metrics.flushedChunksLinesStats.Record(float64(numEntries)) + w.metrics.flushedChunksUtilizationStats.Record(utilization) + w.metrics.flushedChunksAgeStats.Record(time.Since(from).Seconds()) + w.metrics.flushedChunksLifespanStats.Record(through.Sub(from).Seconds()) + w.metrics.flushedChunksStats.Inc(1) +} + +func (w *Appender) CutRemainingChunks(ctx context.Context) ([]*chunk.Chunk, error) { + var chunks []*chunk.Chunk + w.instancesMtx.Lock() + defer w.instancesMtx.Unlock() + + for _, inst := range w.instances { + + // wrap in anonymous fn to make lock release more straightforward + if err := func() error { + inst.streams.mtx.Lock() + defer inst.streams.mtx.Unlock() + + for _, stream := range 
inst.streams.byLabels { + + // wrap in anonymous fn to make lock release more straightforward + if err := func() error { + stream.chunkMtx.Lock() + defer stream.chunkMtx.Unlock() + if stream.chunk != nil { + cut, err := stream.closeChunk() + if err != nil { + return err + } + encoded, err := inst.encodeChunk(ctx, stream, cut) + if err != nil { + return err + } + chunks = append(chunks, encoded) + } + return nil + + }(); err != nil { + return err + } + + } + return nil + + }(); err != nil { + return nil, err + } + + } + + return chunks, nil +} + +type AppendInput struct { + tenant string + // both labels & labelsStr are populated to prevent duplicating conversion work in multiple places + labels labels.Labels + labelsStr string + entries []push.Entry +} + +func (w *Appender) Append(ctx context.Context, input AppendInput) ([]*chunk.Chunk, error) { + // use rlock so multiple appends can be called on same instance. + // re-check after using regular lock if it didnt exist. + w.instancesMtx.RLock() + inst, ok := w.instances[input.tenant] + w.instancesMtx.RUnlock() + if !ok { + w.instancesMtx.Lock() + inst, ok = w.instances[input.tenant] + if !ok { + inst = newInstance(w.cfg, input.tenant, w.metrics, w.periodicConfigs, w.logger) + w.instances[input.tenant] = inst + } + w.instancesMtx.Unlock() + } + + closed, err := inst.Push(ctx, input) + return closed, err +} + +// instance is a slimmed down version from the ingester pkg +type instance struct { + cfg Config + tenant string + buf []byte // buffer used to compute fps. 
+ mapper *ingester.FpMapper // using of mapper no longer needs mutex because reading from streams is lock-free + metrics *SlimgesterMetrics + streams *streamsMap + logger log.Logger + + periods []config.PeriodConfig +} + +func newInstance( + cfg Config, + tenant string, + metrics *SlimgesterMetrics, + periods []config.PeriodConfig, + logger log.Logger, +) *instance { + streams := newStreamsMap() + return &instance{ + cfg: cfg, + tenant: tenant, + buf: make([]byte, 0, 1024), + mapper: ingester.NewFPMapper(streams.getLabelsFromFingerprint), + metrics: metrics, + streams: streams, + logger: logger, + periods: periods, + } +} + +func newStreamsMap() *streamsMap { + return &streamsMap{ + byLabels: make(map[string]*stream), + byFp: make(map[model.Fingerprint]*stream), + } +} + +type streamsMap struct { + // labels -> stream + byLabels map[string]*stream + byFp map[model.Fingerprint]*stream + mtx sync.RWMutex +} + +// For performs an operation on an existing stream, creating it if it wasn't previously present. +func (m *streamsMap) For( + ls string, + createFn func() (*stream, error), + fn func(*stream) error, +) error { + // first use read lock in case the stream exists + m.mtx.RLock() + if s, ok := m.byLabels[ls]; ok { + err := fn(s) + m.mtx.RUnlock() + return err + } + m.mtx.RUnlock() + + // Stream wasn't found, acquire write lock to create it + m.mtx.Lock() + defer m.mtx.Unlock() + + // Double check it wasn't created while we were upgrading the lock + if s, ok := m.byLabels[ls]; ok { + return fn(s) + } + + // Create new stream + s, err := createFn() + if err != nil { + return err + } + + m.byLabels[ls] = s + m.byFp[s.fp] = s + return fn(s) +} + +// Return labels associated with given fingerprint. Used by fingerprint mapper. 
+func (m *streamsMap) getLabelsFromFingerprint(fp model.Fingerprint) labels.Labels { + + if s, ok := m.byFp[fp]; ok { + return s.ls + } + return nil +} + +func (i *instance) getHashForLabels(ls labels.Labels) model.Fingerprint { + var fp uint64 + fp, i.buf = ls.HashWithoutLabels(i.buf, []string(nil)...) + return i.mapper.MapFP(model.Fingerprint(fp), ls) +} + +// Push will iterate over the given streams present in the PushRequest and attempt to store them. +func (i *instance) Push( + ctx context.Context, + input AppendInput, +) (closed []*chunk.Chunk, err error) { + err = i.streams.For( + input.labelsStr, + func() (*stream, error) { + fp := i.getHashForLabels(input.labels) + return newStream(fp, input.labels, i.cfg, i.metrics), nil + }, + func(stream *stream) error { + xs, err := stream.Push(input.entries) + if err != nil { + return err + } + + if len(xs) > 0 { + for _, x := range xs { + // encodeChunk mutates the chunk so we must pass by reference + chk, err := i.encodeChunk(ctx, stream, x) + if err != nil { + return err + } + closed = append(closed, chk) + } + } + return err + }, + ) + + return closed, err +} + +// encodeChunk encodes a chunk.Chunk. 
+func (i *instance) encodeChunk(ctx context.Context, stream *stream, mc *chunkenc.MemChunk) (*chunk.Chunk, error) { + if err := ctx.Err(); err != nil { + return nil, err + } + start := time.Now() + + firstTime, lastTime := util.RoundToMilliseconds(mc.Bounds()) + chk := chunk.NewChunk( + i.tenant, stream.fp, stream.ls, + chunkenc.NewFacade(mc, stream.blockSize, stream.targetChunkSize), + firstTime, + lastTime, + ) + + chunkBytesSize := mc.BytesSize() + 4*1024 // size + 4kB should be enough room for cortex header + if err := chk.EncodeTo(bytes.NewBuffer(make([]byte, 0, chunkBytesSize)), i.logger); err != nil { + if !errors.Is(err, chunk.ErrChunkDecode) { + return nil, fmt.Errorf("chunk encoding: %w", err) + } + + i.metrics.chunkDecodeFailures.WithLabelValues(chk.UserID).Inc() + } + i.metrics.chunkEncodeTime.Observe(time.Since(start).Seconds()) + i.metrics.chunksEncoded.WithLabelValues(chk.UserID).Inc() + return &chk, nil +} + +type stream struct { + fp model.Fingerprint + ls labels.Labels + + chunkFormat byte + codec compression.Codec + blockSize int + targetChunkSize int + + chunkMtx sync.RWMutex + chunk *chunkenc.MemChunk + metrics *SlimgesterMetrics +} + +func newStream(fp model.Fingerprint, ls labels.Labels, cfg Config, metrics *SlimgesterMetrics) *stream { + return &stream{ + fp: fp, + ls: ls, + + chunkFormat: chunkenc.ChunkFormatV4, + codec: cfg.parsedEncoding, + blockSize: cfg.BlockSize.Val(), + targetChunkSize: cfg.TargetChunkSize.Val(), + + metrics: metrics, + } +} + +func (s *stream) Push(entries []push.Entry) (closed []*chunkenc.MemChunk, err error) { + s.chunkMtx.Lock() + defer s.chunkMtx.Unlock() + + if s.chunk == nil { + s.chunk = s.NewChunk() + } + + // bytesAdded, err := s.storeEntries(ctx, toStore, usageTracker) + for i := 0; i < len(entries); i++ { + + // cut the chunk if the new addition overflows target size + if !s.chunk.SpaceFor(&entries[i]) { + cut, err := s.closeChunk() + if err != nil { + return nil, err + } + closed = append(closed, cut) + } 
+ + if _, err = s.chunk.Append(&entries[i]); err != nil { + return closed, errors.Wrap(err, "appending entry") + } + } + + return closed, nil +} + +func (s *stream) closeChunk() (*chunkenc.MemChunk, error) { + if err := s.chunk.Close(); err != nil { + return nil, errors.Wrap(err, "closing chunk") + } + + s.metrics.samplesPerChunk.Observe(float64(s.chunk.Size())) + s.metrics.blocksPerChunk.Observe(float64(s.chunk.BlockCount())) + s.metrics.chunksCreatedTotal.Inc() + s.metrics.chunkCreatedStats.Inc(1) + + // add a chunk + res := s.chunk + s.chunk = s.NewChunk() + return res, nil +} + +func (s *stream) NewChunk() *chunkenc.MemChunk { + return chunkenc.NewMemChunk( + s.chunkFormat, + s.codec, + chunkenc.ChunkHeadFormatFor(s.chunkFormat), + s.blockSize, + s.targetChunkSize, + ) +} + +func withBackoff[T any]( + ctx context.Context, + config backoff.Config, + fn func() (T, error), +) (T, error) { + var zero T + + var boff = backoff.New(ctx, config) + for boff.Ongoing() { + res, err := fn() + if err != nil { + boff.Wait() + continue + } + return res, nil + } + + return zero, boff.ErrCause() +} diff --git a/pkg/blockbuilder/storage.go b/pkg/blockbuilder/storage.go new file mode 100644 index 000000000000..2815b9b97ad8 --- /dev/null +++ b/pkg/blockbuilder/storage.go @@ -0,0 +1,154 @@ +package blockbuilder + +import ( + "context" + "fmt" + "io" + "sort" + + "github.com/opentracing/opentracing-go" + "github.com/prometheus/common/model" + + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" +) + +type MultiStore struct { + stores []*storeEntry + storageConfig storage.Config +} + +type storeEntry struct { + start model.Time + cfg config.PeriodConfig + objectClient client.ObjectClient +} + +var _ client.ObjectClient = (*MultiStore)(nil) + +func NewMultiStore( + periodicConfigs []config.PeriodConfig, + storageConfig storage.Config, + clientMetrics storage.ClientMetrics, +) (*MultiStore, 
error) { + store := &MultiStore{ + storageConfig: storageConfig, + } + // sort by From time + sort.Slice(periodicConfigs, func(i, j int) bool { + return periodicConfigs[i].From.Time.Before(periodicConfigs[j].From.Time) + }) + for _, periodicConfig := range periodicConfigs { + objectClient, err := storage.NewObjectClient(periodicConfig.ObjectType, "storage-rf1", storageConfig, clientMetrics) + if err != nil { + return nil, fmt.Errorf("creating object client for period %s: %w ", periodicConfig.From, err) + } + store.stores = append(store.stores, &storeEntry{ + start: periodicConfig.From.Time, + cfg: periodicConfig, + objectClient: objectClient, + }) + } + return store, nil +} + +func (m *MultiStore) GetStoreFor(ts model.Time) (client.ObjectClient, error) { + // find the schema with the lowest start _after_ tm + j := sort.Search(len(m.stores), func(j int) bool { + return m.stores[j].start > ts + }) + + // reduce it by 1 because we want a schema with start <= tm + j-- + + if 0 <= j && j < len(m.stores) { + return m.stores[j].objectClient, nil + } + + // should in theory never happen + return nil, fmt.Errorf("no store found for timestamp %s", ts) +} + +func (m *MultiStore) GetAttributes(ctx context.Context, objectKey string) (client.ObjectAttributes, error) { + s, err := m.GetStoreFor(model.Now()) + if err != nil { + return client.ObjectAttributes{}, err + } + return s.GetAttributes(ctx, objectKey) +} + +func (m *MultiStore) ObjectExists(ctx context.Context, objectKey string) (bool, error) { + s, err := m.GetStoreFor(model.Now()) + if err != nil { + return false, err + } + return s.ObjectExists(ctx, objectKey) +} + +func (m *MultiStore) PutObject(ctx context.Context, objectKey string, object io.Reader) error { + s, err := m.GetStoreFor(model.Now()) + if err != nil { + return err + } + return s.PutObject(ctx, objectKey, object) +} + +func (m *MultiStore) GetObject(ctx context.Context, objectKey string) (io.ReadCloser, int64, error) { + s, err := 
m.GetStoreFor(model.Now()) + if err != nil { + return nil, 0, err + } + return s.GetObject(ctx, objectKey) +} + +func (m *MultiStore) GetObjectRange(ctx context.Context, objectKey string, off, length int64) (io.ReadCloser, error) { + sp, _ := opentracing.StartSpanFromContext(ctx, "GetObjectRange") + if sp != nil { + sp.LogKV("objectKey", objectKey, "off", off, "length", length) + } + defer sp.Finish() + s, err := m.GetStoreFor(model.Now()) + if err != nil { + return nil, err + } + return s.GetObjectRange(ctx, objectKey, off, length) +} + +func (m *MultiStore) List(ctx context.Context, prefix string, delimiter string) ([]client.StorageObject, []client.StorageCommonPrefix, error) { + s, err := m.GetStoreFor(model.Now()) + if err != nil { + return nil, nil, err + } + return s.List(ctx, prefix, delimiter) +} + +func (m *MultiStore) DeleteObject(ctx context.Context, objectKey string) error { + s, err := m.GetStoreFor(model.Now()) + if err != nil { + return err + } + return s.DeleteObject(ctx, objectKey) +} + +func (m *MultiStore) IsObjectNotFoundErr(err error) bool { + s, _ := m.GetStoreFor(model.Now()) + if s == nil { + return false + } + return s.IsObjectNotFoundErr(err) +} + +func (m *MultiStore) IsRetryableErr(err error) bool { + s, _ := m.GetStoreFor(model.Now()) + if s == nil { + return false + } + return s.IsRetryableErr(err) +} + +func (m *MultiStore) Stop() { + for _, s := range m.stores { + s.objectClient.Stop() + } +} diff --git a/pkg/blockbuilder/storage_test.go b/pkg/blockbuilder/storage_test.go new file mode 100644 index 000000000000..8fc6b237e132 --- /dev/null +++ b/pkg/blockbuilder/storage_test.go @@ -0,0 +1,37 @@ +package blockbuilder + +import ( + "os" + "testing" + + "github.com/prometheus/common/model" + + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" +) + +var metrics *storage.ClientMetrics + +func NewTestStorage(t testing.TB) (*MultiStore, 
error) { + if metrics == nil { + m := storage.NewClientMetrics() + metrics = &m + } + dir := t.TempDir() + t.Cleanup(func() { + os.RemoveAll(dir) + metrics.Unregister() + }) + cfg := storage.Config{ + FSConfig: local.FSConfig{ + Directory: dir, + }, + } + return NewMultiStore([]config.PeriodConfig{ + { + From: config.DayTime{Time: model.Now()}, + ObjectType: "filesystem", + }, + }, cfg, *metrics) +} diff --git a/pkg/blockbuilder/tsdb.go b/pkg/blockbuilder/tsdb.go new file mode 100644 index 000000000000..8af463fcd27d --- /dev/null +++ b/pkg/blockbuilder/tsdb.go @@ -0,0 +1,328 @@ +package blockbuilder + +import ( + "bytes" + "context" + "fmt" + "io" + "sync" + "time" + + "github.com/cespare/xxhash/v2" + "github.com/go-kit/log/level" + "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" + + "github.com/grafana/loki/v3/pkg/compression" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" +) + +// TsdbCreator accepts writes and builds TSDBs. +type TsdbCreator struct { + // Function to build a TSDB from the current state + + mtx sync.RWMutex + shards int + heads *tenantHeads +} + +// new creates a new HeadManager +func newTsdbCreator() *TsdbCreator { + m := &TsdbCreator{ + shards: 1 << 5, // 32 shards + } + m.reset() + + return m +} + +// reset updates heads +func (m *TsdbCreator) reset() { + m.heads = newTenantHeads(m.shards) +} + +// Append adds a new series for the given user +func (m *TsdbCreator) Append(userID string, ls labels.Labels, fprint uint64, chks index.ChunkMetas) error { + m.mtx.RLock() + defer m.mtx.RUnlock() + + // TODO(owen-d): safe to remove? 
+ // Remove __name__="logs" as it's not needed in TSDB + b := labels.NewBuilder(ls) + b.Del(labels.MetricName) + ls = b.Labels() + + // Just append to heads, no WAL needed + m.heads.Append(userID, ls, fprint, chks) + return nil +} + +type chunkInfo struct { + chunkMetas index.ChunkMetas + tsdbFormat int +} + +type tsdbWithID struct { + bucket model.Time + data []byte + id tsdb.Identifier +} + +// Create builds a TSDB from the current state using the provided mkTsdb function +func (m *TsdbCreator) create(ctx context.Context, nodeName string, tableRanges []config.TableRange) ([]tsdbWithID, error) { + m.mtx.Lock() + defer m.mtx.Unlock() + + type key struct { + bucket model.Time + prefix string + } + periods := make(map[key]*tsdb.Builder) + + if err := m.heads.forAll( + func(user string, ls labels.Labels, fp uint64, chks index.ChunkMetas) error { + // chunks may overlap index period bounds, in which case they're written to multiple + pds := make(map[key]chunkInfo) + for _, chk := range chks { + idxBuckets := tsdb.IndexBuckets(chk.From(), chk.Through(), tableRanges) + + for _, bucket := range idxBuckets { + k := key{ + bucket: bucket.BucketStart, + prefix: bucket.Prefix, + } + chkinfo := pds[k] + chkinfo.chunkMetas = append(chkinfo.chunkMetas, chk) + chkinfo.tsdbFormat = bucket.TsdbFormat + pds[k] = chkinfo + } + } + + // Embed the tenant label into TSDB + lb := labels.NewBuilder(ls) + lb.Set(index.TenantLabel, user) + withTenant := lb.Labels() + + // Add the chunks to all relevant builders + for pd, chkinfo := range pds { + matchingChks := chkinfo.chunkMetas + b, ok := periods[pd] + if !ok { + b = tsdb.NewBuilder(chkinfo.tsdbFormat) + periods[pd] = b + } + + b.AddSeries( + withTenant, + // use the fingerprint without the added tenant label + // so queries route to the chunks which actually exist. 
+ model.Fingerprint(fp), + matchingChks, + ) + } + + return nil + }, + ); err != nil { + level.Error(util_log.Logger).Log("err", err.Error(), "msg", "building TSDB") + return nil, err + } + + now := time.Now() + res := make([]tsdbWithID, 0, len(periods)) + + for p, b := range periods { + + level.Debug(util_log.Logger).Log( + "msg", "building tsdb for period", + "pd", p, + ) + + // build+move tsdb to multitenant dir + start := time.Now() + dst, data, err := b.BuildInMemory( + ctx, + func(_, _ model.Time, _ uint32) tsdb.Identifier { + return tsdb.NewPrefixedIdentifier( + tsdb.MultitenantTSDBIdentifier{ + NodeName: nodeName, + Ts: now, + }, + p.prefix, + "", + ) + }, + ) + + if err != nil { + return nil, err + } + + level.Debug(util_log.Logger).Log( + "msg", "finished building tsdb for period", + "pd", p, + "dst", dst.Path(), + "duration", time.Since(start), + ) + res = append(res, tsdbWithID{ + bucket: p.bucket, + id: dst, + data: data, + }) + } + + m.reset() + return res, nil +} + +// tenantHeads manages per-tenant series +type tenantHeads struct { + shards int + locks []sync.RWMutex + tenants []map[string]*Head +} + +func newTenantHeads(shards int) *tenantHeads { + t := &tenantHeads{ + shards: shards, + locks: make([]sync.RWMutex, shards), + tenants: make([]map[string]*Head, shards), + } + for i := range t.tenants { + t.tenants[i] = make(map[string]*Head) + } + return t +} + +func (t *tenantHeads) Append(userID string, ls labels.Labels, fprint uint64, chks index.ChunkMetas) { + head := t.getOrCreateTenantHead(userID) + head.Append(ls, fprint, chks) +} + +func (t *tenantHeads) getOrCreateTenantHead(userID string) *Head { + idx := t.shardForTenant(userID) + mtx := &t.locks[idx] + + // Fast path: return existing head + mtx.RLock() + head, ok := t.tenants[idx][userID] + mtx.RUnlock() + if ok { + return head + } + + // Slow path: create new head + mtx.Lock() + defer mtx.Unlock() + + head, ok = t.tenants[idx][userID] + if !ok { + head = NewHead(userID) + 
t.tenants[idx][userID] = head + } + return head +} + +func (t *tenantHeads) shardForTenant(userID string) uint64 { + return xxhash.Sum64String(userID) & uint64(t.shards-1) +} + +// forAll iterates through all series in all tenant heads +func (t *tenantHeads) forAll(fn func(user string, ls labels.Labels, fp uint64, chks index.ChunkMetas) error) error { + for i, shard := range t.tenants { + t.locks[i].RLock() + defer t.locks[i].RUnlock() + + for user, tenant := range shard { + if err := tenant.forAll(func(ls labels.Labels, fp uint64, chks index.ChunkMetas) error { + return fn(user, ls, fp, chks) + }); err != nil { + return err + } + } + } + return nil +} + +// Head manages series for a single tenant +type Head struct { + userID string + series map[uint64]*series + mtx sync.RWMutex +} + +type series struct { + labels labels.Labels + chks []index.ChunkMeta +} + +func NewHead(userID string) *Head { + return &Head{ + userID: userID, + series: make(map[uint64]*series), + } +} + +func (h *Head) Append(ls labels.Labels, fp uint64, chks index.ChunkMetas) { + h.mtx.Lock() + defer h.mtx.Unlock() + + s, ok := h.series[fp] + if !ok { + s = &series{labels: ls} + h.series[fp] = s + } + s.chks = append(s.chks, chks...) 
+} + +func (h *Head) forAll(fn func(ls labels.Labels, fp uint64, chks index.ChunkMetas) error) error { + h.mtx.RLock() + defer h.mtx.RUnlock() + + for fp, s := range h.series { + if err := fn(s.labels, fp, s.chks); err != nil { + return err + } + } + return nil +} + +type uploader struct { + store *MultiStore +} + +func newUploader(store *MultiStore) *uploader { + return &uploader{store: store} +} + +func (u *uploader) Put(ctx context.Context, db tsdbWithID) error { + client, err := u.store.GetStoreFor(db.bucket) + if err != nil { + return err + } + + reader := bytes.NewReader(db.data) + gzipPool := compression.GetWriterPool(compression.GZIP) + buf := bytes.NewBuffer(make([]byte, 0, 1<<20)) + compressedWriter := gzipPool.GetWriter(buf) + defer gzipPool.PutWriter(compressedWriter) + + _, err = io.Copy(compressedWriter, reader) + if err != nil { + return err + } + + err = compressedWriter.Close() + if err != nil { + return err + } + + return client.PutObject(ctx, db.id.Path(), buf) +} + +func buildFileName(indexName string) string { + return fmt.Sprintf("%s.gz", indexName) +} diff --git a/pkg/bloombuild/builder/builder.go b/pkg/bloombuild/builder/builder.go index 1b0a81a24725..b5bb682e8efc 100644 --- a/pkg/bloombuild/builder/builder.go +++ b/pkg/bloombuild/builder/builder.go @@ -23,6 +23,7 @@ import ( "github.com/grafana/loki/v3/pkg/bloombuild/common" "github.com/grafana/loki/v3/pkg/bloombuild/protos" + "github.com/grafana/loki/v3/pkg/bloomgateway" "github.com/grafana/loki/v3/pkg/compression" iter "github.com/grafana/loki/v3/pkg/iter/v2" "github.com/grafana/loki/v3/pkg/storage" @@ -48,8 +49,9 @@ type Builder struct { metrics *Metrics logger log.Logger - bloomStore bloomshipper.Store - chunkLoader ChunkLoader + bloomStore bloomshipper.Store + chunkLoader ChunkLoader + bloomGateway bloomgateway.Client client protos.PlannerForBuilderClient @@ -66,6 +68,7 @@ func New( _ storage.ClientMetrics, fetcherProvider stores.ChunkFetcherProvider, bloomStore bloomshipper.Store, + 
bloomGateway bloomgateway.Client, logger log.Logger, r prometheus.Registerer, rm *ring.RingManager, @@ -77,13 +80,14 @@ func New( metrics := NewMetrics(r) b := &Builder{ - ID: builderID, - cfg: cfg, - limits: limits, - metrics: metrics, - bloomStore: bloomStore, - chunkLoader: NewStoreChunkLoader(fetcherProvider, metrics), - logger: logger, + ID: builderID, + cfg: cfg, + limits: limits, + metrics: metrics, + bloomStore: bloomStore, + chunkLoader: NewStoreChunkLoader(fetcherProvider, metrics), + bloomGateway: bloomGateway, + logger: logger, } if rm != nil { @@ -368,6 +372,12 @@ func (b *Builder) processTask( logger := task.GetLogger(b.logger) level.Debug(logger).Log("msg", "task started") + if timeout := b.limits.BuilderResponseTimeout(task.Tenant); timeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithDeadline(ctx, time.Now().Add(timeout)) + defer cancel() + } + client, err := b.bloomStore.Client(task.Table.ModelTime()) if err != nil { level.Error(logger).Log("msg", "failed to get client", "err", err) @@ -390,6 +400,10 @@ func (b *Builder) processTask( ) for i := range task.Gaps { + if ctx.Err() != nil { + return nil, ctx.Err() + } + gap := task.Gaps[i] logger := log.With(logger, "gap", gap.Bounds.String()) @@ -509,6 +523,13 @@ func (b *Builder) processTask( b.metrics.metasCreated.Inc() level.Debug(logger).Log("msg", "uploaded meta") created = append(created, meta) + + // Now that the meta is written thus blocks can be queried, we prefetch them to the gateway + if b.bloomGateway != nil && b.limits.PrefetchBloomBlocks(tenant) { + if err := b.bloomGateway.PrefetchBloomBlocks(ctx, meta.Blocks); err != nil { + level.Error(logger).Log("msg", "failed to prefetch block on gateway", "err", err) + } + } } b.metrics.seriesPerTask.Observe(float64(totalSeries)) diff --git a/pkg/bloombuild/builder/builder_test.go b/pkg/bloombuild/builder/builder_test.go index 6197c6209974..7fdbf44ffdb8 100644 --- a/pkg/bloombuild/builder/builder_test.go +++ 
b/pkg/bloombuild/builder/builder_test.go @@ -18,6 +18,7 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" + "github.com/grafana/loki/v3/pkg/bloombuild/planner/plannertest" "github.com/grafana/loki/v3/pkg/bloombuild/protos" "github.com/grafana/loki/v3/pkg/storage" v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" @@ -28,10 +29,7 @@ import ( "github.com/grafana/loki/v3/pkg/storage/types" ) -func Test_BuilderLoop(t *testing.T) { - logger := log.NewNopLogger() - //logger := log.NewLogfmtLogger(os.Stdout) - +func setupBuilder(t *testing.T, plannerAddr string, limits Limits, logger log.Logger) *Builder { schemaCfg := config.SchemaConfig{ Configs: []config.PeriodConfig{ { @@ -64,22 +62,8 @@ func Test_BuilderLoop(t *testing.T) { }, } - tasks := make([]*protos.ProtoTask, 256) - for i := range tasks { - tasks[i] = &protos.ProtoTask{ - Id: fmt.Sprintf("task-%d", i), - } - } - - server, err := newFakePlannerServer(tasks) - require.NoError(t, err) - - // Start the server so the builder can connect and receive tasks. 
- server.Start() - - limits := fakeLimits{} cfg := Config{ - PlannerAddress: server.Addr(), + PlannerAddress: plannerAddr, BackoffConfig: backoff.Config{ MinBackoff: 1 * time.Second, MaxBackoff: 10 * time.Second, @@ -88,8 +72,48 @@ func Test_BuilderLoop(t *testing.T) { } flagext.DefaultValues(&cfg.GrpcConfig) - builder, err := New(cfg, limits, schemaCfg, storageCfg, storage.NewClientMetrics(), nil, fakeBloomStore{}, logger, prometheus.DefaultRegisterer, nil) + metrics := storage.NewClientMetrics() + metrics.Unregister() + + builder, err := New(cfg, limits, schemaCfg, storageCfg, metrics, nil, fakeBloomStore{}, nil, logger, prometheus.NewPedanticRegistry(), nil) + require.NoError(t, err) + + return builder +} + +func createTasks(n int) []*protos.ProtoTask { + tasks := make([]*protos.ProtoTask, n) + for i := range tasks { + tasks[i] = protos.NewTask( + plannertest.TestTable, + "fake", + v1.NewBounds(model.Fingerprint(i), model.Fingerprint(i+10)), + plannertest.TsdbID(1), + []protos.Gap{ + { + Bounds: v1.NewBounds(model.Fingerprint(i+1), model.Fingerprint(i+2)), + }, + { + Bounds: v1.NewBounds(model.Fingerprint(i+3), model.Fingerprint(i+9)), + }, + }, + ).ToProtoTask() + } + return tasks +} + +func Test_BuilderLoop(t *testing.T) { + logger := log.NewNopLogger() + //logger := log.NewLogfmtLogger(os.Stdout) + + tasks := createTasks(256) + server, err := newFakePlannerServer(tasks) require.NoError(t, err) + + // Start the server so the builder can connect and receive tasks. 
+ server.Start() + + builder := setupBuilder(t, server.Addr(), fakeLimits{}, logger) t.Cleanup(func() { err = services.StopAndAwaitTerminated(context.Background(), builder) require.NoError(t, err) @@ -128,9 +152,71 @@ func Test_BuilderLoop(t *testing.T) { require.True(t, server.shutdownCalled) } +func Test_BuilderLoop_Timeout(t *testing.T) { + for _, tc := range []struct { + name string + timeout time.Duration + allTasksSucceed bool + }{ + { + name: "no timeout configured", + timeout: 0, + allTasksSucceed: true, + }, + { + name: "long enough timeout", + timeout: 15 * time.Minute, + allTasksSucceed: true, + }, + { + name: "task times out", + timeout: 1 * time.Nanosecond, // Pretty much immediately. + allTasksSucceed: false, + }, + } { + t.Run(tc.name, func(t *testing.T) { + logger := log.NewNopLogger() + //logger := log.NewLogfmtLogger(os.Stdout) + + tasks := createTasks(256) + server, err := newFakePlannerServer(tasks) + require.NoError(t, err) + + // Start the server so the builder can connect and receive tasks. 
+ server.Start() + + limits := fakeLimits{ + taskTimout: tc.timeout, + } + builder := setupBuilder(t, server.Addr(), limits, logger) + t.Cleanup(func() { + err = services.StopAndAwaitTerminated(context.Background(), builder) + require.NoError(t, err) + + server.Stop() + }) + + err = services.StartAndAwaitRunning(context.Background(), builder) + require.NoError(t, err) + + require.Eventually(t, func() bool { + return server.CompletedTasks() >= len(tasks) + }, 30*time.Second, 500*time.Millisecond) + + erroredTasks := server.ErroredTasks() + if tc.allTasksSucceed { + require.Equal(t, 0, erroredTasks) + } else { + require.Equal(t, len(tasks), erroredTasks) + } + }) + } +} + type fakePlannerServer struct { tasks []*protos.ProtoTask completedTasks atomic.Int64 + erroredTasks atomic.Int64 shutdownCalled bool listenAddr string @@ -198,11 +284,18 @@ func (f *fakePlannerServer) BuilderLoop(srv protos.PlannerForBuilder_BuilderLoop if err := srv.Send(&protos.PlannerToBuilder{Task: task}); err != nil { return fmt.Errorf("failed to send task: %w", err) } - if _, err := srv.Recv(); err != nil { + + result, err := srv.Recv() + if err != nil { return fmt.Errorf("failed to receive task response: %w", err) } - time.Sleep(10 * time.Millisecond) // Simulate task processing time to add some latency. + f.completedTasks.Inc() + if result.Result.Error != "" { + f.erroredTasks.Inc() + } + + time.Sleep(10 * time.Millisecond) // Simulate task processing time to add some latency. } // No more tasks. Wait until shutdown. 
@@ -214,32 +307,36 @@ func (f *fakePlannerServer) CompletedTasks() int { return int(f.completedTasks.Load()) } +func (f *fakePlannerServer) ErroredTasks() int { + return int(f.erroredTasks.Load()) +} + func (f *fakePlannerServer) NotifyBuilderShutdown(_ context.Context, _ *protos.NotifyBuilderShutdownRequest) (*protos.NotifyBuilderShutdownResponse, error) { f.shutdownCalled = true return &protos.NotifyBuilderShutdownResponse{}, nil } type fakeLimits struct { + Limits + taskTimout time.Duration } -func (f fakeLimits) BloomBlockEncoding(_ string) string { - panic("implement me") -} - -func (f fakeLimits) BloomNGramLength(_ string) int { - panic("implement me") -} +var _ Limits = fakeLimits{} -func (f fakeLimits) BloomNGramSkip(_ string) int { - panic("implement me") +func (f fakeLimits) BloomBlockEncoding(_ string) string { + return "none" } func (f fakeLimits) BloomMaxBlockSize(_ string) int { - panic("implement me") + return 0 } func (f fakeLimits) BloomMaxBloomSize(_ string) int { - panic("implement me") + return 0 +} + +func (f fakeLimits) BuilderResponseTimeout(_ string) time.Duration { + return f.taskTimout } type fakeBloomStore struct { @@ -250,6 +347,26 @@ func (f fakeBloomStore) BloomMetrics() *v1.Metrics { return nil } +func (f fakeBloomStore) Client(_ model.Time) (bloomshipper.Client, error) { + return fakeBloomClient{}, nil +} + +func (f fakeBloomStore) Fetcher(_ model.Time) (*bloomshipper.Fetcher, error) { + return &bloomshipper.Fetcher{}, nil +} + +type fakeBloomClient struct { + bloomshipper.Client +} + +func (f fakeBloomClient) PutBlock(_ context.Context, _ bloomshipper.Block) error { + return nil +} + +func (f fakeBloomClient) PutMeta(_ context.Context, _ bloomshipper.Meta) error { + return nil +} + func parseDayTime(s string) config.DayTime { t, err := time.Parse("2006-01-02", s) if err != nil { diff --git a/pkg/bloombuild/builder/config.go b/pkg/bloombuild/builder/config.go index dcb44c55b5f3..026a5b88d7b7 100644 --- 
a/pkg/bloombuild/builder/config.go +++ b/pkg/bloombuild/builder/config.go @@ -3,6 +3,7 @@ package builder import ( "flag" "fmt" + "time" "github.com/grafana/dskit/backoff" "github.com/grafana/dskit/grpcclient" @@ -40,4 +41,6 @@ type Limits interface { BloomBlockEncoding(tenantID string) string BloomMaxBlockSize(tenantID string) int BloomMaxBloomSize(tenantID string) int + BuilderResponseTimeout(tenantID string) time.Duration + PrefetchBloomBlocks(tenantID string) bool } diff --git a/pkg/bloombuild/builder/metrics.go b/pkg/bloombuild/builder/metrics.go index 8d4545c00bf5..c5d45b69fc4f 100644 --- a/pkg/bloombuild/builder/metrics.go +++ b/pkg/bloombuild/builder/metrics.go @@ -3,10 +3,11 @@ package builder import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( - metricsNamespace = "loki" metricsSubsystem = "bloombuilder" statusSuccess = "success" @@ -34,32 +35,32 @@ type Metrics struct { func NewMetrics(r prometheus.Registerer) *Metrics { return &Metrics{ running: promauto.With(r).NewGauge(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "running", Help: "Value will be 1 if the bloom builder is currently running on this instance", }), processingTask: promauto.With(r).NewGauge(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "processing_task", Help: "Value will be 1 if the bloom builder is currently processing a task", }), taskStarted: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "task_started_total", Help: "Total number of task started", }), taskCompleted: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: 
"task_completed_total", Help: "Total number of task completed", }, []string{"status"}), taskDuration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "task_duration_seconds", Help: "Time spent processing a task.", @@ -73,26 +74,26 @@ func NewMetrics(r prometheus.Registerer) *Metrics { }, []string{"status"}), blocksReused: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "blocks_reused_total", Help: "Number of overlapping bloom blocks reused when creating new blocks", }), blocksCreated: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "blocks_created_total", Help: "Number of blocks created", }), metasCreated: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "metas_created_total", Help: "Number of metas created", }), seriesPerTask: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "series_per_task", Help: "Number of series during task processing. 
Includes series which copied from other blocks and don't need to be indexed", @@ -100,7 +101,7 @@ func NewMetrics(r prometheus.Registerer) *Metrics { Buckets: prometheus.ExponentialBucketsRange(1, 10e6, 10), }), bytesPerTask: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "bytes_per_task", Help: "Number of source bytes from chunks added during a task processing.", @@ -109,7 +110,7 @@ func NewMetrics(r prometheus.Registerer) *Metrics { }), chunkSize: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "chunk_series_size", Help: "Uncompressed size of chunks in a series", diff --git a/pkg/bloombuild/planner/metrics.go b/pkg/bloombuild/planner/metrics.go index 936515ad736f..fd9efad2716b 100644 --- a/pkg/bloombuild/planner/metrics.go +++ b/pkg/bloombuild/planner/metrics.go @@ -5,10 +5,11 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( - metricsNamespace = "loki" metricsSubsystem = "bloomplanner" statusSuccess = "success" @@ -40,6 +41,7 @@ type Metrics struct { tenantsDiscovered prometheus.Counter tenantTasksPlanned *prometheus.GaugeVec tenantTasksCompleted *prometheus.GaugeVec + tenantTasksTiming *prometheus.HistogramVec // Retention metrics retentionRunning prometheus.Gauge @@ -55,26 +57,26 @@ func NewMetrics( ) *Metrics { return &Metrics{ running: promauto.With(r).NewGauge(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "running", Help: "Value will be 1 if bloom planner is currently running on this instance", }), connectedBuilders: promauto.With(r).NewGaugeFunc(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, 
Name: "connected_builders", Help: "Number of builders currently connected to the planner.", }, getConnectedBuilders), queueDuration: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "queue_duration_seconds", Help: "Time spend by tasks in queue before getting picked up by a builder.", Buckets: prometheus.DefBuckets, }), inflightRequests: promauto.With(r).NewSummary(prometheus.SummaryOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "inflight_tasks", Help: "Number of inflight tasks (either queued or processing) sampled at a regular interval. Quantile buckets keep track of inflight tasks over the last 60s.", @@ -83,20 +85,20 @@ func NewMetrics( AgeBuckets: 6, }), tasksRequeued: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "tasks_requeued_total", Help: "Total number of tasks requeued due to not being picked up by a builder.", }), taskLost: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "tasks_lost_total", Help: "Total number of tasks lost due to not being picked up by a builder and failed to be requeued.", }), planningTime: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "planning_time_seconds", Help: "Time spent planning a build cycle.", @@ -104,19 +106,19 @@ func NewMetrics( Buckets: prometheus.LinearBuckets(1, 60, 60), }), buildStarted: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "build_started_total", Help: "Total number of builds started", }), buildCompleted: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ 
- Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "build_completed_total", Help: "Total number of builds completed", }, []string{"status"}), buildTime: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "build_time_seconds", Help: "Time spent during a builds cycle.", @@ -129,54 +131,62 @@ func NewMetrics( ), }, []string{"status"}), buildLastSuccess: promauto.With(r).NewGauge(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "build_last_successful_run_timestamp_seconds", Help: "Unix timestamp of the last successful build cycle.", }), blocksDeleted: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "blocks_deleted_total", Help: "Number of blocks deleted", }, []string{"phase"}), metasDeleted: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "metas_deleted_total", Help: "Number of metas deleted", }, []string{"phase"}), tenantsDiscovered: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "tenants_discovered_total", Help: "Number of tenants discovered during the current build iteration", }), tenantTasksPlanned: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "tenant_tasks_planned", Help: "Number of tasks planned for a tenant during the current build iteration.", }, []string{"tenant"}), tenantTasksCompleted: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: 
"tenant_tasks_completed", Help: "Number of tasks completed for a tenant during the current build iteration.", }, []string{"tenant", "status"}), + tenantTasksTiming: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: metricsSubsystem, + Name: "tenant_tasks_time_seconds", + Help: "Time spent building tasks for a tenant during the current build iteration.", + // 1s --> 1h (steps of 1 minute) + Buckets: prometheus.LinearBuckets(1, 60, 60), + }, []string{"tenant", "status"}), // Retention retentionRunning: promauto.With(r).NewGauge(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "retention_running", Help: "1 if retention is running in this compactor.", }), retentionTime: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "retention_time_seconds", Help: "Time this retention process took to complete.", @@ -184,7 +194,7 @@ func NewMetrics( }, []string{"status"}), retentionDaysPerIteration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "retention_days_processed", Help: "Number of days iterated over during the retention process.", @@ -193,7 +203,7 @@ func NewMetrics( }, []string{"status"}), retentionTenantsPerIteration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: "retention_tenants_processed", Help: "Number of tenants on which retention was applied during the retention process.", @@ -202,7 +212,7 @@ func NewMetrics( }, []string{"status"}), retentionTenantsExceedingLookback: promauto.With(r).NewGauge(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: metricsSubsystem, Name: 
"retention_tenants_exceeding_lookback", Help: "Number of tenants with a retention exceeding the configured retention lookback.", diff --git a/pkg/bloombuild/planner/planner.go b/pkg/bloombuild/planner/planner.go index 9ef59a9b7f85..4ec2aec389e5 100644 --- a/pkg/bloombuild/planner/planner.go +++ b/pkg/bloombuild/planner/planner.go @@ -26,6 +26,7 @@ import ( "github.com/grafana/loki/v3/pkg/storage/config" "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util/constants" utillog "github.com/grafana/loki/v3/pkg/util/log" "github.com/grafana/loki/v3/pkg/util/ring" ) @@ -49,7 +50,8 @@ type Planner struct { tsdbStore common.TSDBStore bloomStore bloomshipper.StoreBase - tasksQueue *queue.Queue + tasksQueue *queue.Queue + planFactory *strategies.Factory metrics *Metrics logger log.Logger @@ -78,7 +80,7 @@ func New( } // Queue to manage tasks - queueMetrics := queue.NewMetrics(r, metricsNamespace, metricsSubsystem) + queueMetrics := queue.NewMetrics(r, constants.Loki, metricsSubsystem) queueLimits := NewQueueLimits(limits) tasksQueue, err := queue.NewQueue(logger, cfg.Queue, queueLimits, queueMetrics, storageMetrics) if err != nil { @@ -86,14 +88,15 @@ func New( } p := &Planner{ - cfg: cfg, - limits: limits, - schemaCfg: schemaCfg, - tsdbStore: tsdbStore, - bloomStore: bloomStore, - tasksQueue: tasksQueue, - metrics: NewMetrics(r, tasksQueue.GetConnectedConsumersMetric), - logger: logger, + cfg: cfg, + limits: limits, + schemaCfg: schemaCfg, + tsdbStore: tsdbStore, + bloomStore: bloomStore, + tasksQueue: tasksQueue, + planFactory: strategies.NewFactory(limits, strategies.NewMetrics(r), logger), + metrics: NewMetrics(r, tasksQueue.GetConnectedConsumersMetric), + logger: logger, } p.retentionManager = NewRetentionManager( @@ -370,7 +373,7 @@ func (p *Planner) computeTasks( table config.DayTable, tenant string, ) ([]*protos.Task, []bloomshipper.Meta, 
error) { - strategy, err := strategies.NewStrategy(tenant, p.limits, p.logger) + strategy, err := p.planFactory.GetStrategy(tenant) if err != nil { return nil, nil, fmt.Errorf("error creating strategy: %w", err) } @@ -770,8 +773,10 @@ func (p *Planner) BuilderLoop(builder protos.PlannerForBuilder_BuilderLoopServer continue } + startTime := time.Now() result, err := p.forwardTaskToBuilder(builder, builderID, task) if err != nil { + p.metrics.tenantTasksTiming.WithLabelValues(task.Tenant, statusFailure).Observe(time.Since(startTime).Seconds()) maxRetries := p.limits.BloomTaskMaxRetries(task.Tenant) if maxRetries > 0 && int(task.timesEnqueued.Load()) >= maxRetries { p.tasksQueue.Release(task.ProtoTask) @@ -811,10 +816,12 @@ func (p *Planner) BuilderLoop(builder protos.PlannerForBuilder_BuilderLoopServer level.Debug(logger).Log( "msg", "task completed", - "duration", time.Since(task.queueTime).Seconds(), + "timeSinceEnqueued", time.Since(task.queueTime).Seconds(), + "buildTime", time.Since(startTime).Seconds(), "retries", task.timesEnqueued.Load()-1, // -1 because the first enqueue is not a retry ) p.tasksQueue.Release(task.ProtoTask) + p.metrics.tenantTasksTiming.WithLabelValues(task.Tenant, statusSuccess).Observe(time.Since(startTime).Seconds()) // Send the result back to the task. The channel is buffered, so this should not block. 
task.resultsChannel <- result @@ -866,7 +873,7 @@ func (p *Planner) forwardTaskToBuilder( case err := <-errCh: return nil, err case <-timeout: - return nil, fmt.Errorf("timeout waiting for response from builder (%s)", builderID) + return nil, fmt.Errorf("timeout (%s) waiting for response from builder (%s)", taskTimeout, builderID) } } diff --git a/pkg/bloombuild/planner/strategies/chunksize.go b/pkg/bloombuild/planner/strategies/chunksize.go index 456183aa62ef..b21c90b6f2e2 100644 --- a/pkg/bloombuild/planner/strategies/chunksize.go +++ b/pkg/bloombuild/planner/strategies/chunksize.go @@ -9,6 +9,8 @@ import ( "github.com/dustin/go-humanize" "github.com/go-kit/log" "github.com/go-kit/log/level" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" @@ -19,24 +21,49 @@ import ( "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/constants" ) +const ( + metricsSubsystem = "bloomplanner" +) + +type ChunkSizeStrategyMetrics struct { + tenantTaskSize *prometheus.HistogramVec +} + +func NewChunkSizeStrategyMetrics(r prometheus.Registerer) *ChunkSizeStrategyMetrics { + return &ChunkSizeStrategyMetrics{ + tenantTaskSize: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: metricsSubsystem, + Name: "tenant_task_size_bytes", + Help: "Size of tasks generated by the chunk size strategy", + // 1GB --> 512GB + Buckets: prometheus.ExponentialBuckets(1e9, 2, 10), + }, []string{"tenant"}), + } +} + type ChunkSizeStrategyLimits interface { BloomTaskTargetSeriesChunksSizeBytes(tenantID string) uint64 } type ChunkSizeStrategy struct { - limits ChunkSizeStrategyLimits - logger log.Logger + 
limits ChunkSizeStrategyLimits + metrics *ChunkSizeStrategyMetrics + logger log.Logger } func NewChunkSizeStrategy( limits ChunkSizeStrategyLimits, + metrics *ChunkSizeStrategyMetrics, logger log.Logger, ) (*ChunkSizeStrategy, error) { return &ChunkSizeStrategy{ - limits: limits, - logger: logger, + limits: limits, + metrics: metrics, + logger: logger, }, nil } @@ -82,8 +109,9 @@ func (s *ChunkSizeStrategy) Plan( continue } - bounds := series.Bounds() + s.metrics.tenantTaskSize.WithLabelValues(tenant).Observe(float64(series.Size())) + bounds := series.Bounds() blocks, err := getBlocksMatchingBounds(metas, bounds) if err != nil { return nil, fmt.Errorf("failed to get blocks matching bounds: %w", err) diff --git a/pkg/bloombuild/planner/strategies/chunksize_test.go b/pkg/bloombuild/planner/strategies/chunksize_test.go index 951d033e5c10..c59e13fdfdd1 100644 --- a/pkg/bloombuild/planner/strategies/chunksize_test.go +++ b/pkg/bloombuild/planner/strategies/chunksize_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/go-kit/log" + "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" "github.com/grafana/loki/v3/pkg/bloombuild/planner/plannertest" @@ -228,7 +229,7 @@ func Test_ChunkSizeStrategy_Plan(t *testing.T) { logger := log.NewNopLogger() //logger := log.NewLogfmtLogger(os.Stdout) - strategy, err := NewChunkSizeStrategy(tc.limits, logger) + strategy, err := NewChunkSizeStrategy(tc.limits, NewChunkSizeStrategyMetrics(prometheus.NewPedanticRegistry()), logger) require.NoError(t, err) actual, err := strategy.Plan(context.Background(), plannertest.TestTable, "fake", tc.tsdbs, tc.originalMetas) diff --git a/pkg/bloombuild/planner/strategies/factory.go b/pkg/bloombuild/planner/strategies/factory.go index f58f91e51708..d48d4e7f59f8 100644 --- a/pkg/bloombuild/planner/strategies/factory.go +++ b/pkg/bloombuild/planner/strategies/factory.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/go-kit/log" + 
"github.com/prometheus/client_golang/prometheus" "github.com/grafana/loki/v3/pkg/bloombuild/common" "github.com/grafana/loki/v3/pkg/bloombuild/protos" @@ -32,18 +33,42 @@ type PlanningStrategy interface { Plan(ctx context.Context, table config.DayTable, tenant string, tsdbs TSDBSet, metas []bloomshipper.Meta) ([]*protos.Task, error) } -func NewStrategy( - tenantID string, +type Metrics struct { + *ChunkSizeStrategyMetrics +} + +func NewMetrics(reg prometheus.Registerer) *Metrics { + return &Metrics{ + ChunkSizeStrategyMetrics: NewChunkSizeStrategyMetrics(reg), + } +} + +type Factory struct { + limits Limits + logger log.Logger + metrics *Metrics +} + +func NewFactory( limits Limits, + metrics *Metrics, logger log.Logger, -) (PlanningStrategy, error) { - strategy := limits.BloomPlanningStrategy(tenantID) +) *Factory { + return &Factory{ + limits: limits, + logger: logger, + metrics: metrics, + } +} + +func (f *Factory) GetStrategy(tenantID string) (PlanningStrategy, error) { + strategy := f.limits.BloomPlanningStrategy(tenantID) switch strategy { case SplitKeyspaceStrategyName: - return NewSplitKeyspaceStrategy(limits, logger) + return NewSplitKeyspaceStrategy(f.limits, f.logger) case SplitBySeriesChunkSizeStrategyName: - return NewChunkSizeStrategy(limits, logger) + return NewChunkSizeStrategy(f.limits, f.metrics.ChunkSizeStrategyMetrics, f.logger) default: return nil, fmt.Errorf("unknown bloom planning strategy (%s)", strategy) } diff --git a/pkg/bloomgateway/bloomgateway.go b/pkg/bloomgateway/bloomgateway.go index 30624f9d3245..4242118e225c 100644 --- a/pkg/bloomgateway/bloomgateway.go +++ b/pkg/bloomgateway/bloomgateway.go @@ -161,6 +161,43 @@ func (g *Gateway) stopping(_ error) error { return services.StopManagerAndAwaitStopped(context.Background(), g.serviceMngr) } +func (g *Gateway) PrefetchBloomBlocks(_ context.Context, req *logproto.PrefetchBloomBlocksRequest) (*logproto.PrefetchBloomBlocksResponse, error) { + refs, err := decodeBlockKeys(req.Blocks) + if 
err != nil { + return nil, err + } + + bqs, err := g.bloomStore.FetchBlocks( + // We don't use the ctx passed to the handler since its canceled when the handler returns + context.Background(), + refs, + bloomshipper.WithFetchAsync(true), + bloomshipper.WithIgnoreNotFound(true), + bloomshipper.WithCacheGetOptions( + bloomshipper.WithSkipHitMissMetrics(true), + ), + ) + if err != nil { + g.metrics.prefetchedBlocks.WithLabelValues(typeError).Add(float64(len(refs))) + return nil, err + } + + for _, bq := range bqs { + if bq == nil { + // This is the expected case: the blocks is not yet downloaded and the block querier is nil + continue + } + + // Close any block querier that were already downloaded + if err := bq.Close(); err != nil { + level.Warn(g.logger).Log("msg", "failed to close block querier", "err", err) + } + } + + g.metrics.prefetchedBlocks.WithLabelValues(typeSuccess).Add(float64(len(refs))) + return &logproto.PrefetchBloomBlocksResponse{}, err +} + // FilterChunkRefs implements BloomGatewayServer func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunkRefRequest) (*logproto.FilterChunkRefResponse, error) { tenantID, err := tenant.TenantID(ctx) @@ -204,14 +241,10 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk return &logproto.FilterChunkRefResponse{ChunkRefs: req.Refs}, nil } - blocks := make([]bloomshipper.BlockRef, 0, len(req.Blocks)) - for _, key := range req.Blocks { - block, err := bloomshipper.BlockRefFromKey(key) - if err != nil { - stats.Status = labelFailure - return nil, errors.New("could not parse block key") - } - blocks = append(blocks, block) + blocks, err := decodeBlockKeys(req.Blocks) + if err != nil { + stats.Status = labelFailure + return nil, err } // Shortcut if request does not contain blocks @@ -470,3 +503,15 @@ func filterChunkRefsForSeries(cur *logproto.GroupedChunkRefs, removals v1.ChunkR cur.Refs = cur.Refs[:len(res)] } + +func decodeBlockKeys(keys []string) 
([]bloomshipper.BlockRef, error) { + blocks := make([]bloomshipper.BlockRef, 0, len(keys)) + for _, key := range keys { + block, err := bloomshipper.BlockRefFromKey(key) + if err != nil { + return nil, errors.New("could not parse block key") + } + blocks = append(blocks, block) + } + return blocks, nil +} diff --git a/pkg/bloomgateway/cache.go b/pkg/bloomgateway/cache.go index 77d5168ca303..acfe951ffa36 100644 --- a/pkg/bloomgateway/cache.go +++ b/pkg/bloomgateway/cache.go @@ -113,6 +113,7 @@ func (m merger) MergeResponse(responses ...resultscache.Response) (resultscache. type ClientCache struct { cache *resultscache.ResultsCache + next logproto.BloomGatewayClient limits CacheLimits logger log.Logger } @@ -149,12 +150,19 @@ func NewBloomGatewayClientCacheMiddleware( ) return &ClientCache{ + next: next, cache: resultsCache, limits: limits, logger: logger, } } +// PrefetchBloomBlocks implements logproto.BloomGatewayClient. +func (c *ClientCache) PrefetchBloomBlocks(ctx context.Context, in *logproto.PrefetchBloomBlocksRequest, opts ...grpc.CallOption) (*logproto.PrefetchBloomBlocksResponse, error) { + return c.next.PrefetchBloomBlocks(ctx, in, opts...) +} + +// FilterChunkRefs implements logproto.BloomGatewayClient. 
func (c *ClientCache) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunkRefRequest, opts ...grpc.CallOption) (*logproto.FilterChunkRefResponse, error) { cacheReq := requestWithGrpcCallOptions{ FilterChunkRefRequest: req, diff --git a/pkg/bloomgateway/cache_test.go b/pkg/bloomgateway/cache_test.go index f85d366cc5aa..d0a524b4907d 100644 --- a/pkg/bloomgateway/cache_test.go +++ b/pkg/bloomgateway/cache_test.go @@ -468,6 +468,8 @@ type mockServer struct { res *logproto.FilterChunkRefResponse } +var _ logproto.BloomGatewayClient = &mockServer{} + func newMockServer(res *logproto.FilterChunkRefResponse) (*mockServer, *int) { var calls int return &mockServer{ @@ -480,11 +482,17 @@ func (s *mockServer) SetResponse(res *logproto.FilterChunkRefResponse) { s.res = res } +// FilterChunkRefs implements logproto.BloomGatewayClient. func (s *mockServer) FilterChunkRefs(_ context.Context, _ *logproto.FilterChunkRefRequest, _ ...grpc.CallOption) (*logproto.FilterChunkRefResponse, error) { *s.calls++ return s.res, nil } +// PrefetchBloomBlocks implements logproto.BloomGatewayClient. +func (s *mockServer) PrefetchBloomBlocks(_ context.Context, _ *logproto.PrefetchBloomBlocksRequest, _ ...grpc.CallOption) (*logproto.PrefetchBloomBlocksResponse, error) { + panic("unimplemented") +} + type mockLimits struct { cacheFreshness time.Duration cacheInterval time.Duration diff --git a/pkg/bloomgateway/client.go b/pkg/bloomgateway/client.go index 23f7026d0070..b085b849488e 100644 --- a/pkg/bloomgateway/client.go +++ b/pkg/bloomgateway/client.go @@ -116,6 +116,7 @@ func (i *ClientConfig) Validate() error { type Client interface { FilterChunks(ctx context.Context, tenant string, interval bloomshipper.Interval, blocks []blockWithSeries, plan plan.QueryPlan) ([]*logproto.GroupedChunkRefs, error) + PrefetchBloomBlocks(ctx context.Context, blocks []bloomshipper.BlockRef) error } // clientPool is a minimal interface that is satisfied by the JumpHashClientPool. 
@@ -204,6 +205,47 @@ func (c *GatewayClient) Close() { c.dnsProvider.Stop() } +func (c *GatewayClient) PrefetchBloomBlocks(ctx context.Context, blocks []bloomshipper.BlockRef) error { + if len(blocks) == 0 { + return nil + } + + pos := make(map[string]int) + servers := make([]addrWithBlocks, 0, len(blocks)) + for _, block := range blocks { + addr, err := c.pool.Addr(block.String()) + if err != nil { + level.Error(c.logger).Log("msg", "failed to resolve server address for block", "block", block, "err", err) + continue + } + + if idx, found := pos[addr]; found { + servers[idx].blocks = append(servers[idx].blocks, block.String()) + } else { + pos[addr] = len(servers) + servers = append(servers, addrWithBlocks{ + addr: addr, + blocks: []string{block.String()}, + }) + } + } + + return concurrency.ForEachJob(ctx, len(servers), len(servers), func(ctx context.Context, i int) error { + rs := servers[i] + return c.doForAddrs([]string{rs.addr}, func(client logproto.BloomGatewayClient) error { + req := &logproto.PrefetchBloomBlocksRequest{Blocks: rs.blocks} + _, err := client.PrefetchBloomBlocks(ctx, req) + if err != nil { + level.Error(c.logger).Log("msg", "block prefetch failed for instance, skipping", "addr", rs.addr, "blocks", len(rs.blocks), "err", err) + c.metrics.clientRequests.WithLabelValues(routePrefectBlocks, typeError).Inc() + } else { + c.metrics.clientRequests.WithLabelValues(routePrefectBlocks, typeSuccess).Inc() + } + return err + }) + }) +} + // FilterChunks implements Client func (c *GatewayClient) FilterChunks(ctx context.Context, _ string, interval bloomshipper.Interval, blocks []blockWithSeries, plan plan.QueryPlan) ([]*logproto.GroupedChunkRefs, error) { // no block and therefore no series with chunks @@ -268,10 +310,10 @@ func (c *GatewayClient) FilterChunks(ctx context.Context, _ string, interval blo "err", err, ) // filter none of the results on failed request - c.metrics.clientRequests.WithLabelValues(typeError).Inc() + 
c.metrics.clientRequests.WithLabelValues(routeFilterChunks, typeError).Inc() results[i] = rs.groups } else { - c.metrics.clientRequests.WithLabelValues(typeSuccess).Inc() + c.metrics.clientRequests.WithLabelValues(routeFilterChunks, typeSuccess).Inc() results[i] = resp.ChunkRefs } @@ -390,6 +432,11 @@ func (c *GatewayClient) doForAddrs(addrs []string, fn func(logproto.BloomGateway return err } +type addrWithBlocks struct { + addr string + blocks []string +} + type addrWithGroups struct { addr string blocks []string diff --git a/pkg/bloomgateway/metrics.go b/pkg/bloomgateway/metrics.go index 690f95354a23..af04cc7e03bc 100644 --- a/pkg/bloomgateway/metrics.go +++ b/pkg/bloomgateway/metrics.go @@ -18,6 +18,9 @@ type metrics struct { const ( typeSuccess = "success" typeError = "error" + + routeFilterChunks = "FilterChunks" + routePrefectBlocks = "PrefetchBloomBlocks" ) type clientMetrics struct { @@ -32,7 +35,7 @@ func newClientMetrics(registerer prometheus.Registerer) *clientMetrics { Subsystem: "bloom_gateway_client", Name: "requests_total", Help: "Total number of requests made to the bloom gateway", - }, []string{"type"}), + }, []string{"route", "type"}), requestLatency: promauto.With(registerer).NewHistogramVec(prometheus.HistogramOpts{ Namespace: constants.Loki, Subsystem: "bloom_gateway_client", @@ -50,6 +53,7 @@ type serverMetrics struct { requestedChunks prometheus.Histogram filteredChunks prometheus.Histogram receivedMatchers prometheus.Histogram + prefetchedBlocks *prometheus.CounterVec } func newMetrics(registerer prometheus.Registerer, namespace, subsystem string) *metrics { @@ -105,6 +109,12 @@ func newServerMetrics(registerer prometheus.Registerer, namespace, subsystem str Help: "Number of matchers per request.", Buckets: prometheus.ExponentialBuckets(1, 2, 9), // 1 -> 256 }), + prefetchedBlocks: promauto.With(registerer).NewCounterVec(prometheus.CounterOpts{ + Namespace: namespace, + Subsystem: subsystem, + Name: "prefetched_blocks_total", + Help: "Total 
amount of blocks prefetched by the bloom-gateway", + }, []string{"status"}), } } diff --git a/pkg/bloomgateway/querier_test.go b/pkg/bloomgateway/querier_test.go index ab890ab5d84f..f06d9d199f0f 100644 --- a/pkg/bloomgateway/querier_test.go +++ b/pkg/bloomgateway/querier_test.go @@ -26,6 +26,8 @@ type noopClient struct { callCount int } +var _ Client = &noopClient{} + // FilterChunks implements Client. func (c *noopClient) FilterChunks(_ context.Context, _ string, _ bloomshipper.Interval, blocks []blockWithSeries, _ plan.QueryPlan) (result []*logproto.GroupedChunkRefs, err error) { for _, block := range blocks { @@ -39,6 +41,10 @@ func (c *noopClient) FilterChunks(_ context.Context, _ string, _ bloomshipper.In return result, c.err } +func (c *noopClient) PrefetchBloomBlocks(_ context.Context, _ []bloomshipper.BlockRef) error { + return nil +} + type mockBlockResolver struct{} // Resolve implements BlockResolver. diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index e4b92c8f585c..f776e6778b43 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -28,7 +28,7 @@ import ( "github.com/grafana/dskit/services" "github.com/grafana/dskit/tenant" "github.com/grafana/dskit/user" - lru "github.com/hashicorp/golang-lru" + lru "github.com/hashicorp/golang-lru/v2" "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" @@ -148,7 +148,7 @@ type Distributor struct { subservicesWatcher *services.FailureWatcher // Per-user rate limiter. ingestionRateLimiter *limiter.RateLimiter - labelCache *lru.Cache + labelCache *lru.Cache[string, labelData] // Push failures rate limiter. 
writeFailuresManager *writefailures.Manager @@ -217,7 +217,7 @@ func New( var servs []services.Service rateLimitStrat := validation.LocalIngestionRateStrategy - labelCache, err := lru.New(maxLabelCacheSize) + labelCache, err := lru.New[string, labelData](maxLabelCacheSize) if err != nil { return nil, err } @@ -1086,8 +1086,7 @@ type labelData struct { func (d *Distributor) parseStreamLabels(vContext validationContext, key string, stream logproto.Stream) (labels.Labels, string, uint64, error) { if val, ok := d.labelCache.Get(key); ok { - labelVal := val.(labelData) - return labelVal.ls, labelVal.ls.String(), labelVal.hash, nil + return val.ls, val.ls.String(), val.hash, nil } ls, err := syntax.ParseLabels(key) diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index 17089efbbf63..edc8d57da542 100644 --- a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -296,7 +296,7 @@ type Ingester struct { ingestPartitionID int32 partitionRingLifecycler *ring.PartitionInstanceLifecycler - partitionReader *partition.Reader + partitionReader *partition.ReaderService } // New makes a new Ingester. 
@@ -380,7 +380,14 @@ func New(cfg Config, clientConfig client.Config, store Store, limits Limits, con logger, prometheus.WrapRegistererWithPrefix("loki_", registerer)) - i.partitionReader, err = partition.NewReader(cfg.KafkaIngestion.KafkaConfig, i.ingestPartitionID, cfg.LifecyclerConfig.ID, NewKafkaConsumerFactory(i, logger, registerer), logger, registerer) + i.partitionReader, err = partition.NewReaderService( + cfg.KafkaIngestion.KafkaConfig, + i.ingestPartitionID, + cfg.LifecyclerConfig.ID, + NewKafkaConsumerFactory(i, logger, registerer), + logger, + registerer, + ) if err != nil { return nil, err } diff --git a/pkg/kafka/encoding.go b/pkg/kafka/encoding.go index 65daf59c25e7..15479336de67 100644 --- a/pkg/kafka/encoding.go +++ b/pkg/kafka/encoding.go @@ -9,7 +9,7 @@ import ( "github.com/twmb/franz-go/pkg/kgo" - lru "github.com/hashicorp/golang-lru" + lru "github.com/hashicorp/golang-lru/v2" "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/v3/pkg/logproto" @@ -126,11 +126,11 @@ func marshalWriteRequestToRecord(partitionID int32, tenantID string, stream logp // It caches parsed labels for efficiency. 
type Decoder struct { stream *logproto.Stream - cache *lru.Cache + cache *lru.Cache[string, labels.Labels] } func NewDecoder() (*Decoder, error) { - cache, err := lru.New(5000) // Set LRU size to 5000, adjust as needed + cache, err := lru.New[string, labels.Labels](5000) if err != nil { return nil, fmt.Errorf("failed to create LRU cache: %w", err) } @@ -154,7 +154,7 @@ func (d *Decoder) Decode(data []byte) (logproto.Stream, labels.Labels, error) { var ls labels.Labels if cachedLabels, ok := d.cache.Get(d.stream.Labels); ok { - ls = cachedLabels.(labels.Labels) + ls = cachedLabels } else { var err error ls, err = syntax.ParseLabels(d.stream.Labels) diff --git a/pkg/kafka/partition/committer.go b/pkg/kafka/partition/committer.go index f9aeda3f0fc5..9181539ceea9 100644 --- a/pkg/kafka/partition/committer.go +++ b/pkg/kafka/partition/committer.go @@ -10,10 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/twmb/franz-go/pkg/kadm" "go.uber.org/atomic" - - "github.com/grafana/loki/v3/pkg/kafka" ) // Committer defines an interface for committing offsets @@ -22,49 +19,40 @@ type Committer interface { EnqueueOffset(offset int64) } -// partitionCommitter is responsible for committing offsets for a specific Kafka partition -// to the Kafka broker. It also tracks metrics related to the commit process. type partitionCommitter struct { commitRequestsTotal prometheus.Counter commitRequestsLatency prometheus.Histogram commitFailuresTotal prometheus.Counter lastCommittedOffset prometheus.Gauge - logger log.Logger - admClient *kadm.Client - - kafkaCfg kafka.Config - partitionID int32 - consumerGroup string + logger log.Logger + reader ReaderIfc + commitFreq time.Duration toCommit *atomic.Int64 wg sync.WaitGroup cancel context.CancelFunc } -// newCommitter creates and initializes a new Committer. 
-// It sets up the necessary metrics and initializes the committer with the provided configuration. -func newCommitter(kafkaCfg kafka.Config, admClient *kadm.Client, partitionID int32, consumerGroup string, logger log.Logger, reg prometheus.Registerer) *partitionCommitter { +func newCommitter(reader ReaderIfc, commitFreq time.Duration, logger log.Logger, reg prometheus.Registerer) *partitionCommitter { c := &partitionCommitter{ - logger: logger, - kafkaCfg: kafkaCfg, - partitionID: partitionID, - consumerGroup: consumerGroup, - admClient: admClient, + logger: logger, + reader: reader, + commitFreq: commitFreq, commitRequestsTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{ Name: "loki_ingest_storage_reader_offset_commit_requests_total", Help: "Total number of requests issued to commit the last consumed offset (includes both successful and failed requests).", - ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(partitionID))}, + ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(reader.Partition()))}, }), commitFailuresTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{ Name: "loki_ingest_storage_reader_offset_commit_failures_total", Help: "Total number of failed requests to commit the last consumed offset.", - ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(partitionID))}, + ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(reader.Partition()))}, }), commitRequestsLatency: promauto.With(reg).NewHistogram(prometheus.HistogramOpts{ Name: "loki_ingest_storage_reader_offset_commit_request_duration_seconds", Help: "The duration of requests to commit the last consumed offset.", - ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(partitionID))}, + ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(reader.Partition()))}, NativeHistogramBucketFactor: 1.1, NativeHistogramMaxBucketNumber: 100, NativeHistogramMinResetDuration: time.Hour, @@ -73,15 +61,15 @@ func newCommitter(kafkaCfg 
kafka.Config, admClient *kadm.Client, partitionID int lastCommittedOffset: promauto.With(reg).NewGauge(prometheus.GaugeOpts{ Name: "loki_ingest_storage_reader_last_committed_offset", Help: "The last consumed offset successfully committed by the partition reader. Set to -1 if not offset has been committed yet.", - ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(partitionID))}, + ConstLabels: prometheus.Labels{"partition": strconv.Itoa(int(reader.Partition()))}, }), toCommit: atomic.NewInt64(-1), } - // Initialise the last committed offset metric to -1 to signal no offset has been committed yet (0 is a valid offset). + // Initialize the last committed offset metric to -1 to signal no offset has been committed yet c.lastCommittedOffset.Set(-1) - if kafkaCfg.ConsumerGroupOffsetCommitInterval > 0 { + if commitFreq > 0 { c.wg.Add(1) ctx, cancel := context.WithCancel(context.Background()) c.cancel = cancel @@ -91,77 +79,71 @@ func newCommitter(kafkaCfg kafka.Config, admClient *kadm.Client, partitionID int return c } -func (r *partitionCommitter) autoCommitLoop(ctx context.Context) { - defer r.wg.Done() - commitTicker := time.NewTicker(r.kafkaCfg.ConsumerGroupOffsetCommitInterval) +func (c *partitionCommitter) autoCommitLoop(ctx context.Context) { + defer c.wg.Done() + commitTicker := time.NewTicker(c.commitFreq) defer commitTicker.Stop() - previousOffset := r.toCommit.Load() + previousOffset := c.toCommit.Load() for { select { case <-ctx.Done(): return case <-commitTicker.C: - currOffset := r.toCommit.Load() + currOffset := c.toCommit.Load() if currOffset == previousOffset { continue } - if err := r.Commit(ctx, currOffset); err == nil { + if err := c.Commit(ctx, currOffset); err == nil { + c.lastCommittedOffset.Set(float64(currOffset)) previousOffset = currOffset + } else { + level.Error(c.logger).Log("msg", "failed to commit", "offset", currOffset) } } } } -func (r *partitionCommitter) EnqueueOffset(o int64) { - if r.kafkaCfg.ConsumerGroupOffsetCommitInterval > 0 { -
r.toCommit.Store(o) +func (c *partitionCommitter) EnqueueOffset(o int64) { + if c.commitFreq > 0 { + c.toCommit.Store(o) } } -// commit attempts to commit the given offset to Kafka for the partition this committer is responsible for. -// It updates relevant metrics and logs the result of the commit operation. -func (r *partitionCommitter) Commit(ctx context.Context, offset int64) (returnErr error) { +func (c *partitionCommitter) Commit(ctx context.Context, offset int64) error { startTime := time.Now() - r.commitRequestsTotal.Inc() + c.commitRequestsTotal.Inc() - defer func() { - r.commitRequestsLatency.Observe(time.Since(startTime).Seconds()) - - if returnErr != nil { - level.Error(r.logger).Log("msg", "failed to commit last consumed offset to Kafka", "err", returnErr, "offset", offset) - r.commitFailuresTotal.Inc() - } - }() - - // Commit the last consumed offset. - toCommit := kadm.Offsets{} - toCommit.AddOffset(r.kafkaCfg.Topic, r.partitionID, offset, -1) - committed, err := r.admClient.CommitOffsets(ctx, r.consumerGroup, toCommit) - if err != nil { + if err := c.reader.Commit(ctx, offset); err != nil { + level.Error(c.logger).Log("msg", "failed to commit offset", "err", err, "offset", offset) + c.commitFailuresTotal.Inc() + c.commitRequestsLatency.Observe(time.Since(startTime).Seconds()) return err - } else if !committed.Ok() { - return committed.Error() } - committedOffset, _ := committed.Lookup(r.kafkaCfg.Topic, r.partitionID) - level.Debug(r.logger).Log("msg", "last commit offset successfully committed to Kafka", "offset", committedOffset.At) - r.lastCommittedOffset.Set(float64(committedOffset.At)) + level.Debug(c.logger).Log("msg", "successfully committed offset", "offset", offset) + c.lastCommittedOffset.Set(float64(offset)) + c.commitRequestsLatency.Observe(time.Since(startTime).Seconds()) return nil } -func (r *partitionCommitter) Stop() { - if r.kafkaCfg.ConsumerGroupOffsetCommitInterval <= 0 { +func (c *partitionCommitter) Stop() { + if c.commitFreq <= 
0 { return } - r.cancel() - r.wg.Wait() + c.cancel() + c.wg.Wait() - offset := r.toCommit.Load() + offset := c.toCommit.Load() if offset < 0 { return } - // Commit has internal timeouts, so this call shouldn't block for too long. - _ = r.Commit(context.Background(), offset) + + // Commit has internal timeouts, so this call shouldn't block for too long + logger := log.With(c.logger, "msg", "stopping partition committer", "final_offset", offset) + if err := c.Commit(context.Background(), offset); err != nil { + level.Error(logger).Log("err", err) + } else { + level.Info(logger).Log() + } } diff --git a/pkg/kafka/partition/committer_test.go b/pkg/kafka/partition/committer_test.go index 1739986cd66c..dc3d2748c078 100644 --- a/pkg/kafka/partition/committer_test.go +++ b/pkg/kafka/partition/committer_test.go @@ -36,7 +36,15 @@ func TestPartitionCommitter(t *testing.T) { reg := prometheus.NewRegistry() partitionID := int32(1) consumerGroup := "test-consumer-group" - committer := newCommitter(kafkaCfg, admClient, partitionID, consumerGroup, logger, reg) + reader := newReader( + client, + kafkaCfg.Topic, + partitionID, + consumerGroup, + logger, + reg, + ) + committer := newCommitter(reader, kafkaCfg.ConsumerGroupOffsetCommitInterval, logger, reg) // Test committing an offset ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) diff --git a/pkg/kafka/partition/metrics.go b/pkg/kafka/partition/metrics.go deleted file mode 100644 index 7979ea70a40c..000000000000 --- a/pkg/kafka/partition/metrics.go +++ /dev/null @@ -1,79 +0,0 @@ -package partition - -import ( - "math" - "strconv" - "time" - - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/twmb/franz-go/plugin/kprom" - - "github.com/grafana/loki/v3/pkg/kafka/client" -) - -type readerMetrics struct { - partition *prometheus.GaugeVec - phase *prometheus.GaugeVec - receiveDelay *prometheus.HistogramVec - recordsPerFetch 
prometheus.Histogram - fetchesErrors prometheus.Counter - fetchesTotal prometheus.Counter - fetchWaitDuration prometheus.Histogram - consumeLatency prometheus.Histogram - kprom *kprom.Metrics -} - -// newReaderMetrics initializes and returns a new set of metrics for the PartitionReader. -func newReaderMetrics(r prometheus.Registerer) readerMetrics { - return readerMetrics{ - partition: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ - Name: "loki_ingest_storage_reader_partition", - Help: "The partition ID assigned to this reader.", - }, []string{"id"}), - phase: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ - Name: "loki_ingest_storage_reader_phase", - Help: "The current phase of the consumer.", - }, []string{"phase"}), - receiveDelay: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ - Name: "loki_ingest_storage_reader_receive_delay_seconds", - Help: "Delay between producing a record and receiving it in the consumer.", - NativeHistogramZeroThreshold: math.Pow(2, -10), // Values below this will be considered to be 0. Equals to 0.0009765625, or about 1ms. - NativeHistogramBucketFactor: 1.2, // We use higher factor (scheme=2) to have wider spread of buckets. - NativeHistogramMaxBucketNumber: 100, - NativeHistogramMinResetDuration: 1 * time.Hour, - Buckets: prometheus.ExponentialBuckets(0.125, 2, 18), // Buckets between 125ms and 9h. - }, []string{"phase"}), - kprom: client.NewReaderClientMetrics("partition-reader", r), - fetchWaitDuration: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "loki_ingest_storage_reader_records_batch_wait_duration_seconds", - Help: "How long a consumer spent waiting for a batch of records from the Kafka client. 
If fetching is faster than processing, then this will be close to 0.", - NativeHistogramBucketFactor: 1.1, - }), - recordsPerFetch: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "loki_ingest_storage_reader_records_per_fetch", - Help: "The number of records received by the consumer in a single fetch operation.", - Buckets: prometheus.ExponentialBuckets(1, 2, 15), - }), - fetchesErrors: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "loki_ingest_storage_reader_fetch_errors_total", - Help: "The number of fetch errors encountered by the consumer.", - }), - fetchesTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "loki_ingest_storage_reader_fetches_total", - Help: "Total number of Kafka fetches received by the consumer.", - }), - } -} - -func (m *readerMetrics) reportStarting(partitionID int32) { - m.partition.WithLabelValues(strconv.Itoa(int(partitionID))).Set(1) - m.phase.WithLabelValues(phaseStarting).Set(1) - m.phase.WithLabelValues(phaseRunning).Set(0) -} - -func (m *readerMetrics) reportRunning(partitionID int32) { - m.partition.WithLabelValues(strconv.Itoa(int(partitionID))).Set(1) - m.phase.WithLabelValues(phaseStarting).Set(0) - m.phase.WithLabelValues(phaseRunning).Set(1) -} diff --git a/pkg/kafka/partition/reader.go b/pkg/kafka/partition/reader.go index 31d1e65e79eb..a0d360c4a065 100644 --- a/pkg/kafka/partition/reader.go +++ b/pkg/kafka/partition/reader.go @@ -3,56 +3,32 @@ package partition import ( "context" "fmt" + "math" "time" - "github.com/coder/quartz" "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/dskit/backoff" "github.com/grafana/dskit/multierror" - "github.com/grafana/dskit/services" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" "github.com/twmb/franz-go/pkg/kadm" "github.com/twmb/franz-go/pkg/kerr" "github.com/twmb/franz-go/pkg/kgo" "github.com/twmb/franz-go/pkg/kmsg" 
"github.com/grafana/loki/v3/pkg/kafka" + "github.com/grafana/loki/v3/pkg/kafka/client" ) -var errWaitTargetLagDeadlineExceeded = errors.New("waiting for target lag deadline exceeded") +type SpecialOffset int const ( - kafkaStartOffset = -2 - kafkaEndOffset = -1 - - phaseStarting = "starting" - phaseRunning = "running" + KafkaStartOffset SpecialOffset = -2 + KafkaEndOffset SpecialOffset = -1 ) -// Reader is responsible for reading data from a specific Kafka partition -// and passing it to the consumer for processing. It is a core component of the -// Loki ingester's Kafka-based ingestion pipeline. -type Reader struct { - services.Service - - kafkaCfg kafka.Config - partitionID int32 - consumerGroup string - consumerFactory ConsumerFactory - committer *partitionCommitter - lastProcessedOffset int64 - recordsChan chan []Record - - client *kgo.Client - logger log.Logger - metrics readerMetrics - reg prometheus.Registerer - clock quartz.Clock -} - type Record struct { // Context holds the tracing (and potentially other) info, that the record was enriched with on fetch from Kafka. Ctx context.Context @@ -61,177 +37,186 @@ type Record struct { Offset int64 } -type ConsumerFactory func(committer Committer) (Consumer, error) +type ReaderIfc interface { + Topic() string + Partition() int32 + ConsumerGroup() string + FetchLastCommittedOffset(ctx context.Context) (int64, error) + FetchPartitionOffset(ctx context.Context, position SpecialOffset) (int64, error) + Poll(ctx context.Context) ([]Record, error) + Commit(ctx context.Context, offset int64) error + // Set the target offset for consumption. reads will begin from here. 
+ SetOffsetForConsumption(offset int64) +} + +// readerMetrics contains metrics specific to Kafka reading operations +type readerMetrics struct { + recordsPerFetch prometheus.Histogram + fetchesErrors prometheus.Counter + fetchesTotal prometheus.Counter + fetchWaitDuration prometheus.Histogram + receiveDelay prometheus.Histogram + lastCommittedOffset prometheus.Gauge +} -type Consumer interface { - Start(ctx context.Context, recordsChan <-chan []Record) func() +func newReaderMetrics(r prometheus.Registerer) *readerMetrics { + return &readerMetrics{ + fetchWaitDuration: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Name: "loki_kafka_reader_fetch_wait_duration_seconds", + Help: "How long the reader spent waiting for a batch of records from Kafka.", + NativeHistogramBucketFactor: 1.1, + }), + recordsPerFetch: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Name: "loki_kafka_reader_records_per_fetch", + Help: "The number of records received in a single fetch operation.", + Buckets: prometheus.ExponentialBuckets(1, 2, 15), + }), + fetchesErrors: promauto.With(r).NewCounter(prometheus.CounterOpts{ + Name: "loki_kafka_reader_fetch_errors_total", + Help: "The number of fetch errors encountered.", + }), + fetchesTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{ + Name: "loki_kafka_reader_fetches_total", + Help: "Total number of Kafka fetches performed.", + }), + receiveDelay: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ + Name: "loki_kafka_reader_receive_delay_seconds", + Help: "Delay between producing a record and receiving it.", + NativeHistogramZeroThreshold: math.Pow(2, -10), + NativeHistogramBucketFactor: 1.2, + NativeHistogramMaxBucketNumber: 100, + NativeHistogramMinResetDuration: 1 * time.Hour, + Buckets: prometheus.ExponentialBuckets(0.125, 2, 18), + }), + } +} + +// Reader provides low-level access to Kafka partition reading operations +type Reader struct { + client *kgo.Client + topic string + partitionID int32 + 
consumerGroup string + metrics *readerMetrics + logger log.Logger } -// NewReader creates and initializes a new PartitionReader. -// It sets up the basic service and initializes the reader with the provided configuration. func NewReader( - kafkaCfg kafka.Config, + cfg kafka.Config, partitionID int32, instanceID string, - consumerFactory ConsumerFactory, logger log.Logger, reg prometheus.Registerer, ) (*Reader, error) { - r := &Reader{ - kafkaCfg: kafkaCfg, - partitionID: partitionID, - consumerGroup: kafkaCfg.GetConsumerGroup(instanceID, partitionID), - logger: logger, - metrics: newReaderMetrics(reg), - reg: reg, - lastProcessedOffset: -1, - consumerFactory: consumerFactory, - } - r.Service = services.NewBasicService(r.start, r.run, nil) - return r, nil -} - -// start initializes the Kafka client and committer for the PartitionReader. -// This method is called when the PartitionReader service starts. -func (p *Reader) start(ctx context.Context) error { - var err error - p.client, err = client.NewReaderClient(p.kafkaCfg, p.metrics.kprom, p.logger) + // Create a new Kafka client for this reader + clientMetrics := client.NewReaderClientMetrics("partition-reader", reg) + c, err := client.NewReaderClient( + cfg, + clientMetrics, + log.With(logger, "component", "kafka-client"), + ) if err != nil { - return errors.Wrap(err, "creating kafka reader client") - } - p.metrics.reportStarting(p.partitionID) - - // We manage our commits manually, so we must fetch the last offset for our consumer group to find out where to read from. - lastCommittedOffset := p.fetchLastCommittedOffset(ctx) - if lastCommittedOffset == kafkaEndOffset { - level.Warn(p.logger).Log("msg", "no committed offset found for partition, starting from the beginning", "partition", p.partitionID, "consumer_group", p.consumerGroup) - lastCommittedOffset = kafkaStartOffset // If we haven't committed any offsets yet, we start reading from the beginning. 
+ return nil, fmt.Errorf("creating kafka client: %w", err) } - if lastCommittedOffset > 0 { - lastCommittedOffset++ // We want to begin to read from the next offset, but only if we've previously committed an offset. - } - p.client.AddConsumePartitions(map[string]map[int32]kgo.Offset{ - p.kafkaCfg.Topic: {p.partitionID: kgo.NewOffset().At(lastCommittedOffset)}, - }) - - level.Info(p.logger).Log("msg", "initialising partition reader", "last_committed_offset", lastCommittedOffset, "partition", p.partitionID, "consumer_group", p.consumerGroup) - p.committer = newCommitter(p.kafkaCfg, kadm.NewClient(p.client), p.partitionID, p.consumerGroup, p.logger, p.reg) - - if targetLag, maxLag := p.kafkaCfg.TargetConsumerLagAtStartup, p.kafkaCfg.MaxConsumerLagAtStartup; targetLag > 0 && maxLag > 0 { - consumer, err := p.consumerFactory(p.committer) - if err != nil { - return fmt.Errorf("creating consumer: %w", err) - } + // Create the reader + return newReader( + c, + cfg.Topic, + partitionID, + cfg.GetConsumerGroup(instanceID, partitionID), + logger, + reg, + ), nil +} - cancelCtx, cancel := context.WithCancel(ctx) - // Temporarily start a consumer to do the initial update - recordsChan := make(chan []Record) - wait := consumer.Start(cancelCtx, recordsChan) - // Shutdown the consumer after catching up. We start a new instance in the run method to tie the lifecycle to the run context. 
- defer func() { - close(recordsChan) - cancel() - wait() - }() - - err = p.processNextFetchesUntilTargetOrMaxLagHonored(ctx, p.kafkaCfg.MaxConsumerLagAtStartup, p.kafkaCfg.TargetConsumerLagAtStartup, recordsChan) - if err != nil { - level.Error(p.logger).Log("msg", "failed to catch up to max lag", "partition", p.partitionID, "consumer_group", p.consumerGroup, "err", err) - return err - } +// NewReader creates a new Reader instance +func newReader( + client *kgo.Client, + topic string, + partitionID int32, + consumerGroup string, + logger log.Logger, + reg prometheus.Registerer, +) *Reader { + return &Reader{ + client: client, + topic: topic, + partitionID: partitionID, + consumerGroup: consumerGroup, + metrics: newReaderMetrics(reg), + logger: logger, } - - return nil } -// run is the main loop of the PartitionReader. It continuously fetches and processes -// data from Kafka, and send it to the consumer. -func (p *Reader) run(ctx context.Context) error { - level.Info(p.logger).Log("msg", "starting partition reader", "partition", p.partitionID, "consumer_group", p.consumerGroup) - p.metrics.reportRunning(p.partitionID) - - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - consumer, err := p.consumerFactory(p.committer) - if err != nil { - return errors.Wrap(err, "creating consumer") - } +// Topic returns the topic being read +func (r *Reader) Topic() string { + return r.topic +} - recordsChan := p.startFetchLoop(ctx) - wait := consumer.Start(ctx, recordsChan) +// Partition returns the partition being read +func (r *Reader) Partition() int32 { + return r.partitionID +} - wait() - p.committer.Stop() - return nil +// ConsumerGroup returns the consumer group +func (r *Reader) ConsumerGroup() string { + return r.consumerGroup } -func (p *Reader) fetchLastCommittedOffset(ctx context.Context) int64 { - // We manually create a request so that we can request the offset for a single partition - // only, which is more performant than requesting the offsets for all 
partitions. +// FetchLastCommittedOffset retrieves the last committed offset for this partition +func (r *Reader) FetchLastCommittedOffset(ctx context.Context) (int64, error) { req := kmsg.NewPtrOffsetFetchRequest() - req.Topics = []kmsg.OffsetFetchRequestTopic{{Topic: p.kafkaCfg.Topic, Partitions: []int32{p.partitionID}}} - req.Group = p.consumerGroup + req.Topics = []kmsg.OffsetFetchRequestTopic{{ + Topic: r.topic, + Partitions: []int32{r.partitionID}, + }} + req.Group = r.consumerGroup - resps := p.client.RequestSharded(ctx, req) + resps := r.client.RequestSharded(ctx, req) // Since we issued a request for only 1 partition, we expect exactly 1 response. if expected, actual := 1, len(resps); actual != expected { - level.Error(p.logger).Log("msg", fmt.Sprintf("unexpected number of responses (expected: %d, got: %d)", expected, actual), "expected", expected, "actual", len(resps)) - return kafkaStartOffset + return 0, fmt.Errorf("unexpected number of responses: %d", len(resps)) } + // Ensure no error occurred. res := resps[0] if res.Err != nil { - level.Error(p.logger).Log("msg", "error fetching group offset for partition", "err", res.Err) - return kafkaStartOffset + return 0, res.Err } // Parse the response. 
fetchRes, ok := res.Resp.(*kmsg.OffsetFetchResponse) if !ok { - level.Error(p.logger).Log("msg", "unexpected response type") - return kafkaStartOffset - } - if expected, actual := 1, len(fetchRes.Groups); actual != expected { - level.Error(p.logger).Log("msg", fmt.Sprintf("unexpected number of groups in the response (expected: %d, got: %d)", expected, actual)) - return kafkaStartOffset - } - if expected, actual := 1, len(fetchRes.Groups[0].Topics); actual != expected { - level.Error(p.logger).Log("msg", fmt.Sprintf("unexpected number of topics in the response (expected: %d, got: %d)", expected, actual)) - return kafkaStartOffset - } - if expected, actual := p.kafkaCfg.Topic, fetchRes.Groups[0].Topics[0].Topic; expected != actual { - level.Error(p.logger).Log("msg", fmt.Sprintf("unexpected topic in the response (expected: %s, got: %s)", expected, actual)) - return kafkaStartOffset - } - if expected, actual := 1, len(fetchRes.Groups[0].Topics[0].Partitions); actual != expected { - level.Error(p.logger).Log("msg", fmt.Sprintf("unexpected number of partitions in the response (expected: %d, got: %d)", expected, actual)) - return kafkaStartOffset + return 0, errors.New("unexpected response type") } - if expected, actual := p.partitionID, fetchRes.Groups[0].Topics[0].Partitions[0].Partition; actual != expected { - level.Error(p.logger).Log("msg", fmt.Sprintf("unexpected partition in the response (expected: %d, got: %d)", expected, actual)) - return kafkaStartOffset + + if len(fetchRes.Groups) != 1 || + len(fetchRes.Groups[0].Topics) != 1 || + len(fetchRes.Groups[0].Topics[0].Partitions) != 1 { + level.Debug(r.logger).Log( + "msg", "malformed response, setting to start offset", + ) + return int64(KafkaStartOffset), nil } - if err := kerr.ErrorForCode(fetchRes.Groups[0].Topics[0].Partitions[0].ErrorCode); err != nil { - level.Error(p.logger).Log("msg", "unexpected error in the response", "err", err) - return kafkaStartOffset + + partition := 
fetchRes.Groups[0].Topics[0].Partitions[0] + if err := kerr.ErrorForCode(partition.ErrorCode); err != nil { + return 0, err } - return fetchRes.Groups[0].Topics[0].Partitions[0].Offset + return partition.Offset, nil } -func (p *Reader) fetchPartitionOffset(ctx context.Context, position int64) (int64, error) { - // Create a custom request to fetch the latest offset of a specific partition. - // We manually create a request so that we can request the offset for a single partition - // only, which is more performant than requesting the offsets for all partitions. +// FetchPartitionOffset retrieves the offset for a specific position +func (r *Reader) FetchPartitionOffset(ctx context.Context, position SpecialOffset) (int64, error) { partitionReq := kmsg.NewListOffsetsRequestTopicPartition() - partitionReq.Partition = p.partitionID - partitionReq.Timestamp = position + partitionReq.Partition = r.partitionID + partitionReq.Timestamp = int64(position) topicReq := kmsg.NewListOffsetsRequestTopic() - topicReq.Topic = p.kafkaCfg.Topic + topicReq.Topic = r.topic topicReq.Partitions = []kmsg.ListOffsetsRequestTopicPartition{partitionReq} req := kmsg.NewPtrListOffsetsRequest() @@ -240,11 +225,11 @@ func (p *Reader) fetchPartitionOffset(ctx context.Context, position int64) (int6 // Even if we share the same client, other in-flight requests are not canceled once this context is canceled // (or its deadline is exceeded). We've verified it with a unit test. - resps := p.client.RequestSharded(ctx, req) + resps := r.client.RequestSharded(ctx, req) // Since we issued a request for only 1 partition, we expect exactly 1 response. - if expected := 1; len(resps) != 1 { - return 0, fmt.Errorf("unexpected number of responses (expected: %d, got: %d)", expected, len(resps)) + if len(resps) != 1 { + return 0, fmt.Errorf("unexpected number of responses: %d", len(resps)) } // Ensure no error occurred. 
@@ -253,227 +238,56 @@ func (p *Reader) fetchPartitionOffset(ctx context.Context, position int64) (int6 return 0, res.Err } - // Parse the response. listRes, ok := res.Resp.(*kmsg.ListOffsetsResponse) if !ok { return 0, errors.New("unexpected response type") } - if expected, actual := 1, len(listRes.Topics); actual != expected { - return 0, fmt.Errorf("unexpected number of topics in the response (expected: %d, got: %d)", expected, actual) - } - if expected, actual := p.kafkaCfg.Topic, listRes.Topics[0].Topic; expected != actual { - return 0, fmt.Errorf("unexpected topic in the response (expected: %s, got: %s)", expected, actual) - } - if expected, actual := 1, len(listRes.Topics[0].Partitions); actual != expected { - return 0, fmt.Errorf("unexpected number of partitions in the response (expected: %d, got: %d)", expected, actual) - } - if expected, actual := p.partitionID, listRes.Topics[0].Partitions[0].Partition; actual != expected { - return 0, fmt.Errorf("unexpected partition in the response (expected: %d, got: %d)", expected, actual) - } - if err := kerr.ErrorForCode(listRes.Topics[0].Partitions[0].ErrorCode); err != nil { - return 0, err - } - - return listRes.Topics[0].Partitions[0].Offset, nil -} -// processNextFetchesUntilTargetOrMaxLagHonored process records from Kafka until at least the maxLag is honored. -// This function does a best-effort to get lag below targetLag, but it's not guaranteed that it will be -// reached once this function successfully returns (only maxLag is guaranteed). -func (p *Reader) processNextFetchesUntilTargetOrMaxLagHonored(ctx context.Context, targetLag, maxLag time.Duration, recordsChan chan<- []Record) error { - logger := log.With(p.logger, "target_lag", targetLag, "max_lag", maxLag) - level.Info(logger).Log("msg", "partition reader is starting to consume partition until target and max consumer lag is honored") - - attempts := []func() (time.Duration, error){ - // First process fetches until at least the max lag is honored. 
- func() (time.Duration, error) { - return p.processNextFetchesUntilLagHonored(ctx, maxLag, logger, recordsChan, time.Since) - }, - - // If the target lag hasn't been reached with the first attempt (which stops once at least the max lag - // is honored) then we try to reach the (lower) target lag within a fixed time (best-effort). - // The timeout is equal to the max lag. This is done because we expect at least a 2x replay speed - // from Kafka (which means at most it takes 1s to ingest 2s of data): assuming new data is continuously - // written to the partition, we give the reader maxLag time to replay the backlog + ingest the new data - // written in the meanwhile. - func() (time.Duration, error) { - timedCtx, cancel := context.WithTimeoutCause(ctx, maxLag, errWaitTargetLagDeadlineExceeded) - defer cancel() - - return p.processNextFetchesUntilLagHonored(timedCtx, targetLag, logger, recordsChan, time.Since) - }, - - // If the target lag hasn't been reached with the previous attempt then we'll move on. However, - // we still need to guarantee that in the meanwhile the lag didn't increase and max lag is still honored. 
- func() (time.Duration, error) { - return p.processNextFetchesUntilLagHonored(ctx, maxLag, logger, recordsChan, time.Since) - }, + if len(listRes.Topics) != 1 || + len(listRes.Topics[0].Partitions) != 1 { + return 0, errors.New("malformed response") } - var currLag time.Duration - for _, attempt := range attempts { - var err error - - currLag, err = attempt() - if errors.Is(err, errWaitTargetLagDeadlineExceeded) { - continue - } - if err != nil { - return err - } - if currLag <= targetLag { - level.Info(logger).Log( - "msg", "partition reader consumed partition and current lag is lower or equal to configured target consumer lag", - "last_consumed_offset", p.committer.lastCommittedOffset, - "current_lag", currLag, - ) - return nil - } - } - - level.Warn(logger).Log( - "msg", "partition reader consumed partition and current lag is lower than configured max consumer lag but higher than target consumer lag", - "last_consumed_offset", p.committer.lastCommittedOffset, - "current_lag", currLag, - ) - return nil -} - -func (p *Reader) processNextFetchesUntilLagHonored(ctx context.Context, maxLag time.Duration, logger log.Logger, recordsChan chan<- []Record, timeSince func(time.Time) time.Duration) (time.Duration, error) { - boff := backoff.New(ctx, backoff.Config{ - MinBackoff: 100 * time.Millisecond, - MaxBackoff: time.Second, - MaxRetries: 0, // Retry forever (unless context is canceled / deadline exceeded). - }) - currLag := time.Duration(0) - - for boff.Ongoing() { - // Send a direct request to the Kafka backend to fetch the partition start offset. - partitionStartOffset, err := p.fetchPartitionOffset(ctx, kafkaStartOffset) - if err != nil { - level.Warn(logger).Log("msg", "partition reader failed to fetch partition start offset", "err", err) - boff.Wait() - continue - } - - consumerGroupLastCommittedOffset := p.fetchLastCommittedOffset(ctx) - - // Send a direct request to the Kafka backend to fetch the last produced offset. 
- // We intentionally don't use WaitNextFetchLastProducedOffset() to not introduce further - // latency. - lastProducedOffsetRequestedAt := time.Now() - lastProducedOffset, err := p.fetchPartitionOffset(ctx, kafkaEndOffset) - if err != nil { - level.Warn(logger).Log("msg", "partition reader failed to fetch last produced offset", "err", err) - boff.Wait() - continue - } - lastProducedOffset = lastProducedOffset - 1 // Kafka returns the next empty offset so we must subtract 1 to get the oldest written offset. - - level.Debug(logger).Log("msg", "fetched latest offset information", "partition_start_offset", partitionStartOffset, "last_produced_offset", lastProducedOffset) - - // Ensure there are some records to consume. For example, if the partition has been inactive for a long - // time and all its records have been deleted, the partition start offset may be > 0 but there are no - // records to actually consume. - if partitionStartOffset > lastProducedOffset { - level.Info(logger).Log("msg", "partition reader found no records to consume because partition is empty", "partition_start_offset", partitionStartOffset, "last_produced_offset", lastProducedOffset) - return 0, nil - } - - if consumerGroupLastCommittedOffset == lastProducedOffset { - level.Info(logger).Log("msg", "partition reader found no records to consume because it is already up-to-date", "last_committed_offset", consumerGroupLastCommittedOffset, "last_produced_offset", lastProducedOffset) - return 0, nil - } - - // This message is NOT expected to be logged with a very high rate. In this log we display the last measured - // lag. If we don't have it (lag is zero value), then it will not be logged. 
- level.Info(loggerWithCurrentLagIfSet(logger, currLag)).Log("msg", "partition reader is consuming records to honor target and max consumer lag", "partition_start_offset", partitionStartOffset, "last_produced_offset", lastProducedOffset, "last_processed_offset", p.lastProcessedOffset, "offset_lag", lastProducedOffset-p.lastProcessedOffset) - - for boff.Ongoing() { - // Continue reading until we reached the desired offset. - if lastProducedOffset <= p.lastProcessedOffset { - break - } - if time.Since(lastProducedOffsetRequestedAt) > time.Minute { - level.Info(loggerWithCurrentLagIfSet(logger, currLag)).Log("msg", "partition reader is still consuming records...", "last_processed_offset", p.lastProcessedOffset, "offset_lag", lastProducedOffset-p.lastProcessedOffset) - } - - timedCtx, cancel := context.WithTimeout(ctx, 30*time.Second) - recordsChan <- p.poll(timedCtx) - cancel() - } - if boff.Err() != nil { - return 0, boff.ErrCause() - } - - // If it took less than the max desired lag to replay the partition - // then we can stop here, otherwise we'll have to redo it. 
- if currLag = timeSince(lastProducedOffsetRequestedAt); currLag <= maxLag { - return currLag, nil - } + partition := listRes.Topics[0].Partitions[0] + if err := kerr.ErrorForCode(partition.ErrorCode); err != nil { + return 0, err } - return 0, boff.ErrCause() + return partition.Offset, nil } -func loggerWithCurrentLagIfSet(logger log.Logger, currLag time.Duration) log.Logger { - if currLag <= 0 { - return logger - } +// Poll retrieves the next batch of records from Kafka +func (r *Reader) Poll(ctx context.Context) ([]Record, error) { + start := time.Now() + fetches := r.client.PollFetches(ctx) + r.metrics.fetchWaitDuration.Observe(time.Since(start).Seconds()) - return log.With(logger, "current_lag", currLag) -} - -func (p *Reader) startFetchLoop(ctx context.Context) chan []Record { - records := make(chan []Record) - go func() { - for { - select { - case <-ctx.Done(): - return - default: - records <- p.poll(ctx) - } - } - }() - return records -} + // Record metrics + r.metrics.fetchesTotal.Add(float64(len(fetches))) + var numRecords int + fetches.EachRecord(func(record *kgo.Record) { + numRecords++ + r.metrics.receiveDelay.Observe(time.Since(record.Timestamp).Seconds()) + }) + r.metrics.recordsPerFetch.Observe(float64(numRecords)) -// logFetchErrors logs any errors encountered during the fetch operation. -func (p *Reader) logFetchErrors(fetches kgo.Fetches) { - mErr := multierror.New() + // Handle errors + var errs multierror.MultiError fetches.EachError(func(topic string, partition int32, err error) { if errors.Is(err, context.Canceled) { return } - - // kgo advises to "restart" the kafka client if the returned error is a kerr.Error. - // Recreating the client would cause duplicate metrics registration, so we don't do it for now. 
- mErr.Add(fmt.Errorf("topic %q, partition %d: %w", topic, partition, err)) + errs.Add(fmt.Errorf("topic %q, partition %d: %w", topic, partition, err)) }) - if len(mErr) == 0 { - return + if len(errs) > 0 { + r.metrics.fetchesErrors.Add(float64(len(errs))) + return nil, fmt.Errorf("fetch errors: %v", errs.Err()) } - p.metrics.fetchesErrors.Add(float64(len(mErr))) - level.Error(p.logger).Log("msg", "encountered error while fetching", "err", mErr.Err()) -} -// pollFetches retrieves the next batch of records from Kafka and measures the fetch duration. -func (p *Reader) poll(ctx context.Context) []Record { - defer func(start time.Time) { - p.metrics.fetchWaitDuration.Observe(time.Since(start).Seconds()) - }(time.Now()) - fetches := p.client.PollFetches(ctx) - p.recordFetchesMetrics(fetches) - p.logFetchErrors(fetches) - fetches = filterOutErrFetches(fetches) - if fetches.NumRecords() == 0 { - return nil - } + // Build records slice records := make([]Record, 0, fetches.NumRecords()) fetches.EachRecord(func(rec *kgo.Record) { - if rec.Partition != p.partitionID { - level.Error(p.logger).Log("msg", "wrong partition record received", "partition", rec.Partition, "expected_partition", p.partitionID) + if rec.Partition != r.partitionID { return } records = append(records, Record{ @@ -485,50 +299,32 @@ func (p *Reader) poll(ctx context.Context) []Record { Offset: rec.Offset, }) }) - p.lastProcessedOffset = records[len(records)-1].Offset - return records + + return records, nil } -// recordFetchesMetrics updates various metrics related to the fetch operation. 
-func (p *Reader) recordFetchesMetrics(fetches kgo.Fetches) { - var ( - now = time.Now() - numRecords = 0 - ) - fetches.EachRecord(func(record *kgo.Record) { - numRecords++ - delay := now.Sub(record.Timestamp).Seconds() - if p.Service.State() == services.Starting { - p.metrics.receiveDelay.WithLabelValues(phaseStarting).Observe(delay) - } else { - p.metrics.receiveDelay.WithLabelValues(phaseRunning).Observe(delay) - } +func (r *Reader) SetOffsetForConsumption(offset int64) { + r.client.AddConsumePartitions(map[string]map[int32]kgo.Offset{ + r.topic: {r.partitionID: kgo.NewOffset().At(offset)}, }) - - p.metrics.fetchesTotal.Add(float64(len(fetches))) - p.metrics.recordsPerFetch.Observe(float64(numRecords)) } -// filterOutErrFetches removes any fetches that resulted in errors from the provided slice. -func filterOutErrFetches(fetches kgo.Fetches) kgo.Fetches { - filtered := make(kgo.Fetches, 0, len(fetches)) - for i, fetch := range fetches { - if !isErrFetch(fetch) { - filtered = append(filtered, fetches[i]) - } - } +// Commit commits an offset to the consumer group +func (r *Reader) Commit(ctx context.Context, offset int64) error { + admin := kadm.NewClient(r.client) - return filtered -} + // Commit the last consumed offset. + toCommit := kadm.Offsets{} + toCommit.AddOffset(r.topic, r.partitionID, offset, -1) -// isErrFetch checks if a given fetch resulted in any errors. 
-func isErrFetch(fetch kgo.Fetch) bool { - for _, t := range fetch.Topics { - for _, p := range t.Partitions { - if p.Err != nil { - return true - } - } + committed, err := admin.CommitOffsets(ctx, r.consumerGroup, toCommit) + if err != nil { + return err + } else if !committed.Ok() { + return committed.Error() } - return false + + committedOffset, _ := committed.Lookup(r.topic, r.partitionID) + level.Debug(r.logger).Log("msg", "last commit offset successfully committed to Kafka", "offset", committedOffset.At) + return nil } diff --git a/pkg/kafka/partition/reader_service.go b/pkg/kafka/partition/reader_service.go new file mode 100644 index 000000000000..49dcda1c928b --- /dev/null +++ b/pkg/kafka/partition/reader_service.go @@ -0,0 +1,417 @@ +package partition + +import ( + "context" + "fmt" + "strconv" + "time" + + "github.com/go-kit/log" + "github.com/go-kit/log/level" + "github.com/grafana/dskit/backoff" + "github.com/grafana/dskit/services" + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/kafka" +) + +var errWaitTargetLagDeadlineExceeded = errors.New("waiting for target lag deadline exceeded") + +const ( + kafkaStartOffset = -2 + kafkaEndOffset = -1 + + phaseStarting = "starting" + phaseRunning = "running" +) + +type ConsumerFactory func(committer Committer) (Consumer, error) + +type Consumer interface { + Start(ctx context.Context, recordsChan <-chan []Record) func() +} + +type serviceMetrics struct { + phase *prometheus.GaugeVec + partition *prometheus.GaugeVec +} + +func newServiceMetrics(r prometheus.Registerer) *serviceMetrics { + return &serviceMetrics{ + partition: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ + Name: "loki_ingest_storage_reader_partition", + Help: "The partition ID assigned to this reader.", + }, []string{"id"}), + phase: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ + Name: 
"loki_ingest_storage_reader_phase", + Help: "The current phase of the consumer.", + }, []string{"phase"}), + } +} + +type ReaderService struct { + services.Service + + cfg ReaderConfig + reader ReaderIfc + consumerFactory ConsumerFactory + logger log.Logger + metrics *serviceMetrics + committer *partitionCommitter + + lastProcessedOffset int64 +} + +type ReaderConfig struct { + TargetConsumerLagAtStartup time.Duration + MaxConsumerLagAtStartup time.Duration + ConsumerGroupOffsetCommitFreq time.Duration +} + +// mimics `NewReader` constructor but builds a reader service using +// a reader. +func NewReaderService( + kafkaCfg kafka.Config, + partitionID int32, + instanceID string, + consumerFactory ConsumerFactory, + logger log.Logger, + reg prometheus.Registerer, +) (*ReaderService, error) { + + // Create the reader + reader, err := NewReader( + kafkaCfg, + partitionID, + instanceID, + logger, + reg, + ) + + if err != nil { + return nil, errors.Wrap(err, "creating kafka reader") + } + + return newReaderServiceFromIfc( + ReaderConfig{ + TargetConsumerLagAtStartup: kafkaCfg.TargetConsumerLagAtStartup, + MaxConsumerLagAtStartup: kafkaCfg.MaxConsumerLagAtStartup, + ConsumerGroupOffsetCommitFreq: kafkaCfg.ConsumerGroupOffsetCommitInterval, + }, + reader, + consumerFactory, + logger, + reg, + ), nil +} + +func newReaderServiceFromIfc( + cfg ReaderConfig, + reader ReaderIfc, + consumerFactory ConsumerFactory, + logger log.Logger, + reg prometheus.Registerer, +) *ReaderService { + s := &ReaderService{ + cfg: cfg, + reader: reader, + consumerFactory: consumerFactory, + logger: logger, + metrics: newServiceMetrics(reg), + lastProcessedOffset: -1, + } + + // Create the committer + s.committer = newCommitter(reader, cfg.ConsumerGroupOffsetCommitFreq, logger, reg) + + s.Service = services.NewBasicService(s.starting, s.running, nil) + return s +} + +func (s *ReaderService) starting(ctx context.Context) error { + level.Info(s.logger).Log( + "msg", "starting reader service", + 
"partition", s.reader.Partition(), + "consumer_group", s.reader.ConsumerGroup(), + ) + s.metrics.reportStarting(s.reader.Partition()) + + // Fetch the last committed offset to determine where to start reading + lastCommittedOffset, err := s.reader.FetchLastCommittedOffset(ctx) + if err != nil { + return fmt.Errorf("fetching last committed offset: %w", err) + } + + if lastCommittedOffset == int64(KafkaEndOffset) { + level.Warn(s.logger).Log( + "msg", "no committed offset found for partition, starting from the beginning", + "partition", s.reader.Partition(), + "consumer_group", s.reader.ConsumerGroup(), + ) + lastCommittedOffset = int64(KafkaStartOffset) + } + + if lastCommittedOffset >= 0 { + lastCommittedOffset++ // We want to begin to read from the next offset, but only if we've previously committed an offset. + } + + s.reader.SetOffsetForConsumption(lastCommittedOffset) + + if targetLag, maxLag := s.cfg.TargetConsumerLagAtStartup, s.cfg.MaxConsumerLagAtStartup; targetLag > 0 && maxLag > 0 { + consumer, err := s.consumerFactory(s.committer) + if err != nil { + return fmt.Errorf("creating consumer: %w", err) + } + + cancelCtx, cancel := context.WithCancel(ctx) + recordsChan := make(chan []Record) + wait := consumer.Start(cancelCtx, recordsChan) + + defer func() { + close(recordsChan) + cancel() + wait() + }() + + err = s.processNextFetchesUntilTargetOrMaxLagHonored(ctx, maxLag, targetLag, recordsChan) + if err != nil { + level.Error(s.logger).Log( + "msg", "failed to catch up to max lag", + "partition", s.reader.Partition(), + "consumer_group", s.reader.ConsumerGroup(), + "err", err, + ) + return err + } + } + + return nil +} + +func (s *ReaderService) running(ctx context.Context) error { + level.Info(s.logger).Log( + "msg", "reader service running", + "partition", s.reader.Partition(), + "consumer_group", s.reader.ConsumerGroup(), + ) + s.metrics.reportRunning(s.reader.Partition()) + + consumer, err := s.consumerFactory(s.committer) + if err != nil { + return 
fmt.Errorf("creating consumer: %w", err) + } + + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + recordsChan := s.startFetchLoop(ctx) + wait := consumer.Start(ctx, recordsChan) + wait() + s.committer.Stop() + return nil +} + +// processNextFetchesUntilTargetOrMaxLagHonored process records from Kafka until at least the maxLag is honored. +// This function does a best-effort to get lag below targetLag, but it's not guaranteed that it will be +// reached once this function successfully returns (only maxLag is guaranteed). +func (s *ReaderService) processNextFetchesUntilTargetOrMaxLagHonored(ctx context.Context, targetLag, maxLag time.Duration, recordsChan chan<- []Record) error { + logger := log.With(s.logger, "target_lag", targetLag, "max_lag", maxLag) + level.Info(logger).Log("msg", "partition reader is starting to consume partition until target and max consumer lag is honored") + + attempts := []func() (time.Duration, error){ + // First process fetches until at least the max lag is honored. + func() (time.Duration, error) { + return s.processNextFetchesUntilLagHonored(ctx, maxLag, logger, recordsChan, time.Since) + }, + + // If the target lag hasn't been reached with the first attempt (which stops once at least the max lag + // is honored) then we try to reach the (lower) target lag within a fixed time (best-effort). + // The timeout is equal to the max lag. This is done because we expect at least a 2x replay speed + // from Kafka (which means at most it takes 1s to ingest 2s of data): assuming new data is continuously + // written to the partition, we give the reader maxLag time to replay the backlog + ingest the new data + // written in the meanwhile. 
+ func() (time.Duration, error) { + timedCtx, cancel := context.WithTimeoutCause(ctx, maxLag, errWaitTargetLagDeadlineExceeded) + defer cancel() + + return s.processNextFetchesUntilLagHonored(timedCtx, targetLag, logger, recordsChan, time.Since) + }, + + // If the target lag hasn't been reached with the previous attempt then we'll move on. However, + // we still need to guarantee that in the meanwhile the lag didn't increase and max lag is still honored. + func() (time.Duration, error) { + return s.processNextFetchesUntilLagHonored(ctx, maxLag, logger, recordsChan, time.Since) + }, + } + + var currLag time.Duration + for _, attempt := range attempts { + var err error + + currLag, err = attempt() + if errors.Is(err, errWaitTargetLagDeadlineExceeded) { + continue + } + if err != nil { + return err + } + if currLag <= targetLag { + level.Info(logger).Log( + "msg", "partition reader consumed partition and current lag is lower or equal to configured target consumer lag", + "last_consumed_offset", s.committer.lastCommittedOffset, + "current_lag", currLag, + ) + return nil + } + } + + level.Warn(logger).Log( + "msg", "partition reader consumed partition and current lag is lower than configured max consumer lag but higher than target consumer lag", + "last_consumed_offset", s.committer.lastCommittedOffset, + "current_lag", currLag, + ) + return nil +} + +func (s *ReaderService) processNextFetchesUntilLagHonored(ctx context.Context, maxLag time.Duration, logger log.Logger, recordsChan chan<- []Record, timeSince func(time.Time) time.Duration) (time.Duration, error) { + boff := backoff.New(ctx, backoff.Config{ + MinBackoff: 100 * time.Millisecond, + MaxBackoff: time.Second, + MaxRetries: 0, // Retry forever (unless context is canceled / deadline exceeded). + }) + currLag := time.Duration(0) + + for boff.Ongoing() { + // Send a direct request to the Kafka backend to fetch the partition start offset. 
+ partitionStartOffset, err := s.reader.FetchPartitionOffset(ctx, kafkaStartOffset) + if err != nil { + level.Warn(logger).Log("msg", "partition reader failed to fetch partition start offset", "err", err) + boff.Wait() + continue + } + + consumerGroupLastCommittedOffset, err := s.reader.FetchLastCommittedOffset(ctx) + if err != nil { + level.Warn(logger).Log("msg", "partition reader failed to fetch last committed offset", "err", err) + boff.Wait() + continue + } + + // Send a direct request to the Kafka backend to fetch the last produced offset. + // We intentionally don't use WaitNextFetchLastProducedOffset() to not introduce further + // latency. + lastProducedOffsetRequestedAt := time.Now() + lastProducedOffset, err := s.reader.FetchPartitionOffset(ctx, kafkaEndOffset) + if err != nil { + level.Warn(logger).Log("msg", "partition reader failed to fetch last produced offset", "err", err) + boff.Wait() + continue + } + lastProducedOffset = lastProducedOffset - 1 // Kafka returns the next empty offset so we must subtract 1 to get the oldest written offset. + + level.Debug(logger).Log( + "msg", "fetched latest offset information", + "partition_start_offset", partitionStartOffset, + "last_produced_offset", lastProducedOffset, + "last_committed_offset", consumerGroupLastCommittedOffset, + ) + + // Ensure there are some records to consume. For example, if the partition has been inactive for a long + // time and all its records have been deleted, the partition start offset may be > 0 but there are no + // records to actually consume. 
+ if partitionStartOffset > lastProducedOffset { + level.Info(logger).Log("msg", "partition reader found no records to consume because partition is empty", "partition_start_offset", partitionStartOffset, "last_produced_offset", lastProducedOffset) + return 0, nil + } + + if consumerGroupLastCommittedOffset == lastProducedOffset { + level.Info(logger).Log("msg", "partition reader found no records to consume because it is already up-to-date", "last_committed_offset", consumerGroupLastCommittedOffset, "last_produced_offset", lastProducedOffset) + return 0, nil + } + + // This message is NOT expected to be logged with a very high rate. In this log we display the last measured + // lag. If we don't have it (lag is zero value), then it will not be logged. + level.Info(loggerWithCurrentLagIfSet(logger, currLag)).Log("msg", "partition reader is consuming records to honor target and max consumer lag", "partition_start_offset", partitionStartOffset, "last_produced_offset", lastProducedOffset, "last_processed_offset", s.lastProcessedOffset, "offset_lag", lastProducedOffset-s.lastProcessedOffset) + + for boff.Ongoing() { + // Continue reading until we reached the desired offset. 
+ if lastProducedOffset <= s.lastProcessedOffset { + break + } + if time.Since(lastProducedOffsetRequestedAt) > time.Minute { + level.Info(loggerWithCurrentLagIfSet(logger, currLag)).Log("msg", "partition reader is still consuming records...", "last_processed_offset", s.lastProcessedOffset, "offset_lag", lastProducedOffset-s.lastProcessedOffset) + } + + timedCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + records, err := s.reader.Poll(timedCtx) + cancel() + + if err != nil { + level.Error(logger).Log("msg", "error polling records", "err", err) + continue + } + if len(records) > 0 { + recordsChan <- records + s.lastProcessedOffset = records[len(records)-1].Offset + } + } + + if boff.Err() != nil { + return 0, boff.ErrCause() + } + + // If it took less than the max desired lag to replay the partition + // then we can stop here, otherwise we'll have to redo it. + if currLag = timeSince(lastProducedOffsetRequestedAt); currLag <= maxLag { + return currLag, nil + } + } + + return 0, boff.ErrCause() +} + +func (s *ReaderService) startFetchLoop(ctx context.Context) chan []Record { + records := make(chan []Record) + go func() { + defer close(records) + for { + select { + case <-ctx.Done(): + return + default: + res, err := s.reader.Poll(ctx) + if err != nil { + level.Error(s.logger).Log("msg", "error polling records", "err", err) + continue + } + if len(res) > 0 { + records <- res + s.lastProcessedOffset = res[len(res)-1].Offset + } + } + } + }() + return records +} + +func (s *serviceMetrics) reportStarting(partition int32) { + s.partition.WithLabelValues(strconv.Itoa(int(partition))).Set(1) + s.phase.WithLabelValues(phaseStarting).Set(1) + s.phase.WithLabelValues(phaseRunning).Set(0) +} + +func (s *serviceMetrics) reportRunning(partition int32) { + s.partition.WithLabelValues(strconv.Itoa(int(partition))).Set(1) + s.phase.WithLabelValues(phaseStarting).Set(0) + s.phase.WithLabelValues(phaseRunning).Set(1) +} + +func loggerWithCurrentLagIfSet(logger log.Logger, 
currLag time.Duration) log.Logger { + if currLag <= 0 { + return logger + } + + return log.With(logger, "current_lag", currLag) +} diff --git a/pkg/kafka/partition/reader_test.go b/pkg/kafka/partition/reader_test.go index 191c0fc304ea..f48d3a313221 100644 --- a/pkg/kafka/partition/reader_test.go +++ b/pkg/kafka/partition/reader_test.go @@ -15,7 +15,6 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "github.com/twmb/franz-go/pkg/kadm" - "github.com/twmb/franz-go/pkg/kgo" "github.com/grafana/loki/v3/pkg/kafka" "github.com/grafana/loki/v3/pkg/kafka/client" @@ -59,6 +58,26 @@ func (m *mockConsumer) Flush(ctx context.Context) error { return args.Error(0) } +func readersFromKafkaCfg( + t *testing.T, + kafkaCfg kafka.Config, + consumerFactory ConsumerFactory, + partition int32, +) (ReaderIfc, *ReaderService) { + partitionReader, err := NewReaderService( + kafkaCfg, + partition, + "test-consumer-group", + consumerFactory, + log.NewNopLogger(), + nil, + ) + require.NoError(t, err) + + // Get the underlying reader from the service + return partitionReader.reader, partitionReader +} + func TestPartitionReader_BasicFunctionality(t *testing.T) { _, kafkaCfg := testkafka.CreateCluster(t, 1, "test") consumer := newMockConsumer() @@ -67,8 +86,13 @@ func TestPartitionReader_BasicFunctionality(t *testing.T) { return consumer, nil } - partitionReader, err := NewReader(kafkaCfg, 0, "test-consumer-group", consumerFactory, log.NewNopLogger(), prometheus.NewRegistry()) - require.NoError(t, err) + _, partitionReader := readersFromKafkaCfg( + t, + kafkaCfg, + consumerFactory, + 0, + ) + producer, err := client.NewWriterClient(kafkaCfg, 100, log.NewNopLogger(), prometheus.NewRegistry()) require.NoError(t, err) @@ -121,8 +145,13 @@ func TestPartitionReader_ProcessCatchUpAtStartup(t *testing.T) { return newMockConsumer(), nil } - partitionReader, err := NewReader(kafkaCfg, 0, "test-consumer-group", consumerFactory, log.NewNopLogger(), 
prometheus.NewRegistry()) - require.NoError(t, err) + _, partitionReader := readersFromKafkaCfg( + t, + kafkaCfg, + consumerFactory, + 0, + ) + producer, err := client.NewWriterClient(kafkaCfg, 100, log.NewNopLogger(), prometheus.NewRegistry()) require.NoError(t, err) @@ -175,18 +204,18 @@ func TestPartitionReader_ProcessCommits(t *testing.T) { } partitionID := int32(0) - partitionReader, err := NewReader(kafkaCfg, partitionID, "test-consumer-group", consumerFactory, log.NewNopLogger(), prometheus.NewRegistry()) - require.NoError(t, err) + partitionReader, readerSvc := readersFromKafkaCfg( + t, + kafkaCfg, + consumerFactory, + partitionID, + ) + producer, err := client.NewWriterClient(kafkaCfg, 100, log.NewNopLogger(), prometheus.NewRegistry()) require.NoError(t, err) // Init the client: This usually happens in "start" but we want to manage our own lifecycle for this test. - partitionReader.client, err = client.NewReaderClient(kafkaCfg, nil, log.NewNopLogger(), - kgo.ConsumePartitions(map[string]map[int32]kgo.Offset{ - kafkaCfg.Topic: {partitionID: kgo.NewOffset().AtStart()}, - }), - ) - require.NoError(t, err) + partitionReader.SetOffsetForConsumption(int64(KafkaStartOffset)) stream := logproto.Stream{ Labels: labels.FromStrings("foo", "bar").String(), @@ -217,7 +246,7 @@ func TestPartitionReader_ProcessCommits(t *testing.T) { return targetLag - 1 } - _, err = partitionReader.processNextFetchesUntilLagHonored(ctx, targetLag, log.NewNopLogger(), recordsChan, timeSince) + _, err = readerSvc.processNextFetchesUntilLagHonored(ctx, targetLag, log.NewNopLogger(), recordsChan, timeSince) assert.NoError(t, err) // Wait to process all the records @@ -270,8 +299,12 @@ func TestPartitionReader_StartsAtNextOffset(t *testing.T) { require.NoError(t, resp.Error()) // Start reading - partitionReader, err := NewReader(kafkaCfg, 0, "test-consumer-group", consumerFactory, log.NewNopLogger(), prometheus.NewRegistry()) - require.NoError(t, err) + _, partitionReader := 
readersFromKafkaCfg( + t, + kafkaCfg, + consumerFactory, + 0, + ) err = services.StartAndAwaitRunning(context.Background(), partitionReader) require.NoError(t, err) @@ -328,8 +361,12 @@ func TestPartitionReader_StartsUpIfNoNewRecordsAreAvailable(t *testing.T) { require.NoError(t, resp.Error()) // Start reading - partitionReader, err := NewReader(kafkaCfg, 0, "test-consumer-group", consumerFactory, log.NewNopLogger(), prometheus.NewRegistry()) - require.NoError(t, err) + _, partitionReader := readersFromKafkaCfg( + t, + kafkaCfg, + consumerFactory, + 0, + ) ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) defer cancel() err = services.StartAndAwaitRunning(ctx, partitionReader) diff --git a/pkg/loghttp/push/otlp_config.go b/pkg/loghttp/push/otlp_config.go index f71efe8bee7d..4739e1f10dd3 100644 --- a/pkg/loghttp/push/otlp_config.go +++ b/pkg/loghttp/push/otlp_config.go @@ -56,6 +56,7 @@ func (cfg *GlobalOTLPConfig) RegisterFlags(fs *flag.FlagSet) { "service.namespace", "service.instance.id", "deployment.environment", + "deployment.environment.name", "cloud.region", "cloud.availability_zone", "k8s.cluster.name", diff --git a/pkg/logproto/bloomgateway.pb.go b/pkg/logproto/bloomgateway.pb.go index e4841b8ae0bd..2c1bbb8f7f3b 100644 --- a/pkg/logproto/bloomgateway.pb.go +++ b/pkg/logproto/bloomgateway.pb.go @@ -243,51 +243,134 @@ func (m *GroupedChunkRefs) GetLabels() *IndexSeries { return nil } +type PrefetchBloomBlocksRequest struct { + Blocks []string `protobuf:"bytes,1,rep,name=blocks,proto3" json:"blocks,omitempty"` +} + +func (m *PrefetchBloomBlocksRequest) Reset() { *m = PrefetchBloomBlocksRequest{} } +func (*PrefetchBloomBlocksRequest) ProtoMessage() {} +func (*PrefetchBloomBlocksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_a50b5dd1dbcd1415, []int{4} +} +func (m *PrefetchBloomBlocksRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *PrefetchBloomBlocksRequest) XXX_Marshal(b []byte, deterministic 
bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_PrefetchBloomBlocksRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *PrefetchBloomBlocksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrefetchBloomBlocksRequest.Merge(m, src) +} +func (m *PrefetchBloomBlocksRequest) XXX_Size() int { + return m.Size() +} +func (m *PrefetchBloomBlocksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PrefetchBloomBlocksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PrefetchBloomBlocksRequest proto.InternalMessageInfo + +func (m *PrefetchBloomBlocksRequest) GetBlocks() []string { + if m != nil { + return m.Blocks + } + return nil +} + +type PrefetchBloomBlocksResponse struct { +} + +func (m *PrefetchBloomBlocksResponse) Reset() { *m = PrefetchBloomBlocksResponse{} } +func (*PrefetchBloomBlocksResponse) ProtoMessage() {} +func (*PrefetchBloomBlocksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_a50b5dd1dbcd1415, []int{5} +} +func (m *PrefetchBloomBlocksResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *PrefetchBloomBlocksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_PrefetchBloomBlocksResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *PrefetchBloomBlocksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrefetchBloomBlocksResponse.Merge(m, src) +} +func (m *PrefetchBloomBlocksResponse) XXX_Size() int { + return m.Size() +} +func (m *PrefetchBloomBlocksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PrefetchBloomBlocksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PrefetchBloomBlocksResponse proto.InternalMessageInfo + func init() { 
proto.RegisterType((*FilterChunkRefRequest)(nil), "logproto.FilterChunkRefRequest") proto.RegisterType((*FilterChunkRefResponse)(nil), "logproto.FilterChunkRefResponse") proto.RegisterType((*ShortRef)(nil), "logproto.ShortRef") proto.RegisterType((*GroupedChunkRefs)(nil), "logproto.GroupedChunkRefs") + proto.RegisterType((*PrefetchBloomBlocksRequest)(nil), "logproto.PrefetchBloomBlocksRequest") + proto.RegisterType((*PrefetchBloomBlocksResponse)(nil), "logproto.PrefetchBloomBlocksResponse") } func init() { proto.RegisterFile("pkg/logproto/bloomgateway.proto", fileDescriptor_a50b5dd1dbcd1415) } var fileDescriptor_a50b5dd1dbcd1415 = []byte{ - // 532 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x53, 0xcd, 0x6e, 0xd3, 0x40, - 0x10, 0xf6, 0x36, 0x21, 0x24, 0x1b, 0xfe, 0xb4, 0xa2, 0x95, 0x15, 0xa4, 0x8d, 0x95, 0x03, 0xcd, - 0x05, 0x5b, 0x4a, 0x85, 0xc4, 0x85, 0x4b, 0x2a, 0x51, 0xf5, 0x06, 0x5b, 0xc4, 0x01, 0x89, 0x83, - 0xe3, 0x8c, 0x7f, 0x14, 0x7b, 0xd7, 0xdd, 0x5d, 0x03, 0xbd, 0xf1, 0x08, 0xbc, 0x03, 0x17, 0x9e, - 0x80, 0x67, 0xe8, 0x31, 0xe2, 0x54, 0x71, 0xa8, 0x88, 0x73, 0xe1, 0xd8, 0x47, 0x40, 0x76, 0xe2, - 0xba, 0xa9, 0x40, 0x95, 0x38, 0x71, 0xf2, 0xee, 0xce, 0x37, 0xe3, 0x6f, 0xbe, 0x6f, 0x06, 0xf7, - 0xd3, 0x59, 0xe0, 0xc4, 0x22, 0x48, 0xa5, 0xd0, 0xc2, 0x99, 0xc4, 0x42, 0x24, 0x81, 0xab, 0xe1, - 0x83, 0x7b, 0x62, 0x97, 0x4f, 0xa4, 0x5d, 0x05, 0x7b, 0x0f, 0x03, 0x11, 0x88, 0x15, 0xae, 0x38, - 0xad, 0xe2, 0xbd, 0x47, 0x1b, 0x05, 0xaa, 0xc3, 0x2a, 0x38, 0xf8, 0xbe, 0x85, 0xb7, 0x5f, 0x44, - 0xb1, 0x06, 0xb9, 0x1f, 0x66, 0x7c, 0xc6, 0xc0, 0x67, 0x70, 0x9c, 0x81, 0xd2, 0x64, 0x1f, 0x37, - 0x7d, 0x29, 0x12, 0x13, 0x59, 0x68, 0xd8, 0x18, 0x3b, 0xa7, 0xe7, 0x7d, 0xe3, 0xc7, 0x79, 0x7f, - 0x37, 0x88, 0x74, 0x98, 0x4d, 0x6c, 0x4f, 0x24, 0x4e, 0x2a, 0x45, 0x02, 0x3a, 0x84, 0x4c, 0x39, - 0x9e, 0x48, 0x12, 0xc1, 0x9d, 0x44, 0x4c, 0x21, 0xb6, 0x5f, 0x47, 0x09, 0xb0, 0x32, 0x99, 0x1c, - 0xe2, 0xdb, 0x3a, 0x94, 
0x22, 0x0b, 0x42, 0x73, 0xeb, 0xdf, 0xea, 0x54, 0xf9, 0xc4, 0xc6, 0x4d, - 0x09, 0xbe, 0x32, 0x1b, 0x56, 0x63, 0xd8, 0x1d, 0xf5, 0xec, 0xcb, 0x46, 0x0e, 0xa4, 0xc8, 0x52, - 0x98, 0x56, 0xfc, 0x15, 0x2b, 0x71, 0xc4, 0xc5, 0xcd, 0x34, 0x76, 0xb9, 0x79, 0xcb, 0x42, 0xc3, - 0xee, 0xe8, 0x5e, 0x8d, 0x7f, 0x19, 0xbb, 0x7c, 0xfc, 0x7c, 0xcd, 0xe3, 0xe9, 0x15, 0x1e, 0x81, - 0x74, 0x7d, 0x97, 0xbb, 0x4e, 0x2c, 0x66, 0x91, 0xf3, 0x7e, 0xcf, 0x29, 0x74, 0x3b, 0xce, 0x40, - 0x46, 0x20, 0x9d, 0xa2, 0x94, 0xfd, 0x2a, 0x03, 0x79, 0x52, 0xa4, 0xb3, 0xb2, 0x34, 0xd9, 0xc1, - 0xad, 0x49, 0x2c, 0xbc, 0x99, 0x32, 0x5b, 0x56, 0x63, 0xd8, 0x61, 0xeb, 0xdb, 0x80, 0xe1, 0x9d, - 0xeb, 0x9a, 0xaa, 0x54, 0x70, 0x05, 0xe4, 0x19, 0xee, 0x78, 0x15, 0x4f, 0x13, 0xdd, 0xd8, 0x49, - 0x0d, 0x1e, 0x7c, 0x43, 0xb8, 0x7d, 0x14, 0x0a, 0xa9, 0x19, 0xf8, 0xff, 0x9d, 0x37, 0x3d, 0xdc, - 0xf6, 0x42, 0xf0, 0x66, 0x2a, 0x4b, 0xcc, 0x86, 0x85, 0x86, 0x77, 0xd9, 0xe5, 0x7d, 0xf0, 0x05, - 0xe1, 0x07, 0xd7, 0x1b, 0x23, 0x16, 0xee, 0xfa, 0x11, 0x0f, 0x40, 0xa6, 0x32, 0xe2, 0xba, 0xec, - 0xa3, 0xc9, 0xae, 0x3e, 0x15, 0xda, 0x6a, 0xe0, 0x2e, 0xd7, 0x25, 0xb9, 0x0e, 0x5b, 0xdf, 0xc8, - 0xe3, 0x8d, 0x31, 0x20, 0xb5, 0x78, 0x95, 0x38, 0x6b, 0xfb, 0x9f, 0xe0, 0x56, 0xec, 0x4e, 0x20, - 0x56, 0x66, 0xb3, 0x1c, 0x80, 0xed, 0x1a, 0x79, 0xc8, 0xa7, 0xf0, 0xf1, 0xa8, 0xf0, 0x55, 0xb1, - 0x35, 0x68, 0xe4, 0xe3, 0x3b, 0xe3, 0x62, 0xb5, 0x0e, 0x56, 0xab, 0x45, 0xde, 0xe0, 0xfb, 0x9b, - 0x16, 0x2a, 0xd2, 0xaf, 0x2b, 0xfc, 0x71, 0x63, 0x7a, 0xd6, 0xdf, 0x01, 0x2b, 0xfb, 0x07, 0xc6, - 0xf8, 0xdd, 0x7c, 0x41, 0x8d, 0xb3, 0x05, 0x35, 0x2e, 0x16, 0x14, 0x7d, 0xca, 0x29, 0xfa, 0x9a, - 0x53, 0x74, 0x9a, 0x53, 0x34, 0xcf, 0x29, 0xfa, 0x99, 0x53, 0xf4, 0x2b, 0xa7, 0xc6, 0x45, 0x4e, - 0xd1, 0xe7, 0x25, 0x35, 0xe6, 0x4b, 0x6a, 0x9c, 0x2d, 0xa9, 0xf1, 0x76, 0xf7, 0x86, 0x29, 0xad, - 0xfe, 0x3b, 0x69, 0x95, 0x9f, 0xbd, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xcf, 0x05, 0x31, 0x08, - 0x35, 0x04, 0x00, 0x00, + // 581 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x53, 0xbd, 0x6e, 0xd4, 0x40, + 0x10, 0xf6, 0xe6, 0x8e, 0x23, 0xd9, 0xf0, 0xa7, 0x85, 0x44, 0x96, 0x23, 0xf6, 0x2c, 0x0b, 0xc8, + 0x35, 0xd8, 0x52, 0x02, 0x12, 0x0d, 0xcd, 0x45, 0x22, 0x4a, 0x17, 0x36, 0x88, 0x02, 0x89, 0xc2, + 0xe7, 0x1b, 0xff, 0xe8, 0x6c, 0xaf, 0xb3, 0x5e, 0x03, 0xe9, 0x78, 0x04, 0xde, 0x81, 0x86, 0x27, + 0xe0, 0x19, 0x52, 0x50, 0x44, 0x54, 0x11, 0x45, 0x44, 0x7c, 0x0d, 0x65, 0x1e, 0x01, 0xd9, 0x3e, + 0xc7, 0x77, 0xd1, 0x45, 0x27, 0x51, 0x51, 0xd9, 0x3b, 0xf3, 0xcd, 0xcc, 0x37, 0xf3, 0xcd, 0xe0, + 0x6e, 0x32, 0xf2, 0xac, 0x90, 0x7b, 0x89, 0xe0, 0x92, 0x5b, 0x83, 0x90, 0xf3, 0xc8, 0xb3, 0x25, + 0x7c, 0xb4, 0x8f, 0xcc, 0xd2, 0x44, 0x96, 0x6b, 0xa7, 0xf6, 0xc0, 0xe3, 0x1e, 0xaf, 0x70, 0xc5, + 0x5f, 0xe5, 0xd7, 0x36, 0x66, 0x12, 0xd4, 0x3f, 0x95, 0xd3, 0xf8, 0xb9, 0x84, 0xd7, 0x5e, 0x05, + 0xa1, 0x04, 0xb1, 0xe3, 0x67, 0xf1, 0x88, 0x81, 0xcb, 0xe0, 0x30, 0x83, 0x54, 0x92, 0x1d, 0xdc, + 0x76, 0x05, 0x8f, 0x54, 0xa4, 0xa3, 0x5e, 0xab, 0x6f, 0x1d, 0x9f, 0x75, 0x95, 0x5f, 0x67, 0xdd, + 0x4d, 0x2f, 0x90, 0x7e, 0x36, 0x30, 0x1d, 0x1e, 0x59, 0x89, 0xe0, 0x11, 0x48, 0x1f, 0xb2, 0xd4, + 0x72, 0x78, 0x14, 0xf1, 0xd8, 0x8a, 0xf8, 0x10, 0x42, 0xf3, 0x4d, 0x10, 0x01, 0x2b, 0x83, 0xc9, + 0x1e, 0xbe, 0x29, 0x7d, 0xc1, 0x33, 0xcf, 0x57, 0x97, 0xfe, 0x2d, 0x4f, 0x1d, 0x4f, 0x4c, 0xdc, + 0x16, 0xe0, 0xa6, 0x6a, 0x4b, 0x6f, 0xf5, 0x56, 0xb7, 0x34, 0xf3, 0xb2, 0x91, 0x5d, 0xc1, 0xb3, + 0x04, 0x86, 0x35, 0xff, 0x94, 0x95, 0x38, 0x62, 0xe3, 0x76, 0x12, 0xda, 0xb1, 0x7a, 0x43, 0x47, + 0xbd, 0xd5, 0xad, 0x3b, 0x0d, 0x7e, 0x3f, 0xb4, 0xe3, 0xfe, 0xcb, 0x09, 0x8f, 0xe7, 0x53, 0x3c, + 0x3c, 0x61, 0xbb, 0x76, 0x6c, 0x5b, 0x21, 0x1f, 0x05, 0xd6, 0x87, 0x6d, 0xab, 0x98, 0xdb, 0x61, + 0x06, 0x22, 0x00, 0x61, 0x15, 0xa9, 0xcc, 0xd7, 0x19, 0x88, 0xa3, 0x22, 0x9c, 0x95, 0xa9, 0xc9, + 0x3a, 0xee, 0x0c, 0x42, 0xee, 0x8c, 0x52, 0xb5, 0xa3, 0xb7, 0x7a, 0x2b, 0x6c, 0xf2, 0x32, 0x18, + 0x5e, 0xbf, 0x3a, 
0xd3, 0x34, 0xe1, 0x71, 0x0a, 0xe4, 0x05, 0x5e, 0x71, 0x6a, 0x9e, 0x2a, 0x5a, + 0xd8, 0x49, 0x03, 0x36, 0xbe, 0x23, 0xbc, 0x7c, 0xe0, 0x73, 0x21, 0x19, 0xb8, 0xff, 0x9d, 0x36, + 0x1a, 0x5e, 0x76, 0x7c, 0x70, 0x46, 0x69, 0x16, 0xa9, 0x2d, 0x1d, 0xf5, 0x6e, 0xb3, 0xcb, 0xb7, + 0xf1, 0x15, 0xe1, 0x7b, 0x57, 0x1b, 0x23, 0x3a, 0x5e, 0x75, 0x83, 0xd8, 0x03, 0x91, 0x88, 0x20, + 0x96, 0x65, 0x1f, 0x6d, 0x36, 0x6d, 0x2a, 0x66, 0x2b, 0x21, 0xb6, 0x63, 0x59, 0x92, 0x5b, 0x61, + 0x93, 0x17, 0x79, 0x32, 0xb3, 0x06, 0xa4, 0x19, 0x5e, 0x3d, 0x9c, 0x89, 0xfc, 0x4f, 0x71, 0x27, + 0xb4, 0x07, 0x10, 0xa6, 0x6a, 0xbb, 0x5c, 0x80, 0xb5, 0x06, 0xb9, 0x17, 0x0f, 0xe1, 0xd3, 0x41, + 0xa1, 0x6b, 0xca, 0x26, 0x20, 0xe3, 0x19, 0xd6, 0xf6, 0x05, 0xb8, 0x20, 0x1d, 0xbf, 0x5f, 0x9c, + 0x58, 0xbf, 0x54, 0xb2, 0xbe, 0x85, 0x46, 0x68, 0x34, 0x23, 0xf4, 0x43, 0xbc, 0x31, 0x37, 0xaa, + 0x52, 0x7b, 0xeb, 0x07, 0xc2, 0xb7, 0x4a, 0xfb, 0x6e, 0x75, 0xb0, 0xe4, 0x2d, 0xbe, 0x3b, 0xbb, + 0x18, 0x29, 0xe9, 0x36, 0xbc, 0xe6, 0xde, 0xa1, 0xa6, 0x5f, 0x0f, 0xa8, 0xca, 0x18, 0x0a, 0x19, + 0xe2, 0xfb, 0x73, 0x78, 0x90, 0x47, 0x53, 0x4b, 0x7f, 0x6d, 0x73, 0xda, 0xe3, 0x05, 0xa8, 0xba, + 0x4a, 0xff, 0xfd, 0xc9, 0x39, 0x55, 0x4e, 0xcf, 0xa9, 0x72, 0x71, 0x4e, 0xd1, 0xe7, 0x9c, 0xa2, + 0x6f, 0x39, 0x45, 0xc7, 0x39, 0x45, 0x27, 0x39, 0x45, 0xbf, 0x73, 0x8a, 0xfe, 0xe4, 0x54, 0xb9, + 0xc8, 0x29, 0xfa, 0x32, 0xa6, 0xca, 0xc9, 0x98, 0x2a, 0xa7, 0x63, 0xaa, 0xbc, 0xdb, 0x5c, 0x70, + 0x61, 0x75, 0xf1, 0x41, 0xa7, 0xfc, 0x6c, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x3b, 0x6b, 0x23, + 0x31, 0xf1, 0x04, 0x00, 0x00, } func (this *FilterChunkRefRequest) Equal(that interface{}) bool { @@ -433,6 +516,56 @@ func (this *GroupedChunkRefs) Equal(that interface{}) bool { } return true } +func (this *PrefetchBloomBlocksRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*PrefetchBloomBlocksRequest) + if !ok { + that2, ok := that.(PrefetchBloomBlocksRequest) + if ok { + that1 = 
&that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if len(this.Blocks) != len(that1.Blocks) { + return false + } + for i := range this.Blocks { + if this.Blocks[i] != that1.Blocks[i] { + return false + } + } + return true +} +func (this *PrefetchBloomBlocksResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*PrefetchBloomBlocksResponse) + if !ok { + that2, ok := that.(PrefetchBloomBlocksResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + return true +} func (this *FilterChunkRefRequest) GoString() string { if this == nil { return "nil" @@ -490,6 +623,25 @@ func (this *GroupedChunkRefs) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *PrefetchBloomBlocksRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.PrefetchBloomBlocksRequest{") + s = append(s, "Blocks: "+fmt.Sprintf("%#v", this.Blocks)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *PrefetchBloomBlocksResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 4) + s = append(s, "&logproto.PrefetchBloomBlocksResponse{") + s = append(s, "}") + return strings.Join(s, "") +} func valueToGoStringBloomgateway(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -512,6 +664,7 @@ const _ = grpc.SupportPackageIsVersion4 // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
type BloomGatewayClient interface { FilterChunkRefs(ctx context.Context, in *FilterChunkRefRequest, opts ...grpc.CallOption) (*FilterChunkRefResponse, error) + PrefetchBloomBlocks(ctx context.Context, in *PrefetchBloomBlocksRequest, opts ...grpc.CallOption) (*PrefetchBloomBlocksResponse, error) } type bloomGatewayClient struct { @@ -531,9 +684,19 @@ func (c *bloomGatewayClient) FilterChunkRefs(ctx context.Context, in *FilterChun return out, nil } +func (c *bloomGatewayClient) PrefetchBloomBlocks(ctx context.Context, in *PrefetchBloomBlocksRequest, opts ...grpc.CallOption) (*PrefetchBloomBlocksResponse, error) { + out := new(PrefetchBloomBlocksResponse) + err := c.cc.Invoke(ctx, "/logproto.BloomGateway/PrefetchBloomBlocks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // BloomGatewayServer is the server API for BloomGateway service. type BloomGatewayServer interface { FilterChunkRefs(context.Context, *FilterChunkRefRequest) (*FilterChunkRefResponse, error) + PrefetchBloomBlocks(context.Context, *PrefetchBloomBlocksRequest) (*PrefetchBloomBlocksResponse, error) } // UnimplementedBloomGatewayServer can be embedded to have forward compatible implementations. 
@@ -543,6 +706,9 @@ type UnimplementedBloomGatewayServer struct { func (*UnimplementedBloomGatewayServer) FilterChunkRefs(ctx context.Context, req *FilterChunkRefRequest) (*FilterChunkRefResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method FilterChunkRefs not implemented") } +func (*UnimplementedBloomGatewayServer) PrefetchBloomBlocks(ctx context.Context, req *PrefetchBloomBlocksRequest) (*PrefetchBloomBlocksResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PrefetchBloomBlocks not implemented") +} func RegisterBloomGatewayServer(s *grpc.Server, srv BloomGatewayServer) { s.RegisterService(&_BloomGateway_serviceDesc, srv) @@ -566,6 +732,24 @@ func _BloomGateway_FilterChunkRefs_Handler(srv interface{}, ctx context.Context, return interceptor(ctx, in, info, handler) } +func _BloomGateway_PrefetchBloomBlocks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PrefetchBloomBlocksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BloomGatewayServer).PrefetchBloomBlocks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/logproto.BloomGateway/PrefetchBloomBlocks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BloomGatewayServer).PrefetchBloomBlocks(ctx, req.(*PrefetchBloomBlocksRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _BloomGateway_serviceDesc = grpc.ServiceDesc{ ServiceName: "logproto.BloomGateway", HandlerType: (*BloomGatewayServer)(nil), @@ -574,6 +758,10 @@ var _BloomGateway_serviceDesc = grpc.ServiceDesc{ MethodName: "FilterChunkRefs", Handler: _BloomGateway_FilterChunkRefs_Handler, }, + { + MethodName: "PrefetchBloomBlocks", + Handler: _BloomGateway_PrefetchBloomBlocks_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "pkg/logproto/bloomgateway.proto", @@ -781,6 
+969,61 @@ func (m *GroupedChunkRefs) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *PrefetchBloomBlocksRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *PrefetchBloomBlocksRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *PrefetchBloomBlocksRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Blocks) > 0 { + for iNdEx := len(m.Blocks) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Blocks[iNdEx]) + copy(dAtA[i:], m.Blocks[iNdEx]) + i = encodeVarintBloomgateway(dAtA, i, uint64(len(m.Blocks[iNdEx]))) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *PrefetchBloomBlocksResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *PrefetchBloomBlocksResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *PrefetchBloomBlocksResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + return len(dAtA) - i, nil +} + func encodeVarintBloomgateway(dAtA []byte, offset int, v uint64) int { offset -= sovBloomgateway(v) base := offset @@ -880,6 +1123,30 @@ func (m *GroupedChunkRefs) Size() (n int) { return n } +func (m *PrefetchBloomBlocksRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Blocks) > 0 { + for _, s := range m.Blocks { + l = len(s) + n += 1 + l + sovBloomgateway(uint64(l)) + } + } + return n +} + +func (m *PrefetchBloomBlocksResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + 
return n +} + func sovBloomgateway(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -950,6 +1217,25 @@ func (this *GroupedChunkRefs) String() string { }, "") return s } +func (this *PrefetchBloomBlocksRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&PrefetchBloomBlocksRequest{`, + `Blocks:` + fmt.Sprintf("%v", this.Blocks) + `,`, + `}`, + }, "") + return s +} +func (this *PrefetchBloomBlocksResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&PrefetchBloomBlocksResponse{`, + `}`, + }, "") + return s +} func valueToStringBloomgateway(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -1519,6 +1805,144 @@ func (m *GroupedChunkRefs) Unmarshal(dAtA []byte) error { } return nil } +func (m *PrefetchBloomBlocksRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBloomgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: PrefetchBloomBlocksRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: PrefetchBloomBlocksRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Blocks", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBloomgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return 
ErrInvalidLengthBloomgateway + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthBloomgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Blocks = append(m.Blocks, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipBloomgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthBloomgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthBloomgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *PrefetchBloomBlocksResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowBloomgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: PrefetchBloomBlocksResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: PrefetchBloomBlocksResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := skipBloomgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthBloomgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthBloomgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipBloomgateway(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/pkg/logproto/bloomgateway.proto b/pkg/logproto/bloomgateway.proto index 809afb90b7b5..c34027e6cbc6 100644 --- 
a/pkg/logproto/bloomgateway.proto +++ b/pkg/logproto/bloomgateway.proto @@ -50,6 +50,13 @@ message GroupedChunkRefs { IndexSeries labels = 4; } +message PrefetchBloomBlocksRequest { + repeated string blocks = 1; +} + +message PrefetchBloomBlocksResponse {} + service BloomGateway { rpc FilterChunkRefs(FilterChunkRefRequest) returns (FilterChunkRefResponse) {} + rpc PrefetchBloomBlocks(PrefetchBloomBlocksRequest) returns (PrefetchBloomBlocksResponse) {} } diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index c8d0bcb31ebb..c5ef408cc21e 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -583,16 +583,36 @@ func (b *LabelsBuilder) LabelsResult() LabelsResult { return b.currentResult } - stream := b.labels(StreamLabel).Copy() - structuredMetadata := b.labels(StructuredMetadataLabel).Copy() - parsed := b.labels(ParsedLabel).Copy() - b.buf = flattenLabels(b.buf, stream, structuredMetadata, parsed) + // Get all labels at once and sort them + b.buf = b.UnsortedLabels(b.buf) + sort.Sort(b.buf) hash := b.hasher.Hash(b.buf) + if cached, ok := b.resultCache[hash]; ok { return cached } - result := NewLabelsResult(b.buf.String(), hash, stream, structuredMetadata, parsed) + // Now segregate the sorted labels into their categories + var stream, meta, parsed []labels.Label + + for _, l := range b.buf { + // Skip error labels for stream and meta categories + if l.Name == logqlmodel.ErrorLabel || l.Name == logqlmodel.ErrorDetailsLabel { + parsed = append(parsed, l) + continue + } + + // Check which category this label belongs to + if labelsContain(b.add[ParsedLabel], l.Name) { + parsed = append(parsed, l) + } else if labelsContain(b.add[StructuredMetadataLabel], l.Name) { + meta = append(meta, l) + } else { + stream = append(stream, l) + } + } + + result := NewLabelsResult(b.buf.String(), hash, labels.New(stream...), labels.New(meta...), labels.New(parsed...)) b.resultCache[hash] = result return result diff --git a/pkg/logql/log/labels_test.go 
b/pkg/logql/log/labels_test.go index 7f543a48d7d8..2be2769f00ca 100644 --- a/pkg/logql/log/labels_test.go +++ b/pkg/logql/log/labels_test.go @@ -3,6 +3,7 @@ package log import ( "sort" "testing" + "time" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" @@ -463,3 +464,41 @@ func assertLabelResult(t *testing.T, lbs labels.Labels, res LabelsResult) { res.String(), ) } + +// benchmark streamLineSampleExtractor.Process method +func BenchmarkStreamLineSampleExtractor_Process(b *testing.B) { + // Setup some test data + baseLabels := labels.FromStrings( + "namespace", "prod", + "cluster", "us-east-1", + "pod", "my-pod-123", + "container", "main", + "stream", "stdout", + ) + + structuredMeta := []labels.Label{ + {Name: "level", Value: "info"}, + {Name: "caller", Value: "http.go:42"}, + {Name: "user", Value: "john"}, + {Name: "trace_id", Value: "abc123"}, + } + + testLine := []byte(`{"timestamp":"2024-01-01T00:00:00Z","level":"info","message":"test message","duration_ms":150}`) + + // JSON parsing + filtering + label extraction + matcher := labels.MustNewMatcher(labels.MatchEqual, "level", "info") + filter := NewStringLabelFilter(matcher) + stages := []Stage{ + NewJSONParser(), + filter, + } + ex, err := NewLineSampleExtractor(CountExtractor, stages, []string{}, false, false) + require.NoError(b, err) + streamEx := ex.ForStream(baseLabels) + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, _, _ = streamEx.Process(time.Now().UnixNano(), testLine, structuredMeta...) 
+ } +} diff --git a/pkg/loki/loki.go b/pkg/loki/loki.go index 3586e21b541d..ff5c6de1565b 100644 --- a/pkg/loki/loki.go +++ b/pkg/loki/loki.go @@ -31,6 +31,7 @@ import ( "google.golang.org/grpc/health/grpc_health_v1" "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/blockbuilder" "github.com/grafana/loki/v3/pkg/bloombuild" "github.com/grafana/loki/v3/pkg/bloomgateway" "github.com/grafana/loki/v3/pkg/compactor" @@ -89,6 +90,7 @@ type Config struct { RulerStorage rulestore.Config `yaml:"ruler_storage,omitempty" doc:"hidden"` IngesterClient ingester_client.Config `yaml:"ingester_client,omitempty"` Ingester ingester.Config `yaml:"ingester,omitempty"` + BlockBuilder blockbuilder.Config `yaml:"block_builder,omitempty"` Pattern pattern.Config `yaml:"pattern_ingester,omitempty"` IndexGateway indexgateway.Config `yaml:"index_gateway"` BloomBuild bloombuild.Config `yaml:"bloom_build,omitempty" category:"experimental"` @@ -183,6 +185,7 @@ func (c *Config) RegisterFlags(f *flag.FlagSet) { c.OperationalConfig.RegisterFlags(f) c.Profiling.RegisterFlags(f) c.KafkaConfig.RegisterFlags(f) + c.BlockBuilder.RegisterFlags(f) } func (c *Config) registerServerFlagsWithChangedDefaultValues(fs *flag.FlagSet) { @@ -258,6 +261,9 @@ func (c *Config) Validate() error { if err := c.Ingester.Validate(); err != nil { errs = append(errs, errors.Wrap(err, "CONFIG ERROR: invalid ingester config")) } + if err := c.BlockBuilder.Validate(); err != nil { + errs = append(errs, errors.Wrap(err, "CONFIG ERROR: invalid block_builder config")) + } if err := c.LimitsConfig.Validate(); err != nil { errs = append(errs, errors.Wrap(err, "CONFIG ERROR: invalid limits_config config")) } @@ -372,6 +378,7 @@ type Loki struct { indexGatewayRingManager *lokiring.RingManager partitionRingWatcher *ring.PartitionRingWatcher partitionRing *ring.PartitionInstanceRing + blockBuilder *blockbuilder.BlockBuilder ClientMetrics storage.ClientMetrics deleteClientMetrics 
*deletion.DeleteRequestClientMetrics @@ -682,6 +689,7 @@ func (t *Loki) setupModuleManager() error { mm.RegisterModule(PatternIngesterTee, t.initPatternIngesterTee, modules.UserInvisibleModule) mm.RegisterModule(PatternIngester, t.initPatternIngester) mm.RegisterModule(PartitionRing, t.initPartitionRing, modules.UserInvisibleModule) + mm.RegisterModule(BlockBuilder, t.initBlockBuilder) mm.RegisterModule(All, nil) mm.RegisterModule(Read, nil) @@ -719,6 +727,7 @@ func (t *Loki) setupModuleManager() error { IndexGatewayRing: {Overrides, MemberlistKV}, PartitionRing: {MemberlistKV, Server, Ring}, MemberlistKV: {Server}, + BlockBuilder: {PartitionRing, Store, Server}, Read: {QueryFrontend, Querier}, Write: {Ingester, Distributor, PatternIngester}, diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index b8ebc89b6475..0e53cbd00e4a 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -35,6 +35,7 @@ import ( "github.com/prometheus/common/model" "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/blockbuilder" "github.com/grafana/loki/v3/pkg/bloombuild/builder" "github.com/grafana/loki/v3/pkg/bloombuild/planner" bloomprotos "github.com/grafana/loki/v3/pkg/bloombuild/protos" @@ -47,6 +48,8 @@ import ( "github.com/grafana/loki/v3/pkg/distributor" "github.com/grafana/loki/v3/pkg/indexgateway" "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/kafka/partition" + "github.com/grafana/loki/v3/pkg/kafka/partitionring" "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/loki/v3/pkg/logql" "github.com/grafana/loki/v3/pkg/logqlmodel/stats" @@ -134,6 +137,7 @@ const ( Analytics string = "analytics" InitCodec string = "init-codec" PartitionRing string = "partition-ring" + BlockBuilder string = "block-builder" ) const ( @@ -875,6 +879,12 @@ func (t *Loki) updateConfigForShipperStore() { t.Cfg.StorageConfig.BoltDBShipperConfig.Mode = indexshipper.ModeReadOnly t.Cfg.StorageConfig.TSDBShipperConfig.Mode = 
indexshipper.ModeReadOnly + case t.Cfg.isTarget(BlockBuilder): + // Blockbuilder handles index creation independently of the shipper. + // TODO: introduce Disabled mode for boltdb shipper and set it here. + t.Cfg.StorageConfig.BoltDBShipperConfig.Mode = indexshipper.ModeReadOnly + t.Cfg.StorageConfig.TSDBShipperConfig.Mode = indexshipper.ModeDisabled + default: // All other targets use the shipper store in RW mode t.Cfg.StorageConfig.BoltDBShipperConfig.Mode = indexshipper.ModeReadWrite @@ -1634,6 +1644,22 @@ func (t *Loki) initBloomBuilder() (services.Service, error) { ringManager = t.indexGatewayRingManager } + var bloomGatewayClient bloomgateway.Client + if t.Cfg.BloomGateway.Enabled { + var err error + bloomGatewayClient, err = bloomgateway.NewClient( + t.Cfg.BloomGateway.Client, + t.Overrides, + prometheus.DefaultRegisterer, + logger, + t.cacheGenerationLoader, + t.Cfg.CompactorConfig.RetentionEnabled, + ) + if err != nil { + return nil, err + } + } + return builder.New( t.Cfg.BloomBuild.Builder, t.Overrides, @@ -1642,6 +1668,7 @@ func (t *Loki) initBloomBuilder() (services.Service, error) { t.ClientMetrics, t.Store, t.BloomStore, + bloomGatewayClient, logger, prometheus.DefaultRegisterer, ringManager, @@ -1770,6 +1797,61 @@ func (t *Loki) initPartitionRing() (services.Service, error) { return t.partitionRingWatcher, nil } +func (t *Loki) initBlockBuilder() (services.Service, error) { + logger := log.With(util_log.Logger, "component", "block_builder") + + // TODO(owen-d): perhaps refactor to not use the ingester config? 
+ id := t.Cfg.Ingester.LifecyclerConfig.ID + + ingestPartitionID, err := partitionring.ExtractIngesterPartitionID(id) + if err != nil { + return nil, fmt.Errorf("calculating block builder partition ID: %w", err) + } + + reader, err := partition.NewReader( + t.Cfg.KafkaConfig, + ingestPartitionID, + id, + logger, + prometheus.DefaultRegisterer, + ) + if err != nil { + return nil, err + } + + controller, err := blockbuilder.NewPartitionJobController( + reader, + t.Cfg.BlockBuilder.Backoff, + ) + + if err != nil { + return nil, err + } + + objectStore, err := blockbuilder.NewMultiStore(t.Cfg.SchemaConfig.Configs, t.Cfg.StorageConfig, t.ClientMetrics) + if err != nil { + return nil, err + } + + bb, err := blockbuilder.NewBlockBuilder( + id, + t.Cfg.BlockBuilder, + t.Cfg.SchemaConfig.Configs, + t.Store, + objectStore, + logger, + prometheus.DefaultRegisterer, + controller, + ) + + if err != nil { + return nil, err + } + + t.blockBuilder = bb + return t.blockBuilder, nil +} + func (t *Loki) deleteRequestsClient(clientType string, limits limiter.CombinedLimits) (deletion.DeleteRequestsClient, error) { if !t.supportIndexDeleteRequest() || !t.Cfg.CompactorConfig.RetentionEnabled { return deletion.NewNoOpDeleteRequestsStore(), nil diff --git a/pkg/pattern/aggregation/metrics.go b/pkg/pattern/aggregation/metrics.go index d777af50b813..344d2255f6fb 100644 --- a/pkg/pattern/aggregation/metrics.go +++ b/pkg/pattern/aggregation/metrics.go @@ -3,26 +3,121 @@ package aggregation import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/v3/pkg/util/constants" ) -type ChunkMetrics struct { +type Metrics struct { + reg prometheus.Registerer + chunks *prometheus.GaugeVec samples *prometheus.CounterVec + + // push operation + pushErrors *prometheus.CounterVec + pushRetries *prometheus.CounterVec + pushSuccesses *prometheus.CounterVec + payloadSize *prometheus.HistogramVec + + // Batch metrics + 
streamsPerPush *prometheus.HistogramVec + entriesPerPush *prometheus.HistogramVec + servicesTracked *prometheus.GaugeVec + + writeTimeout *prometheus.CounterVec } -func NewChunkMetrics(r prometheus.Registerer, metricsNamespace string) *ChunkMetrics { - return &ChunkMetrics{ +func NewMetrics(r prometheus.Registerer) *Metrics { + var m Metrics + m.reg = r + + m = Metrics{ chunks: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: "pattern_ingester", Name: "metric_chunks", Help: "The total number of chunks in memory.", }, []string{"service_name"}), samples: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Namespace: metricsNamespace, + Namespace: constants.Loki, Subsystem: "pattern_ingester", Name: "metric_samples", Help: "The total number of samples in memory.", }, []string{"service_name"}), + pushErrors: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "push_errors_total", + Help: "Total number of errors when pushing metrics to Loki.", + }, []string{"tenant_id", "error_type"}), + + pushRetries: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "push_retries_total", + Help: "Total number of retries when pushing metrics to Loki.", + }, []string{"tenant_id"}), + + pushSuccesses: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "push_successes_total", + Help: "Total number of successful pushes to Loki.", + }, []string{"tenant_id"}), + + // Batch metrics + payloadSize: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "push_payload_bytes", + Help: "Size of push payloads in bytes.", + Buckets: []float64{1024, 4096, 16384, 65536, 262144, 1048576}, + }, []string{"tenant_id"}), + + streamsPerPush: 
promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "streams_per_push", + Help: "Number of streams in each push request.", + Buckets: []float64{1, 5, 10, 25, 50, 100, 250, 500, 1000}, + }, []string{"tenant_id"}), + + entriesPerPush: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "entries_per_push", + Help: "Number of entries in each push request.", + Buckets: []float64{10, 50, 100, 500, 1000, 5000, 10000}, + }, []string{"tenant_id"}), + + servicesTracked: promauto.With(r).NewGaugeVec(prometheus.GaugeOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "services_tracked", + Help: "Number of unique services being tracked.", + }, []string{"tenant_id"}), + writeTimeout: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Subsystem: "pattern_ingester", + Name: "write_timeouts_total", + Help: "Total number of write timeouts.", + }, []string{"tenant_id"}), } + + if m.reg != nil { + m.reg.MustRegister( + m.chunks, + m.samples, + m.pushErrors, + m.pushRetries, + m.pushSuccesses, + m.payloadSize, + m.streamsPerPush, + m.entriesPerPush, + m.servicesTracked, + m.writeTimeout, + ) + } + + return &m } diff --git a/pkg/pattern/aggregation/push.go b/pkg/pattern/aggregation/push.go index a282913fe508..52b291fda54f 100644 --- a/pkg/pattern/aggregation/push.go +++ b/pkg/pattern/aggregation/push.go @@ -4,6 +4,7 @@ import ( "bufio" "bytes" "context" + "errors" "fmt" "io" "net/http" @@ -16,6 +17,7 @@ import ( "github.com/go-kit/log/level" "github.com/golang/snappy" "github.com/opentracing/opentracing-go" + "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/config" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" @@ -71,6 +73,8 @@ type Push struct { backoff *backoff.Config entries entries + + metrics *Metrics } 
type entry struct { @@ -108,6 +112,7 @@ func NewPush( useTLS bool, backoffCfg *backoff.Config, logger log.Logger, + registrer prometheus.Registerer, ) (*Push, error) { client, err := config.NewClientFromConfig(cfg, "pattern-ingester-push", config.WithHTTP2Disabled()) if err != nil { @@ -142,6 +147,7 @@ func NewPush( entries: entries{ entries: make([]entry, 0), }, + metrics: NewMetrics(registrer), } go p.run(pushPeriod) @@ -222,6 +228,10 @@ func (p *Push) buildPayload(ctx context.Context) ([]byte, error) { payload = snappy.Encode(nil, payload) + p.metrics.streamsPerPush.WithLabelValues(p.tenantID).Observe(float64(len(streams))) + p.metrics.entriesPerPush.WithLabelValues(p.tenantID).Observe(float64(len(entries))) + p.metrics.servicesTracked.WithLabelValues(p.tenantID).Set(float64(serviceLimit)) + sp.LogKV( "event", "build aggregated metrics payload", "num_service", len(entriesByStream), @@ -267,7 +277,7 @@ func (p *Push) run(pushPeriod time.Duration) { break } - if status > 0 && status != 429 && status/100 != 5 { + if status > 0 && !util.IsRateLimited(status) && !util.IsServerError(status) { level.Error(p.logger).Log("msg", "failed to send entry, server rejected push with a non-retryable status code", "status", status, "err", err) pushTicker.Reset(pushPeriod) break } @@ -302,6 +312,8 @@ func (p *Push) send(ctx context.Context, payload []byte) (int, error) { defer sp.Finish() req, err := http.NewRequestWithContext(ctx, "POST", p.lokiURL, bytes.NewReader(payload)) + p.metrics.payloadSize.WithLabelValues(p.tenantID).Observe(float64(len(payload))) + if err != nil { return -1, fmt.Errorf("failed to create push request: %w", err) } @@ -320,23 +332,29 @@ func (p *Push) send(ctx context.Context, payload []byte) (int, error) { resp, err = p.httpClient.Do(req) if err != nil { + if errors.Is(ctx.Err(), context.DeadlineExceeded) { + p.metrics.writeTimeout.WithLabelValues(p.tenantID).Inc() + } return -1, fmt.Errorf("failed to push payload: %w", err) } - status := resp.StatusCode -
if status/100 != 2 { + statusCode := resp.StatusCode + if util.IsError(statusCode) { + errType := util.ErrorTypeFromHTTPStatus(statusCode) + scanner := bufio.NewScanner(io.LimitReader(resp.Body, defaultMaxReponseBufferLen)) line := "" if scanner.Scan() { line = scanner.Text() } - err = fmt.Errorf("server returned HTTP status %s (%d): %s", resp.Status, status, line) + err = fmt.Errorf("server returned HTTP status %s (%d): %s", resp.Status, statusCode, line) + p.metrics.pushErrors.WithLabelValues(p.tenantID, errType).Inc() } if err := resp.Body.Close(); err != nil { level.Error(p.logger).Log("msg", "failed to close response body", "error", err) } - return status, err + return statusCode, err } func AggregatedMetricEntry( diff --git a/pkg/pattern/aggregation/push_test.go b/pkg/pattern/aggregation/push_test.go index 149b54a97715..a0a4974278bc 100644 --- a/pkg/pattern/aggregation/push_test.go +++ b/pkg/pattern/aggregation/push_test.go @@ -58,6 +58,7 @@ func Test_Push(t *testing.T) { false, &backoff, log.NewNopLogger(), + nil, ) require.NoError(t, err) ts, payload := testPayload() @@ -82,7 +83,7 @@ func Test_Push(t *testing.T) { "user", "secret", false, &backoff, - log.NewNopLogger(), + log.NewNopLogger(), nil, ) require.NoError(t, err) ts, payload := testPayload() @@ -123,6 +124,7 @@ func Test_Push(t *testing.T) { quit: make(chan struct{}), backoff: &backoff, entries: entries{}, + metrics: NewMetrics(nil), } lbls1 := labels.New(labels.Label{Name: "test", Value: "test"}) diff --git a/pkg/pattern/ingester.go b/pkg/pattern/ingester.go index 90e69f843333..1b88387d7b55 100644 --- a/pkg/pattern/ingester.go +++ b/pkg/pattern/ingester.go @@ -403,6 +403,7 @@ func (i *Ingester) GetOrCreateInstance(instanceID string) (*instance, error) { / aggCfg.UseTLS, &aggCfg.BackoffConfig, i.logger, + i.registerer, ) if err != nil { return nil, err diff --git a/pkg/ruler/evaluator_remote.go b/pkg/ruler/evaluator_remote.go index 8ef0fb88e415..63aeebfa9d4f 100644 --- 
a/pkg/ruler/evaluator_remote.go +++ b/pkg/ruler/evaluator_remote.go @@ -25,7 +25,6 @@ import ( "github.com/grafana/dskit/instrument" "github.com/grafana/dskit/middleware" "github.com/grafana/dskit/user" - grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" otgrpc "github.com/opentracing-contrib/go-grpc" "github.com/opentracing/opentracing-go" "github.com/prometheus/client_golang/prometheus" @@ -182,11 +181,9 @@ func DialQueryFrontend(cfg *QueryFrontendConfig) (httpgrpc.HTTPClient, error) { PermitWithoutStream: true, }, ), - grpc.WithUnaryInterceptor( - grpc_middleware.ChainUnaryClient( - otgrpc.OpenTracingClientInterceptor(opentracing.GlobalTracer()), - middleware.ClientUserHeaderInterceptor, - ), + grpc.WithChainUnaryInterceptor( + otgrpc.OpenTracingClientInterceptor(opentracing.GlobalTracer()), + middleware.ClientUserHeaderInterceptor, ), grpc.WithDefaultServiceConfig(serviceConfig), }, diff --git a/pkg/ruler/registry.go b/pkg/ruler/registry.go index 29297fcab5a7..67ef1d9bbf32 100644 --- a/pkg/ruler/registry.go +++ b/pkg/ruler/registry.go @@ -8,10 +8,10 @@ import ( "sync" "time" + "dario.cat/mergo" "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/grafana/dskit/user" - "github.com/imdario/mergo" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" promConfig "github.com/prometheus/common/config" diff --git a/pkg/storage/chunk/cache/memcached_client.go b/pkg/storage/chunk/cache/memcached_client.go index ffdc817b68b4..afd976babf36 100644 --- a/pkg/storage/chunk/cache/memcached_client.go +++ b/pkg/storage/chunk/cache/memcached_client.go @@ -19,7 +19,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/sony/gobreaker" + "github.com/sony/gobreaker/v2" "github.com/grafana/loki/v3/pkg/util/constants" "github.com/grafana/loki/v3/pkg/util/jumphash" @@ -50,7 +50,7 @@ type memcachedClient struct { addresses []string 
provider *dns.Provider - cbs map[ /*address*/ string]*gobreaker.CircuitBreaker + cbs map[string]*gobreaker.CircuitBreaker[interface{}] cbFailures uint cbTimeout time.Duration cbInterval time.Duration @@ -152,7 +152,7 @@ func NewMemcachedClient(cfg MemcachedClientConfig, name string, r prometheus.Reg service: cfg.Service, logger: logger, provider: dns.NewProvider(logger, dnsProviderRegisterer, dns.GolangResolverType), - cbs: make(map[string]*gobreaker.CircuitBreaker), + cbs: make(map[string]*gobreaker.CircuitBreaker[interface{}]), cbFailures: cfg.CBFailures, cbInterval: cfg.CBInterval, cbTimeout: cfg.CBTimeout, @@ -201,7 +201,7 @@ func (c *memcachedClient) dialViaCircuitBreaker(network, address string, timeout c.Lock() cb := c.cbs[address] if cb == nil { - cb = gobreaker.NewCircuitBreaker(gobreaker.Settings{ + cb = gobreaker.NewCircuitBreaker[interface{}](gobreaker.Settings{ Name: c.name + ":" + address, Interval: c.cbInterval, Timeout: c.cbTimeout, @@ -295,7 +295,7 @@ func (c *memcachedClient) updateMemcacheServers() error { // Copy across circuit-breakers for current set of addresses, thus // leaving behind any for servers we won't talk to again c.Lock() - newCBs := make(map[string]*gobreaker.CircuitBreaker, len(servers)) + newCBs := make(map[string]*gobreaker.CircuitBreaker[interface{}], len(servers)) for _, address := range servers { if cb, exists := c.cbs[address]; exists { newCBs[address] = cb diff --git a/pkg/storage/chunk/cache/redis_cache_test.go b/pkg/storage/chunk/cache/redis_cache_test.go index 0215b5702e74..91ee7f98362d 100644 --- a/pkg/storage/chunk/cache/redis_cache_test.go +++ b/pkg/storage/chunk/cache/redis_cache_test.go @@ -7,7 +7,7 @@ import ( "github.com/alicebob/miniredis/v2" "github.com/go-kit/log" - "github.com/go-redis/redis/v8" + "github.com/redis/go-redis/v9" "github.com/stretchr/testify/require" ) diff --git a/pkg/storage/chunk/cache/redis_client.go b/pkg/storage/chunk/cache/redis_client.go index 52a718027658..55731c7b11a4 100644 --- 
a/pkg/storage/chunk/cache/redis_client.go +++ b/pkg/storage/chunk/cache/redis_client.go @@ -12,7 +12,7 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/go-redis/redis/v8" + "github.com/redis/go-redis/v9" ) // RedisConfig defines how a RedisCache should be constructed. @@ -63,15 +63,15 @@ func NewRedisClient(cfg *RedisConfig) (*RedisClient, error) { } opt := &redis.UniversalOptions{ - Addrs: endpoints, - MasterName: cfg.MasterName, - Username: cfg.Username, - Password: cfg.Password.String(), - DB: cfg.DB, - PoolSize: cfg.PoolSize, - IdleTimeout: cfg.IdleTimeout, - MaxConnAge: cfg.MaxConnAge, - RouteRandomly: cfg.RouteRandomly, + Addrs: endpoints, + MasterName: cfg.MasterName, + Username: cfg.Username, + Password: cfg.Password.String(), + DB: cfg.DB, + PoolSize: cfg.PoolSize, + ConnMaxIdleTime: cfg.IdleTimeout, + ConnMaxLifetime: cfg.MaxConnAge, + RouteRandomly: cfg.RouteRandomly, } if cfg.EnableTLS { opt.TLSConfig = &tls.Config{InsecureSkipVerify: cfg.InsecureSkipVerify} diff --git a/pkg/storage/chunk/cache/redis_client_test.go b/pkg/storage/chunk/cache/redis_client_test.go index 2a8b7426f56d..275eb9fcb412 100644 --- a/pkg/storage/chunk/cache/redis_client_test.go +++ b/pkg/storage/chunk/cache/redis_client_test.go @@ -7,7 +7,7 @@ import ( "time" "github.com/alicebob/miniredis/v2" - "github.com/go-redis/redis/v8" + "github.com/redis/go-redis/v9" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/pkg/storage/stores/shipper/bloomshipper/blockscache.go b/pkg/storage/stores/shipper/bloomshipper/blockscache.go index a7992af267c5..3e1c1f656c5d 100644 --- a/pkg/storage/stores/shipper/bloomshipper/blockscache.go +++ b/pkg/storage/stores/shipper/bloomshipper/blockscache.go @@ -36,7 +36,7 @@ type Cache interface { Put(ctx context.Context, key string, value BlockDirectory) error PutInc(ctx context.Context, key string, value BlockDirectory) error PutMany(ctx context.Context, keys []string, values []BlockDirectory) error - Get(ctx 
context.Context, key string) (BlockDirectory, bool) + Get(ctx context.Context, key string, opts ...CacheGetOption) (BlockDirectory, bool) Release(ctx context.Context, key string) error Stop() } @@ -288,27 +288,50 @@ func (c *BlocksCache) evict(key string, element *list.Element, reason string) { c.metrics.entriesEvicted.WithLabelValues(reason).Inc() } +type cacheGetOptions struct { + ReportHitMiss bool +} + +func (o *cacheGetOptions) apply(options ...CacheGetOption) { + for _, opt := range options { + opt(o) + } +} + +type CacheGetOption func(opts *cacheGetOptions) + +func WithSkipHitMissMetrics(v bool) CacheGetOption { + return func(opts *cacheGetOptions) { + opts.ReportHitMiss = !v + } +} + // Get implements Cache. // Get returns the stored value against the given key. -func (c *BlocksCache) Get(ctx context.Context, key string) (BlockDirectory, bool) { +func (c *BlocksCache) Get(ctx context.Context, key string, opts ...CacheGetOption) (BlockDirectory, bool) { if ctx.Err() != nil { return BlockDirectory{}, false } + opt := &cacheGetOptions{ReportHitMiss: true} + opt.apply(opts...) 
+ c.lock.Lock() defer c.lock.Unlock() - entry := c.get(key) + entry := c.get(key, opt) if entry == nil { return BlockDirectory{}, false } return entry.Value, true } -func (c *BlocksCache) get(key string) *Entry { +func (c *BlocksCache) get(key string, opt *cacheGetOptions) *Entry { element, exists := c.entries[key] if !exists { - c.metrics.misses.Inc() + if opt.ReportHitMiss { + c.metrics.misses.Inc() + } return nil } @@ -317,7 +340,10 @@ func (c *BlocksCache) get(key string) *Entry { c.lru.MoveToFront(element) - c.metrics.hits.Inc() + if opt.ReportHitMiss { + c.metrics.hits.Inc() + } + return entry } diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher.go b/pkg/storage/stores/shipper/bloomshipper/fetcher.go index 3e81e2da5b1d..fb22c89869a0 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher.go @@ -22,7 +22,7 @@ import ( "github.com/grafana/loki/v3/pkg/util/spanlogger" ) -var downloadQueueCapacity = 10000 +var downloadQueueCapacity = 100000 type options struct { ignoreNotFound bool // ignore 404s from object storage; default=true @@ -31,6 +31,8 @@ type options struct { // NB(owen-d): this can only be safely used when blooms are not captured outside // of iteration or it can introduce use-after-free bugs usePool mempool.Allocator + + CacheGetOptions []CacheGetOption } func (o *options) apply(opts ...FetchOption) { @@ -59,6 +61,12 @@ func WithPool(v mempool.Allocator) FetchOption { } } +func WithCacheGetOptions(cacheOpts ...CacheGetOption) FetchOption { + return func(opts *options) { + opts.CacheGetOptions = cacheOpts + } +} + type fetcher interface { FetchMetas(ctx context.Context, refs []MetaRef) ([]Meta, error) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) @@ -244,7 +252,7 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc var enqueueTime time.Duration for i := 0; i < n; i++ { key := cacheKey(refs[i]) 
- dir, isFound, err := f.fromCache(ctx, key) + dir, isFound, err := f.fromCache(ctx, key, cfg.CacheGetOptions...) if err != nil { return results, err } @@ -376,14 +384,14 @@ func (f *Fetcher) processTask(ctx context.Context, task downloadRequest[BlockRef } } -func (f *Fetcher) fromCache(ctx context.Context, key string) (BlockDirectory, bool, error) { +func (f *Fetcher) fromCache(ctx context.Context, key string, opts ...CacheGetOption) (BlockDirectory, bool, error) { var zero BlockDirectory if ctx.Err() != nil { return zero, false, errors.Wrap(ctx.Err(), "from cache") } - item, found := f.blocksCache.Get(ctx, key) + item, found := f.blocksCache.Get(ctx, key, opts...) // item wasn't found if !found { diff --git a/pkg/storage/stores/shipper/indexshipper/shipper.go b/pkg/storage/stores/shipper/indexshipper/shipper.go index 5b3037c45b08..2917b1fc7974 100644 --- a/pkg/storage/stores/shipper/indexshipper/shipper.go +++ b/pkg/storage/stores/shipper/indexshipper/shipper.go @@ -33,6 +33,9 @@ const ( ModeReadOnly = Mode("RO") // ModeWriteOnly is to allow only write operations ModeWriteOnly = Mode("WO") + // ModeDisabled is a no-op implementation which does nothing & does not error. + // It's used by the blockbuilder which handles index operations independently. 
+ ModeDisabled = Mode("NO") // FilesystemObjectStoreType holds the periodic config type for the filesystem store FilesystemObjectStoreType = "filesystem" @@ -142,6 +145,8 @@ type indexShipper struct { func NewIndexShipper(prefix string, cfg Config, storageClient client.ObjectClient, limits downloads.Limits, tenantFilter downloads.TenantFilter, open index.OpenIndexFileFunc, tableRangeToHandle config.TableRange, reg prometheus.Registerer, logger log.Logger) (IndexShipper, error) { switch cfg.Mode { + case ModeDisabled: + return Noop{}, nil case ModeReadOnly, ModeWriteOnly, ModeReadWrite: default: return nil, fmt.Errorf("invalid mode: %v", cfg.Mode) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go index 23a951deacbd..be0a343309c5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go @@ -134,8 +134,8 @@ func setupMultiTenantIndex(t *testing.T, indexFormat int, userStreams map[string dst := NewPrefixedIdentifier( MultitenantTSDBIdentifier{ - nodeName: "test", - ts: ts, + NodeName: "test", + Ts: ts, }, destDir, "", @@ -239,7 +239,7 @@ func TestCompactor_Compact(t *testing.T) { PeriodicTableConfig: config.PeriodicTableConfig{Period: config.ObjectStorageIndexRequiredPeriod}}, Schema: "v12", } - indexBkts := indexBuckets(now, now, []config.TableRange{periodConfig.GetIndexTableNumberRange(config.DayTime{Time: now})}) + indexBkts := IndexBuckets(now, now, []config.TableRange{periodConfig.GetIndexTableNumberRange(config.DayTime{Time: now})}) tableName := indexBkts[0] lbls1 := mustParseLabels(`{foo="bar", a="b"}`) @@ -497,8 +497,8 @@ func TestCompactor_Compact(t *testing.T) { t.Run(name, func(t *testing.T) { tempDir := t.TempDir() objectStoragePath := filepath.Join(tempDir, objectsStorageDirName) - tablePathInStorage := filepath.Join(objectStoragePath, tableName.prefix) - tableWorkingDirectory := 
filepath.Join(tempDir, workingDirName, tableName.prefix) + tablePathInStorage := filepath.Join(objectStoragePath, tableName.Prefix) + tableWorkingDirectory := filepath.Join(tempDir, workingDirName, tableName.Prefix) require.NoError(t, util.EnsureDirectory(objectStoragePath)) require.NoError(t, util.EnsureDirectory(tablePathInStorage)) @@ -551,7 +551,7 @@ func TestCompactor_Compact(t *testing.T) { objectClient, err := local.NewFSObjectClient(local.FSConfig{Directory: objectStoragePath}) require.NoError(t, err) - _, commonPrefixes, err := objectClient.List(context.Background(), tableName.prefix, "/") + _, commonPrefixes, err := objectClient.List(context.Background(), tableName.Prefix, "/") require.NoError(t, err) initializedIndexSets := map[string]compactor.IndexSet{} @@ -559,19 +559,19 @@ func TestCompactor_Compact(t *testing.T) { existingUserIndexSets := make(map[string]compactor.IndexSet, len(commonPrefixes)) for _, commonPrefix := range commonPrefixes { userID := path.Base(string(commonPrefix)) - idxSet, err := newMockIndexSet(userID, tableName.prefix, filepath.Join(tableWorkingDirectory, userID), objectClient) + idxSet, err := newMockIndexSet(userID, tableName.Prefix, filepath.Join(tableWorkingDirectory, userID), objectClient) require.NoError(t, err) existingUserIndexSets[userID] = idxSet initializedIndexSets[userID] = idxSet } - commonIndexSet, err := newMockIndexSet("", tableName.prefix, tableWorkingDirectory, objectClient) + commonIndexSet, err := newMockIndexSet("", tableName.Prefix, tableWorkingDirectory, objectClient) require.NoError(t, err) // build TableCompactor and compact the index tCompactor := newTableCompactor(context.Background(), commonIndexSet, existingUserIndexSets, func(userID string) (compactor.IndexSet, error) { - idxSet, err := newMockIndexSet(userID, tableName.prefix, filepath.Join(tableWorkingDirectory, userID), objectClient) + idxSet, err := newMockIndexSet(userID, tableName.Prefix, filepath.Join(tableWorkingDirectory, userID), 
objectClient) require.NoError(t, err) initializedIndexSetsMtx.Lock() @@ -875,9 +875,9 @@ func setupCompactedIndex(t *testing.T) *testContext { schemaCfg := config.SchemaConfig{ Configs: []config.PeriodConfig{periodConfig}, } - indexBuckets := indexBuckets(now, now, []config.TableRange{periodConfig.GetIndexTableNumberRange(config.DayTime{Time: now})}) + indexBuckets := IndexBuckets(now, now, []config.TableRange{periodConfig.GetIndexTableNumberRange(config.DayTime{Time: now})}) tableName := indexBuckets[0] - tableInterval := retention.ExtractIntervalFromTableName(tableName.prefix) + tableInterval := retention.ExtractIntervalFromTableName(tableName.Prefix) // shiftTableStart shift tableInterval.Start by the given amount of milliseconds. // It is used for building chunkmetas relative to start time of the table. shiftTableStart := func(ms int64) int64 { @@ -900,7 +900,7 @@ func setupCompactedIndex(t *testing.T) *testContext { builder.FinalizeChunks() - return newCompactedIndex(context.Background(), tableName.prefix, buildUserID(0), t.TempDir(), periodConfig, builder) + return newCompactedIndex(context.Background(), tableName.Prefix, buildUserID(0), t.TempDir(), periodConfig, builder) } expectedChunkEntries := map[string][]retention.ChunkEntry{ diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/identifier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/identifier.go index 149d41bfa944..eab26fe643d5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/identifier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/identifier.go @@ -161,13 +161,13 @@ func ParseSingleTenantTSDBPath(p string) (id SingleTenantTSDBIdentifier, ok bool } type MultitenantTSDBIdentifier struct { - nodeName string - ts time.Time + NodeName string + Ts time.Time } // Name builds filename with format + `-` + ` func (id MultitenantTSDBIdentifier) Name() string { - return fmt.Sprintf("%d-%s.tsdb", id.ts.Unix(), id.nodeName) + return fmt.Sprintf("%d-%s.tsdb", id.Ts.Unix(), id.NodeName) } 
func (id MultitenantTSDBIdentifier) Path() string { @@ -200,7 +200,7 @@ func parseMultitenantTSDBNameFromBase(name string) (res MultitenantTSDBIdentifie } return MultitenantTSDBIdentifier{ - ts: time.Unix(int64(ts), 0), - nodeName: strings.Join(xs[1:], "-"), + Ts: time.Unix(int64(ts), 0), + NodeName: strings.Join(xs[1:], "-"), }, true } diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go index 756d354d1ed6..0766bd058fdf 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go @@ -62,6 +62,10 @@ const ( fingerprintInterval = 1 << 10 millisecondsInHour = int64(time.Hour / time.Millisecond) + + // reserved; used in multitenant indices to signal the tenant. Eventually compacted away when + // single tenant indices are created. + TenantLabel = "__loki_tenant__" ) type indexWriterStage uint8 diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go index b0d1824936d5..6ca252770169 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go @@ -40,9 +40,9 @@ func (i indexIterFunc) For(_ context.Context, _ int, f func(context.Context, Ind func (i *indexShipperQuerier) indices(ctx context.Context, from, through model.Time, user string) (Index, error) { itr := indexIterFunc(func(f func(context.Context, Index) error) error { // Ensure we query both per tenant and multitenant TSDBs - idxBuckets := indexBuckets(from, through, []config.TableRange{i.tableRange}) + idxBuckets := IndexBuckets(from, through, []config.TableRange{i.tableRange}) for _, bkt := range idxBuckets { - if err := i.shipper.ForEachConcurrent(ctx, bkt.prefix, user, func(multitenant bool, idx shipperindex.Index) error { + if err := 
i.shipper.ForEachConcurrent(ctx, bkt.Prefix, user, func(multitenant bool, idx shipperindex.Index) error { impl, ok := idx.(Index) if !ok { return fmt.Errorf("unexpected shipper index type: %T", idx) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go b/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go index 96f56d7021f4..84c250eb7464 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go @@ -165,13 +165,13 @@ func (m *tsdbManager) buildFromHead(heads *tenantHeads, indexShipper indexshippe // chunks may overlap index period bounds, in which case they're written to multiple pds := make(map[string]chunkInfo) for _, chk := range chks { - idxBuckets := indexBuckets(chk.From(), chk.Through(), tableRanges) + idxBuckets := IndexBuckets(chk.From(), chk.Through(), tableRanges) for _, bucket := range idxBuckets { - chkinfo := pds[bucket.prefix] + chkinfo := pds[bucket.Prefix] chkinfo.chunkMetas = append(chkinfo.chunkMetas, chk) - chkinfo.tsdbFormat = bucket.tsdbFormat - pds[bucket.prefix] = chkinfo + chkinfo.tsdbFormat = bucket.TsdbFormat + pds[bucket.Prefix] = chkinfo } } @@ -208,8 +208,8 @@ func (m *tsdbManager) buildFromHead(heads *tenantHeads, indexShipper indexshippe dstDir := filepath.Join(managerMultitenantDir(m.dir), fmt.Sprint(p)) dst := NewPrefixedIdentifier( MultitenantTSDBIdentifier{ - nodeName: m.nodeName, - ts: heads.start, + NodeName: m.nodeName, + Ts: heads.start, }, dstDir, "", @@ -300,19 +300,24 @@ func (m *tsdbManager) BuildFromWALs(t time.Time, ids []WALIdentifier, legacy boo return nil } -type indexInfo struct { - prefix string - tsdbFormat int +type IndexInfo struct { + BucketStart model.Time + Prefix string + TsdbFormat int } -func indexBuckets(from, through model.Time, tableRanges config.TableRanges) (res []indexInfo) { +func IndexBuckets(from, through model.Time, tableRanges config.TableRanges) (res []IndexInfo) { start := from.Time().UnixNano() / 
int64(config.ObjectStorageIndexRequiredPeriod) end := through.Time().UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) for cur := start; cur <= end; cur++ { cfg := tableRanges.ConfigForTableNumber(cur) if cfg != nil { tsdbFormat, _ := cfg.TSDBFormat() // Ignoring error, as any valid period config should return valid format. - res = append(res, indexInfo{prefix: cfg.IndexTables.Prefix + strconv.Itoa(int(cur)), tsdbFormat: tsdbFormat}) + res = append(res, IndexInfo{ + BucketStart: model.TimeFromUnixNano(cur * int64(config.ObjectStorageIndexRequiredPeriod)), + Prefix: cfg.IndexTables.Prefix + strconv.Itoa(int(cur)), + TsdbFormat: tsdbFormat, + }) } } if len(res) == 0 { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/manager_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/manager_test.go new file mode 100644 index 000000000000..6816b0aeb208 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/manager_test.go @@ -0,0 +1,71 @@ +package tsdb + +import ( + "testing" + "time" + + "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/types" +) + +func TestIndexBuckets(t *testing.T) { + var ( + day0 = model.Time(0) + day1 = day0.Add(24 * time.Hour) + day2 = day1.Add(24 * time.Hour) + periods = []config.PeriodConfig{ + { + From: config.NewDayTime(day0), + Schema: "v12", + IndexType: "tsdb", + IndexTables: config.IndexPeriodicTableConfig{ + PeriodicTableConfig: config.PeriodicTableConfig{ + Prefix: "index/", + Period: 24 * time.Hour, + }, + }, + }, + { + From: config.NewDayTime(day2), + Schema: "v13", + IndexType: "tsdb", + IndexTables: config.IndexPeriodicTableConfig{ + PeriodicTableConfig: config.PeriodicTableConfig{ + Prefix: "index2/", + Period: 24 * time.Hour, + }, + }, + }, + } + + tableRanges = 
config.GetIndexStoreTableRanges(types.TSDBType, periods) + ) + tests := []struct { + name string + from model.Time + through model.Time + expectedInfo []IndexInfo + }{ + { + name: "single table range", + from: day0, + through: day2, + expectedInfo: []IndexInfo{ + {BucketStart: day0, TsdbFormat: index.FormatV2, Prefix: "index/0"}, + {BucketStart: day1, TsdbFormat: index.FormatV2, Prefix: "index/1"}, + {BucketStart: day2, TsdbFormat: index.FormatV3, Prefix: "index2/2"}, + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + res := IndexBuckets(tc.from, tc.through, tableRanges) + require.Equal(t, tc.expectedInfo, res) + }) + } +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/store.go b/pkg/storage/stores/shipper/indexshipper/tsdb/store.go index 1ef58c32a1e5..8ca8a2489e6d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/store.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/store.go @@ -85,6 +85,13 @@ func (s *store) init(name, prefix string, indexShipperCfg indexshipper.Config, s var indices []Index opts := DefaultIndexClientOptions() + // early return in case index shipper is disabled. 
+ if indexShipperCfg.Mode == indexshipper.ModeDisabled { + s.indexWriter = noopIndexWriter{} + s.Reader = NewIndexClient(NoopIndex{}, opts, limits) + return nil + } + if indexShipperCfg.Mode == indexshipper.ModeWriteOnly { // We disable bloom filters on write nodes // for the Stats() methods as it's of relatively little @@ -172,3 +179,9 @@ type failingIndexWriter struct{} func (f failingIndexWriter) Append(_ string, _ labels.Labels, _ uint64, _ tsdbindex.ChunkMetas) error { return fmt.Errorf("index writer is not initialized due to tsdb store being initialized in read-only mode") } + +type noopIndexWriter struct{} + +func (f noopIndexWriter) Append(_ string, _ labels.Labels, _ uint64, _ tsdbindex.ChunkMetas) error { + return nil +} diff --git a/pkg/tool/commands/audit.go b/pkg/tool/commands/audit.go index dae7e2c10bb5..6cc49b9b2a56 100644 --- a/pkg/tool/commands/audit.go +++ b/pkg/tool/commands/audit.go @@ -6,9 +6,9 @@ import ( "fmt" "os" + "github.com/alecthomas/kingpin/v2" "github.com/go-kit/log" "github.com/go-kit/log/level" - "gopkg.in/alecthomas/kingpin.v2" "github.com/grafana/loki/v3/pkg/tool/audit" util_cfg "github.com/grafana/loki/v3/pkg/util/cfg" diff --git a/pkg/tool/commands/rules.go b/pkg/tool/commands/rules.go index 2eb5c0eb6cdd..c030e9c24dfc 100644 --- a/pkg/tool/commands/rules.go +++ b/pkg/tool/commands/rules.go @@ -8,11 +8,11 @@ import ( "reflect" "strings" + "github.com/alecthomas/kingpin/v2" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/model/rulefmt" log "github.com/sirupsen/logrus" - "gopkg.in/alecthomas/kingpin.v2" yamlv3 "gopkg.in/yaml.v3" "github.com/grafana/loki/v3/pkg/tool/client" diff --git a/pkg/tool/printer/printer.go b/pkg/tool/printer/printer.go index 1a696a8ffdf9..e0efa0694877 100644 --- a/pkg/tool/printer/printer.go +++ b/pkg/tool/printer/printer.go @@ -9,7 +9,7 @@ import ( "strings" "text/tabwriter" - "github.com/alecthomas/chroma/quick" + 
"github.com/alecthomas/chroma/v2/quick" "github.com/mitchellh/colorstring" "gopkg.in/yaml.v3" diff --git a/pkg/tool/printer/printer_test.go b/pkg/tool/printer/printer_test.go index c8650d9bd610..a9c9167030cf 100644 --- a/pkg/tool/printer/printer_test.go +++ b/pkg/tool/printer/printer_test.go @@ -4,7 +4,7 @@ import ( "bytes" "testing" - "github.com/alecthomas/chroma/quick" + "github.com/alecthomas/chroma/v2/quick" "github.com/prometheus/prometheus/model/rulefmt" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/util/http.go b/pkg/util/http.go index 3fdfca6df24f..2d9b8a7e2903 100644 --- a/pkg/util/http.go +++ b/pkg/util/http.go @@ -23,6 +23,13 @@ import ( const messageSizeLargerErrFmt = "received message larger than max (%d vs %d)" +const ( + HTTPRateLimited = "rate_limited" + HTTPServerError = "server_error" + HTTPErrorUnknown = "unknown" + HTTPClientError = "client_error" +) + // IsRequestBodyTooLarge returns true if the error is "http: request body too large". 
func IsRequestBodyTooLarge(err error) bool { return err != nil && strings.Contains(err.Error(), "http: request body too large") @@ -307,3 +314,28 @@ func IsValidURL(endpoint string) bool { return u.Scheme != "" && u.Host != "" } + +func ErrorTypeFromHTTPStatus(status int) string { + errorType := HTTPErrorUnknown + if status == 429 { + errorType = HTTPRateLimited + } else if status/100 == 5 { + errorType = HTTPServerError + } else if status/100 != 2 { + errorType = HTTPClientError + } + + return errorType +} + +func IsError(status int) bool { + return status/200 != 0 +} + +func IsServerError(status int) bool { + return status/100 == 5 +} + +func IsRateLimited(status int) bool { + return status == 429 +} diff --git a/pkg/util/http_test.go b/pkg/util/http_test.go index d032085db502..c41ba0eb1525 100644 --- a/pkg/util/http_test.go +++ b/pkg/util/http_test.go @@ -218,3 +218,65 @@ func TestIsRequestBodyTooLargeRegression(t *testing.T) { _, err := io.ReadAll(http.MaxBytesReader(httptest.NewRecorder(), io.NopCloser(bytes.NewReader([]byte{1, 2, 3, 4})), 1)) assert.True(t, util.IsRequestBodyTooLarge(err)) } + +func TestErrorTypeFromHTTPStatus(t *testing.T) { + tests := []struct { + name string + status int + expectedResult string + }{ + { + name: "rate limited error", + status: 429, + expectedResult: util.HTTPRateLimited, + }, + { + name: "server error - 500", + status: 500, + expectedResult: util.HTTPServerError, + }, + { + name: "server error - 503", + status: 503, + expectedResult: util.HTTPServerError, + }, + { + name: "client error - 400", + status: 400, + expectedResult: util.HTTPClientError, + }, + { + name: "client error - 404", + status: 404, + expectedResult: util.HTTPClientError, + }, + { + name: "success status should return unknown - 200", + status: 200, + expectedResult: util.HTTPErrorUnknown, + }, + { + name: "success status should return unknown - 201", + status: 201, + expectedResult: util.HTTPErrorUnknown, + }, + { + name: "invalid status should return 
unknown - 600", + status: 600, + expectedResult: util.HTTPClientError, + }, + { + name: "invalid status should return unknown - -1", + status: -1, + expectedResult: util.HTTPClientError, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := util.ErrorTypeFromHTTPStatus(tt.status) + assert.Equal(t, tt.expectedResult, result, "ErrorTypeFromHTTPStatus(%d) = %s; want %s", + tt.status, result, tt.expectedResult) + }) + } +} diff --git a/pkg/util/server/recovery.go b/pkg/util/server/recovery.go index ce3ad109512b..58eac396dd75 100644 --- a/pkg/util/server/recovery.go +++ b/pkg/util/server/recovery.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/dskit/httpgrpc" "github.com/grafana/dskit/middleware" - grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" + grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/v2/interceptors/recovery" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index 7387cc567c6a..23c3b4939f84 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -214,6 +214,7 @@ type Limits struct { BloomSplitSeriesKeyspaceBy int `yaml:"bloom_split_series_keyspace_by" json:"bloom_split_series_keyspace_by" category:"experimental"` BloomTaskTargetSeriesChunkSize flagext.ByteSize `yaml:"bloom_task_target_series_chunk_size" json:"bloom_task_target_series_chunk_size" category:"experimental"` BloomBlockEncoding string `yaml:"bloom_block_encoding" json:"bloom_block_encoding" category:"experimental"` + BloomPrefetchBlocks bool `yaml:"bloom_prefetch_blocks" json:"bloom_prefetch_blocks" category:"experimental"` BloomMaxBlockSize flagext.ByteSize `yaml:"bloom_max_block_size" json:"bloom_max_block_size" category:"experimental"` BloomMaxBloomSize flagext.ByteSize `yaml:"bloom_max_bloom_size" json:"bloom_max_bloom_size" category:"experimental"` @@ -399,6 +400,7 @@ func (l *Limits) 
RegisterFlags(f *flag.FlagSet) { f.DurationVar(&l.BloomGatewayCacheKeyInterval, "bloom-gateway.cache-key-interval", 15*time.Minute, "Experimental. Interval for computing the cache key in the Bloom Gateway.") f.StringVar(&l.BloomBlockEncoding, "bloom-build.block-encoding", "none", "Experimental. Compression algorithm for bloom block pages.") + f.BoolVar(&l.BloomPrefetchBlocks, "bloom-build.prefetch-blocks", false, "Experimental. Prefetch blocks on bloom gateways as soon as they are built.") _ = l.BloomMaxBlockSize.Set(defaultBloomBuildMaxBlockSize) f.Var(&l.BloomMaxBlockSize, "bloom-build.max-block-size", @@ -1055,6 +1057,10 @@ func (o *Overrides) BuilderResponseTimeout(userID string) time.Duration { return o.getOverridesForUser(userID).BloomBuilderResponseTimeout } +func (o *Overrides) PrefetchBloomBlocks(userID string) bool { + return o.getOverridesForUser(userID).BloomPrefetchBlocks +} + func (o *Overrides) BloomTaskMaxRetries(userID string) int { return o.getOverridesForUser(userID).BloomBuildTaskMaxRetries } diff --git a/production/docker/docker-compose.yaml b/production/docker/docker-compose.yaml index 2359b199e2c2..a29e5efbf4db 100644 --- a/production/docker/docker-compose.yaml +++ b/production/docker/docker-compose.yaml @@ -24,7 +24,7 @@ services: - loki grafana: - image: grafana/grafana:9.5.21 + image: grafana/grafana:11.3.1 ports: - "3000:3000" environment: @@ -38,7 +38,7 @@ services: - loki prometheus: - image: prom/prometheus:v2.55.1 + image: prom/prometheus:v3.0.0 ports: - 9090 volumes: diff --git a/production/helm/loki/CHANGELOG.md b/production/helm/loki/CHANGELOG.md index 5f477f978c2e..b1273e533c1e 100644 --- a/production/helm/loki/CHANGELOG.md +++ b/production/helm/loki/CHANGELOG.md @@ -13,6 +13,12 @@ Entries should include a reference to the pull request that introduced the chang [//]: # ( : do not remove this line. This locator is used by the CI pipeline to automatically create a changelog entry for each new Loki release. 
Add other chart versions and respective changelog entries bellow this line.) +## 6.20.0 + +- [CHANGE] Changed version of Grafana Loki to 3.3.0 + +## 6.19.0-weekly.227 + - [ENHANCEMENT] Expose Topology Spread Constraints in Helm chart templates and default values. ## 6.19.0 diff --git a/production/helm/loki/Chart.yaml b/production/helm/loki/Chart.yaml index ab868acb4b3b..7e09fdbeee3b 100644 --- a/production/helm/loki/Chart.yaml +++ b/production/helm/loki/Chart.yaml @@ -2,8 +2,8 @@ apiVersion: v2 name: loki description: Helm chart for Grafana Loki and Grafana Enterprise Logs supporting both simple, scalable and distributed modes. type: application -appVersion: 3.2.0 -version: 6.19.0 +appVersion: 3.3.0 +version: 6.21.0 home: https://grafana.github.io/helm-charts sources: - https://github.com/grafana/loki diff --git a/production/helm/loki/README.md b/production/helm/loki/README.md index 79cf060fb739..58ff6ccc7a6c 100644 --- a/production/helm/loki/README.md +++ b/production/helm/loki/README.md @@ -1,6 +1,6 @@ # loki -![Version: 6.19.0](https://img.shields.io/badge/Version-6.19.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 3.2.0](https://img.shields.io/badge/AppVersion-3.2.0-informational?style=flat-square) +![Version: 6.21.0](https://img.shields.io/badge/Version-6.21.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 3.3.0](https://img.shields.io/badge/AppVersion-3.3.0-informational?style=flat-square) Helm chart for Grafana Loki and Grafana Enterprise Logs supporting both simple, scalable and distributed modes. 
diff --git a/production/helm/loki/templates/admin-api/deployment-admin-api.yaml b/production/helm/loki/templates/admin-api/deployment-admin-api.yaml index f85bbf90014b..2d356882204b 100644 --- a/production/helm/loki/templates/admin-api/deployment-admin-api.yaml +++ b/production/helm/loki/templates/admin-api/deployment-admin-api.yaml @@ -38,6 +38,12 @@ spec: {{- toYaml . | nindent 8 }} {{- end }} spec: + {{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.Version }} + {{- with .Values.adminApi.topologySpreadConstraints }} + topologySpreadConstraints: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- end }} serviceAccountName: {{ template "loki.serviceAccountName" . }} {{- if .Values.adminApi.priorityClassName }} priorityClassName: {{ .Values.adminApi.priorityClassName }} @@ -167,4 +173,4 @@ spec: path: CAs/public.crt {{- end }} {{- end }} -{{- end }} \ No newline at end of file +{{- end }} diff --git a/production/helm/loki/templates/index-gateway/poddisruptionbudget-index-gateway.yaml b/production/helm/loki/templates/index-gateway/poddisruptionbudget-index-gateway.yaml index 22ba1a0b4c33..cc230c2559f5 100644 --- a/production/helm/loki/templates/index-gateway/poddisruptionbudget-index-gateway.yaml +++ b/production/helm/loki/templates/index-gateway/poddisruptionbudget-index-gateway.yaml @@ -7,6 +7,7 @@ apiVersion: {{ include "loki.pdb.apiVersion" . }} kind: PodDisruptionBudget metadata: name: {{ include "loki.indexGatewayFullname" . }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.indexGatewayLabels" . 
| nindent 4 }} spec: diff --git a/production/helm/loki/templates/index-gateway/service-index-gateway-headless.yaml b/production/helm/loki/templates/index-gateway/service-index-gateway-headless.yaml index 06506582f9e3..731a9844f858 100644 --- a/production/helm/loki/templates/index-gateway/service-index-gateway-headless.yaml +++ b/production/helm/loki/templates/index-gateway/service-index-gateway-headless.yaml @@ -4,6 +4,7 @@ apiVersion: v1 kind: Service metadata: name: {{ include "loki.indexGatewayFullname" . }}-headless + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.indexGatewaySelectorLabels" . | nindent 4 }} prometheus.io/service-monitor: "false" diff --git a/production/helm/loki/templates/index-gateway/service-index-gateway.yaml b/production/helm/loki/templates/index-gateway/service-index-gateway.yaml index 822a0ce692d3..b1a3523a42f6 100644 --- a/production/helm/loki/templates/index-gateway/service-index-gateway.yaml +++ b/production/helm/loki/templates/index-gateway/service-index-gateway.yaml @@ -4,6 +4,7 @@ apiVersion: v1 kind: Service metadata: name: {{ include "loki.indexGatewayFullname" . }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.indexGatewayLabels" . | nindent 4 }} {{- with .Values.indexGateway.serviceLabels }} diff --git a/production/helm/loki/templates/index-gateway/statefulset-index-gateway.yaml b/production/helm/loki/templates/index-gateway/statefulset-index-gateway.yaml index 8688a55bcb8f..e24985900545 100644 --- a/production/helm/loki/templates/index-gateway/statefulset-index-gateway.yaml +++ b/production/helm/loki/templates/index-gateway/statefulset-index-gateway.yaml @@ -4,6 +4,7 @@ apiVersion: apps/v1 kind: StatefulSet metadata: name: {{ include "loki.indexGatewayFullname" . }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.indexGatewayLabels" . 
| nindent 4 }} {{- with .Values.loki.annotations }} diff --git a/production/helm/loki/templates/ruler/configmap-ruler.yaml b/production/helm/loki/templates/ruler/configmap-ruler.yaml index b74f024b415f..3a0c53397ccc 100644 --- a/production/helm/loki/templates/ruler/configmap-ruler.yaml +++ b/production/helm/loki/templates/ruler/configmap-ruler.yaml @@ -6,6 +6,7 @@ apiVersion: v1 kind: ConfigMap metadata: name: {{ include "loki.rulerFullname" $ }}-{{ include "loki.rulerRulesDirName" $dir }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.rulerLabels" $ | nindent 4 }} data: diff --git a/production/helm/loki/templates/ruler/poddisruptionbudget-ruler.yaml b/production/helm/loki/templates/ruler/poddisruptionbudget-ruler.yaml index 82417651862d..b9c03cf12d34 100644 --- a/production/helm/loki/templates/ruler/poddisruptionbudget-ruler.yaml +++ b/production/helm/loki/templates/ruler/poddisruptionbudget-ruler.yaml @@ -7,6 +7,7 @@ apiVersion: {{ include "loki.pdb.apiVersion" . }} kind: PodDisruptionBudget metadata: name: {{ include "loki.rulerFullname" . }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.rulerLabels" . | nindent 4 }} spec: diff --git a/production/helm/loki/templates/ruler/service-ruler.yaml b/production/helm/loki/templates/ruler/service-ruler.yaml index 4d58ec85b42a..88476832da1c 100644 --- a/production/helm/loki/templates/ruler/service-ruler.yaml +++ b/production/helm/loki/templates/ruler/service-ruler.yaml @@ -4,6 +4,7 @@ apiVersion: v1 kind: Service metadata: name: {{ include "loki.rulerFullname" . }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.rulerSelectorLabels" . 
| nindent 4 }} {{- with .Values.ruler.serviceLabels }} diff --git a/production/helm/loki/templates/ruler/statefulset-ruler.yaml b/production/helm/loki/templates/ruler/statefulset-ruler.yaml index cff41890f01c..87b42f2219e7 100644 --- a/production/helm/loki/templates/ruler/statefulset-ruler.yaml +++ b/production/helm/loki/templates/ruler/statefulset-ruler.yaml @@ -4,6 +4,7 @@ apiVersion: apps/v1 kind: StatefulSet metadata: name: {{ include "loki.rulerFullname" . }} + namespace: {{ $.Release.Namespace }} labels: {{- include "loki.rulerLabels" . | nindent 4 }} app.kubernetes.io/part-of: memberlist diff --git a/production/helm/loki/values.yaml b/production/helm/loki/values.yaml index 7dbde9d9fed1..78961a724300 100644 --- a/production/helm/loki/values.yaml +++ b/production/helm/loki/values.yaml @@ -56,7 +56,7 @@ loki: # -- Docker image repository repository: grafana/loki # -- Overrides the image tag whose default is the chart's appVersion - tag: null + tag: 3.3.0 # -- Overrides the image tag with an image digest digest: null # -- Docker image pull policy @@ -518,7 +518,7 @@ enterprise: # -- Docker image repository repository: grafana/enterprise-logs # -- Docker image tag - tag: null + tag: 3.3.0 # -- Overrides the image tag with an image digest digest: null # -- Docker image pull policy @@ -876,6 +876,8 @@ adminApi: affinity: {} # -- Node selector for admin-api Pods nodeSelector: {} + # -- Topology Spread Constraints for admin-api pods + topologySpreadConstraints: [] # -- Tolerations for admin-api Pods tolerations: [] # -- Grace period to allow the admin-api to shutdown before it is killed diff --git a/production/terraform/modules/s3/versions.tf b/production/terraform/modules/s3/versions.tf index 56898d60ca93..88c6163b2227 100644 --- a/production/terraform/modules/s3/versions.tf +++ b/production/terraform/modules/s3/versions.tf @@ -2,7 +2,7 @@ terraform { required_providers { aws = { source = "hashicorp/aws" - version = "~> 5.76.0" + version = "~> 5.77.0" } random = { 
diff --git a/tools/gcplog/main.tf b/tools/gcplog/main.tf index c4725c4ff0d0..71ccf30b9af6 100644 --- a/tools/gcplog/main.tf +++ b/tools/gcplog/main.tf @@ -2,7 +2,7 @@ terraform { required_providers { google = { source = "hashicorp/google" - version = "6.11.2" + version = "6.12.0" } } } diff --git a/tools/lambda-promtail/go.mod b/tools/lambda-promtail/go.mod index 8d9be5189aa3..e485cf814d2b 100644 --- a/tools/lambda-promtail/go.mod +++ b/tools/lambda-promtail/go.mod @@ -4,9 +4,9 @@ go 1.22 require ( github.com/aws/aws-lambda-go v1.47.0 - github.com/aws/aws-sdk-go-v2 v1.32.4 - github.com/aws/aws-sdk-go-v2/config v1.28.4 - github.com/aws/aws-sdk-go-v2/service/s3 v1.67.0 + github.com/aws/aws-sdk-go-v2 v1.32.5 + github.com/aws/aws-sdk-go-v2/config v1.28.5 + github.com/aws/aws-sdk-go-v2/service/s3 v1.68.0 github.com/go-kit/log v0.2.1 github.com/gogo/protobuf v1.3.2 github.com/golang/snappy v0.0.4 @@ -23,21 +23,21 @@ require ( github.com/Masterminds/sprig/v3 v3.2.3 // indirect github.com/alecthomas/units v0.0.0-20240626203959-61d1e3462e30 // indirect github.com/armon/go-metrics v0.4.1 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.17.45 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.46 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.20 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.24 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.24 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.23 // indirect - 
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.4 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.4 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.24.5 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.33.0 // indirect - github.com/aws/smithy-go v1.22.0 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.24 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.5 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.5 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.24.6 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.1 // indirect + github.com/aws/smithy-go v1.22.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 // indirect github.com/cespare/xxhash v1.1.0 // indirect diff --git a/tools/lambda-promtail/go.sum b/tools/lambda-promtail/go.sum index 0eec1c0fae41..44abdc2e222e 100644 --- a/tools/lambda-promtail/go.sum +++ b/tools/lambda-promtail/go.sum @@ -48,42 +48,42 @@ github.com/aws/aws-lambda-go v1.47.0 h1:0H8s0vumYx/YKs4sE7YM0ktwL2eWse+kfopsRI1s github.com/aws/aws-lambda-go v1.47.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A= github.com/aws/aws-sdk-go v1.54.19 h1:tyWV+07jagrNiCcGRzRhdtVjQs7Vy41NwsuOcl0IbVI= github.com/aws/aws-sdk-go v1.54.19/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= -github.com/aws/aws-sdk-go-v2 v1.32.4 h1:S13INUiTxgrPueTmrm5DZ+MiAo99zYzHEFh1UNkOxNE= 
-github.com/aws/aws-sdk-go-v2 v1.32.4/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 h1:pT3hpW0cOHRJx8Y0DfJUEQuqPild8jRGmSFmBgvydr0= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6/go.mod h1:j/I2++U0xX+cr44QjHay4Cvxj6FUbnxrgmqN3H1jTZA= -github.com/aws/aws-sdk-go-v2/config v1.28.4 h1:qgD0MKmkIzZR2DrAjWJcI9UkndjR+8f6sjUQvXh0mb0= -github.com/aws/aws-sdk-go-v2/config v1.28.4/go.mod h1:LgnWnNzHZw4MLplSyEGia0WgJ/kCGD86zGCjvNpehJs= -github.com/aws/aws-sdk-go-v2/credentials v1.17.45 h1:DUgm5lFso57E7150RBgu1JpVQoF8fAPretiDStIuVjg= -github.com/aws/aws-sdk-go-v2/credentials v1.17.45/go.mod h1:dnBpENcPC1ekZrGpSWspX+ZRGzhkvqngT2Qp5xBR1dY= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19 h1:woXadbf0c7enQ2UGCi8gW/WuKmE0xIzxBF/eD94jMKQ= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19/go.mod h1:zminj5ucw7w0r65bP6nhyOd3xL6veAUMc3ElGMoLVb4= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 h1:A2w6m6Tmr+BNXjDsr7M90zkWjsu4JXHwrzPg235STs4= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23/go.mod h1:35EVp9wyeANdujZruvHiQUAo9E3vbhnIO1mTCAxMlY0= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 h1:pgYW9FCabt2M25MoHYCfMrVY2ghiiBKYWUVXfwZs+sU= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23/go.mod h1:c48kLgzO19wAu3CPkDWC28JbaJ+hfQlsdl7I2+oqIbk= +github.com/aws/aws-sdk-go-v2 v1.32.5 h1:U8vdWJuY7ruAkzaOdD7guwJjD06YSKmnKCJs7s3IkIo= +github.com/aws/aws-sdk-go-v2 v1.32.5/go.mod h1:P5WJBrYqqbWVaOxgH0X/FYYD47/nooaPOZPlQdmiN2U= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7 h1:lL7IfaFzngfx0ZwUGOZdsFFnQ5uLvR0hWqqhyE7Q9M8= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.7/go.mod h1:QraP0UcVlQJsmHfioCrveWOC1nbiWUl3ej08h4mXWoc= +github.com/aws/aws-sdk-go-v2/config v1.28.5 h1:Za41twdCXbuyyWv9LndXxZZv3QhTG1DinqlFsSuvtI0= +github.com/aws/aws-sdk-go-v2/config v1.28.5/go.mod h1:4VsPbHP8JdcdUDmbTVgNL/8w9SqOkM5jyY8ljIxLO3o= 
+github.com/aws/aws-sdk-go-v2/credentials v1.17.46 h1:AU7RcriIo2lXjUfHFnFKYsLCwgbz1E7Mm95ieIRDNUg= +github.com/aws/aws-sdk-go-v2/credentials v1.17.46/go.mod h1:1FmYyLGL08KQXQ6mcTlifyFXfJVCNJTVGuQP4m0d/UA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.20 h1:sDSXIrlsFSFJtWKLQS4PUWRvrT580rrnuLydJrCQ/yA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.20/go.mod h1:WZ/c+w0ofps+/OUqMwWgnfrgzZH1DZO1RIkktICsqnY= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.24 h1:4usbeaes3yJnCFC7kfeyhkdkPtoRYPa/hTmCqMpKpLI= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.24/go.mod h1:5CI1JemjVwde8m2WG3cz23qHKPOxbpkq0HaoreEgLIY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.24 h1:N1zsICrQglfzaBnrfM0Ys00860C+QFwu6u/5+LomP+o= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.24/go.mod h1:dCn9HbJ8+K31i8IQ8EWmWj0EiIk0+vKiHNMxTTYveAg= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.23 h1:1SZBDiRzzs3sNhOMVApyWPduWYGAX0imGy06XiBnCAM= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.23/go.mod h1:i9TkxgbZmHVh2S0La6CAXtnyFhlCX/pJ0JsOvBAS6Mk= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 h1:TToQNkvGguu209puTojY/ozlqy2d/SFNcoLIqTFi42g= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.4 h1:aaPpoG15S2qHkWm4KlEyF01zovK1nW4BBbyXuHNSE90= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.4/go.mod h1:eD9gS2EARTKgGr/W5xwgY/ik9z/zqpW+m/xOQbVxrMk= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4 h1:tHxQi/XHPK0ctd/wdOw0t7Xrc2OxcRCnVzv8lwWPu0c= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4/go.mod h1:4GQbF1vJzG60poZqWatZlhP31y8PGCCVTvIGPdaaYJ0= 
-github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.4 h1:E5ZAVOmI2apR8ADb72Q63KqwwwdW1XcMeXIlrZ1Psjg= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.4/go.mod h1:wezzqVUOVVdk+2Z/JzQT4NxAU0NbhRe5W8pIE72jsWI= -github.com/aws/aws-sdk-go-v2/service/s3 v1.67.0 h1:SwaJ0w0MOp0pBTIKTamLVeTKD+iOWyNJRdJ2KCQRg6Q= -github.com/aws/aws-sdk-go-v2/service/s3 v1.67.0/go.mod h1:TMhLIyRIyoGVlaEMAt+ITMbwskSTpcGsCPDq91/ihY0= -github.com/aws/aws-sdk-go-v2/service/sso v1.24.5 h1:HJwZwRt2Z2Tdec+m+fPjvdmkq2s9Ra+VR0hjF7V2o40= -github.com/aws/aws-sdk-go-v2/service/sso v1.24.5/go.mod h1:wrMCEwjFPms+V86TCQQeOxQF/If4vT44FGIOFiMC2ck= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4 h1:zcx9LiGWZ6i6pjdcoE9oXAB6mUdeyC36Ia/QEiIvYdg= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4/go.mod h1:Tp/ly1cTjRLGBBmNccFumbZ8oqpZlpdhFf80SrRh4is= -github.com/aws/aws-sdk-go-v2/service/sts v1.33.0 h1:s7LRgBqhwLaxcocnAniBJp7gaAB+4I4vHzqUqjH18yc= -github.com/aws/aws-sdk-go-v2/service/sts v1.33.0/go.mod h1:9XEUty5v5UAsMiFOBJrNibZgwCeOma73jgGwwhgffa8= -github.com/aws/smithy-go v1.22.0 h1:uunKnWlcoL3zO7q+gG2Pk53joueEOsnNB28QdMsmiMM= -github.com/aws/smithy-go v1.22.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.24 h1:JX70yGKLj25+lMC5Yyh8wBtvB01GDilyRuJvXJ4piD0= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.24/go.mod h1:+Ln60j9SUTD0LEwnhEB0Xhg61DHqplBrbZpLgyjoEHg= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1 h1:iXtILhvDxB6kPvEXgsDhGaZCSC6LQET5ZHSdJozeI0Y= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1/go.mod h1:9nu0fVANtYiAePIBh2/pFUSwtJ402hLnp854CNoDOeE= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.5 h1:gvZOjQKPxFXy1ft3QnEyXmT+IqneM9QAUWlM3r0mfqw= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.5/go.mod h1:DLWnfvIcm9IET/mmjdxeXbBKmTCm0ZB8p1za9BVteM8= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.5 
h1:wtpJ4zcwrSbwhECWQoI/g6WM9zqCcSpHDJIWSbMLOu4= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.5/go.mod h1:qu/W9HXQbbQ4+1+JcZp0ZNPV31ym537ZJN+fiS7Ti8E= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.5 h1:P1doBzv5VEg1ONxnJss1Kh5ZG/ewoIE4MQtKKc6Crgg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.5/go.mod h1:NOP+euMW7W3Ukt28tAxPuoWao4rhhqJD3QEBk7oCg7w= +github.com/aws/aws-sdk-go-v2/service/s3 v1.68.0 h1:bFpcqdwtAEsgpZXvkTxIThFQx/EM0oV6kXmfFIGjxME= +github.com/aws/aws-sdk-go-v2/service/s3 v1.68.0/go.mod h1:ralv4XawHjEMaHOWnTFushl0WRqim/gQWesAMF6hTow= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.6 h1:3zu537oLmsPfDMyjnUS2g+F2vITgy5pB74tHI+JBNoM= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.6/go.mod h1:WJSZH2ZvepM6t6jwu4w/Z45Eoi75lPN7DcydSRtJg6Y= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.5 h1:K0OQAsDywb0ltlFrZm0JHPY3yZp/S9OaoLU33S7vPS8= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.5/go.mod h1:ORITg+fyuMoeiQFiVGoqB3OydVTLkClw/ljbblMq6Cc= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.1 h1:6SZUVRQNvExYlMLbHdlKB48x0fLbc2iVROyaNEwBHbU= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.1/go.mod h1:GqWyYCwLXnlUB1lOAXQyNSPqPLQJvmo8J0DWBzp9mtg= +github.com/aws/smithy-go v1.22.1 h1:/HPHZQ0g7f4eUeK6HKglFz8uwVfZKgoI25rb/J+dnro= +github.com/aws/smithy-go v1.22.1/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 h1:6df1vn4bBlDDo4tARvBm7l6KA9iVMnE3NWizDeWSrps= github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3/go.mod h1:CIWtjkly68+yqLPbvwwR/fjNJA/idrtULjZWh2v1ys0= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= diff --git a/tools/lambda-promtail/lambda-promtail/s3.go b/tools/lambda-promtail/lambda-promtail/s3.go index 94b76411656e..a6255811378d 100644 --- a/tools/lambda-promtail/lambda-promtail/s3.go +++ b/tools/lambda-promtail/lambda-promtail/s3.go @@ -292,10 +292,10 @@ func processS3Event(ctx 
context.Context, ev *events.S3Event, pc Client, log *log &s3.GetObjectInput{ Bucket: aws.String(labels["bucket"]), Key: aws.String(labels["key"]), - ExpectedBucketOwner: aws.String(labels["bucketOwner"]), + ExpectedBucketOwner: aws.String(labels["bucket_owner"]), }) if err != nil { - return fmt.Errorf("failed to get object %s from bucket %s on account %s, %s", labels["key"], labels["bucket"], labels["bucketOwner"], err) + return fmt.Errorf("failed to get object %s from bucket %s on account %s, %s", labels["key"], labels["bucket"], labels["bucket_owner"], err) } err = parseS3Log(ctx, batch, labels, obj.Body, log) if err != nil { diff --git a/vendor/github.com/alecthomas/chroma/Makefile b/vendor/github.com/alecthomas/chroma/Makefile deleted file mode 100644 index 34e3c41c633f..000000000000 --- a/vendor/github.com/alecthomas/chroma/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -.PHONY: chromad upload all - -VERSION ?= $(shell git describe --tags --dirty --always) - -all: README.md tokentype_string.go - -README.md: lexers/*/*.go - ./table.py - -tokentype_string.go: types.go - go generate - -chromad: - rm -f chromad - (export CGOENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .) 
- -upload: chromad - scp chromad root@swapoff.org: && \ - ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart' diff --git a/vendor/github.com/alecthomas/chroma/README.md b/vendor/github.com/alecthomas/chroma/README.md deleted file mode 100644 index b62b847685fc..000000000000 --- a/vendor/github.com/alecthomas/chroma/README.md +++ /dev/null @@ -1,285 +0,0 @@ -# Chroma — A general purpose syntax highlighter in pure Go -[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/) - -> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. - -Chroma takes source code and other structured text and converts it into syntax -highlighted HTML, ANSI-coloured text, etc. - -Chroma is based heavily on [Pygments](http://pygments.org/), and includes -translators for Pygments lexers and styles. - - -## Table of Contents - - - -1. [Table of Contents](#table-of-contents) -2. [Supported languages](#supported-languages) -3. [Try it](#try-it) -4. [Using the library](#using-the-library) - 1. [Quick start](#quick-start) - 2. [Identifying the language](#identifying-the-language) - 3. [Formatting the output](#formatting-the-output) - 4. [The HTML formatter](#the-html-formatter) -5. [More detail](#more-detail) - 1. [Lexers](#lexers) - 2. [Formatters](#formatters) - 3. [Styles](#styles) -6. [Command-line interface](#command-line-interface) -7. 
[What's missing compared to Pygments?](#whats-missing-compared-to-pygments) - - - - -## Supported languages - -Prefix | Language -:----: | -------- -A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk -B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck -C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython -D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan -E | EBNF, Elixir, Elm, EmacsLisp, Erlang -F | Factor, Fish, Forth, Fortran, FSharp -G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy -H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy -I | Idris, Igor, INI, Io -J | J, Java, JavaScript, JSON, Julia, Jungle -K | Kotlin -L | Lighttpd configuration file, LLVM, Lua -M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL -N | NASM, Newspeak, Nginx configuration file, Nim, Nix -O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode -P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python 2, Python -Q | QBasic -R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust -S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog -T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData -V | VB.net, verilog, VHDL, VimL, vue -W | WDTE -X | XML, Xorg -Y | 
YAML, YANG -Z | Zig - - -_I will attempt to keep this section up to date, but an authoritative list can be -displayed with `chroma --list`._ - - -## Try it - -Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/). - - -## Using the library - -Chroma, like Pygments, has the concepts of -[lexers](https://github.com/alecthomas/chroma/tree/master/lexers), -[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and -[styles](https://github.com/alecthomas/chroma/tree/master/styles). - -Lexers convert source text into a stream of tokens, styles specify how token -types are mapped to colours, and formatters convert tokens and styles into -formatted output. - -A package exists for each of these, containing a global `Registry` variable -with all of the registered implementations. There are also helper functions -for using the registry in each package, such as looking up lexers by name or -matching filenames, etc. - -In all cases, if a lexer, formatter or style can not be determined, `nil` will -be returned. In this situation you may want to default to the `Fallback` -value in each respective package, which provides sane defaults. - - -### Quick start - -A convenience function exists that can be used to simply format some source -text, without any effort: - -```go -err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai") -``` - - -### Identifying the language - -To highlight code, you'll first have to identify what language the code is -written in. There are three primary ways to do that: - -1. Detect the language from its filename. - - ```go - lexer := lexers.Match("foo.go") - ``` - -3. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`). - - ```go - lexer := lexers.Get("go") - ``` - -3. Detect the language from its content. 
- - ```go - lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n") - ``` - -In all cases, `nil` will be returned if the language can not be identified. - -```go -if lexer == nil { - lexer = lexers.Fallback -} -``` - -At this point, it should be noted that some lexers can be extremely chatty. To -mitigate this, you can use the coalescing lexer to coalesce runs of identical -token types into a single token: - -```go -lexer = chroma.Coalesce(lexer) -``` - - -### Formatting the output - -Once a language is identified you will need to pick a formatter and a style (theme). - -```go -style := styles.Get("swapoff") -if style == nil { - style = styles.Fallback -} -formatter := formatters.Get("html") -if formatter == nil { - formatter = formatters.Fallback -} -``` - -Then obtain an iterator over the tokens: - -```go -contents, err := ioutil.ReadAll(r) -iterator, err := lexer.Tokenise(nil, string(contents)) -``` - -And finally, format the tokens from the iterator: - -```go -err := formatter.Format(w, style, iterator) -``` - - -### The HTML formatter - -By default the `html` registered formatter generates standalone HTML with -embedded CSS. More flexibility is available through the `formatters/html` package. - -Firstly, the output generated by the formatter can be customised with the -following constructor options: - -- `Standalone()` - generate standalone HTML with embedded CSS. -- `WithClasses()` - use classes rather than inlined style attributes. -- `ClassPrefix(prefix)` - prefix each generated CSS class. -- `TabWidth(width)` - Set the rendered tab width, in characters. -- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). -- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves. -- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). -- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. 
- -If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with: - -```go -formatter := html.New(html.WithClasses()) -err := formatter.WriteCSS(w, style) -``` - - -## More detail - - -### Lexers - -See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/) -for details on implementing lexers. Most concepts apply directly to Chroma, -but see existing lexer implementations for real examples. - -In many cases lexers can be automatically converted directly from Pygments by -using the included Python 3 script `pygments2chroma.py`. I use something like -the following: - -```sh -python3 _tools/pygments2chroma.py \ - pygments.lexers.jvm.KotlinLexer \ - > lexers/k/kotlin.go \ - && gofmt -s -w lexers/k/kotlin.go -``` - -See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) -for a list of lexers, and notes on some of the issues importing them. - - -### Formatters - -Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour. - -A `noop` formatter is included that outputs the token text only, and a `tokens` -formatter outputs raw tokens. The latter is useful for debugging lexers. - - -### Styles - -Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments. - -All Pygments styles have been converted to Chroma using the `_tools/style.py` script. - -When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color. - -For example, this gives each token name not defined in the style a default color of `#f8f8f8` and uses `#000000` for the highlighted code block's background: - -~~~go -chroma.Background: "#f8f8f2 bg:#000000", -~~~ - -Also, token types in a style file are hierarchical. 
For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color. - -For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/). - - -## Command-line interface - -A command-line interface to Chroma is included. - -Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases). - -The CLI can be used as a preprocessor to colorise output of `less(1)`, -see documentation for the `LESSOPEN` environment variable. - -The `--fail` flag can be used to suppress output and return with exit status -1 to facilitate falling back to some other preprocessor in case chroma -does not resolve a specific lexer to use for the given file. For example: - -```shell -export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"' -``` - -Replace `cat` with your favourite fallback preprocessor. - -When invoked as `.lessfilter`, the `--fail` flag is automatically turned -on under the hood for easy integration with [lesspipe shipping with -Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS); -for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`. - - -## What's missing compared to Pygments? - -- Quite a few lexers, for various reasons (pull-requests welcome): - - Pygments lexers for complex languages often include custom code to - handle certain aspects, such as Raku's ability to nest code inside - regular expressions. These require time and effort to convert. - - I mostly only converted languages I had heard of, to reduce the porting cost. -- Some more esoteric features of Pygments are omitted for simplicity. -- Though the Chroma API supports content detection, very few languages support them. 
- I have plans to implement a statistical analyser at some point, but not enough time. diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/abap.go b/vendor/github.com/alecthomas/chroma/lexers/a/abap.go deleted file mode 100644 index 268aa6a5927b..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/abap.go +++ /dev/null @@ -1,60 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// ABAP lexer. -var Abap = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ABAP", - Aliases: []string{"abap"}, - Filenames: []string{"*.abap", "*.ABAP"}, - MimeTypes: []string{"text/x-abap"}, - CaseInsensitive: true, - }, - abapRules, -)) - -func abapRules() Rules { - return Rules{ - "common": { - {`\s+`, Text, nil}, - {`^\*.*$`, CommentSingle, nil}, - {`\".*?\n`, CommentSingle, nil}, - {`##\w+`, CommentSpecial, nil}, - }, - "variable-names": { - {`<\S+>`, NameVariable, nil}, - {`\w[\w~]*(?:(\[\])|->\*)?`, NameVariable, nil}, - }, - "root": { - Include("common"), - {`CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)`, Keyword, nil}, - {`(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b`, Keyword, nil}, - {`(FORM|PERFORM)(\s+)(\w+)`, ByGroups(Keyword, Text, NameFunction), nil}, - {`(PERFORM)(\s+)(\()(\w+)(\))`, ByGroups(Keyword, Text, Punctuation, NameVariable, Punctuation), nil}, - {`(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)`, ByGroups(Keyword, Text, NameFunction, Text, Keyword), nil}, - {`(METHOD)(\s+)([\w~]+)`, ByGroups(Keyword, Text, NameFunction), nil}, - {`(\s+)([\w\-]+)([=\-]>)([\w\-~]+)`, ByGroups(Text, NameVariable, Operator, NameFunction), nil}, - {`(?<=(=|-)>)([\w\-~]+)(?=\()`, NameFunction, nil}, - {`(TEXT)(-)(\d{3})`, ByGroups(Keyword, Punctuation, LiteralNumberInteger), nil}, - {`(TEXT)(-)(\w{3})`, ByGroups(Keyword, Punctuation, NameVariable), nil}, - 
{`(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b`, Keyword, nil}, - {`(?])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|(GROUP|ORDER) BY|HAVING|SEPARATED BY|GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|PF-STATUS|(PROPERTY|REFERENCE)\s+OF|RUN\s+TIME|TIME\s+(STAMP)?)?|SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|(CLOSE|OPEN)\s+(DATASET|CURSOR)|(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|DATABASE|SHARED\s+(MEMORY|BUFFER))|DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|FREE\s(MEMORY|OBJECT)?|PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|ON\s+(VALUE-REQUEST|HELP-REQUEST))|AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|SCREEN)|COMMENT|FUNCTION\s+KEY|INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|SKIP|ULINE)|LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|TO LIST-PROCESSING|TO 
TRANSACTION)(ENDING|STARTING)\s+AT|FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|(BEGIN|END)\s+OF|DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|COMPARING(\s+ALL\s+FIELDS)?|(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|END-OF-(DEFINITION|PAGE|SELECTION)|WITH\s+FRAME(\s+TITLE)|(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|(RESPECTING|IGNORING)\s+CASE|IN\s+UPDATE\s+TASK|(SOURCE|RESULT)\s+(XML)?|REFERENCE\s+INTO|AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b`, Keyword, nil}, - 
{`(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|BACK|BLOCK|BREAK-POINT|CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|HIDE|ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|JOIN|KEY|NEXT|MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|NODES|NUMBER|OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|ULINE|UNDER|UNPACK|UPDATE|USING|VALUE|VALUES|VIA|VARYING|VARY|WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b`, Keyword, nil}, - 
{`(abs|acos|asin|atan|boolc|boolx|bit_set|char_off|charlen|ceil|cmax|cmin|condense|contains|contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|count|count_any_of|count_any_not_of|dbmaxlen|distance|escape|exp|find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|insert|lines|log|log10|match|matches|nmax|nmin|numofchar|repeat|replace|rescale|reverse|round|segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|substring|substring_after|substring_from|substring_before|substring_to|tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|xstrlen)(\()\b`, ByGroups(NameBuiltin, Punctuation), nil}, - {`&[0-9]`, Name, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b`, OperatorWord, nil}, - Include("variable-names"), - {`[?*<>=\-+&]`, Operator, nil}, - {`'(''|[^'])*'`, LiteralStringSingle, nil}, - {"`([^`])*`", LiteralStringSingle, nil}, - {`([|}])([^{}|]*?)([|{])`, ByGroups(Punctuation, LiteralStringSingle, Punctuation), nil}, - {`[/;:()\[\],.]`, Punctuation, nil}, - {`(!)(\w+)`, ByGroups(Operator, Name), nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go b/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go deleted file mode 100644 index 85c47afce291..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go +++ /dev/null @@ -1,42 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Abnf lexer. 
-var Abnf = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ABNF", - Aliases: []string{"abnf"}, - Filenames: []string{"*.abnf"}, - MimeTypes: []string{"text/x-abnf"}, - }, - abnfRules, -)) - -func abnfRules() Rules { - return Rules{ - "root": { - {`;.*$`, CommentSingle, nil}, - {`(%[si])?"[^"]*"`, Literal, nil}, - {`%b[01]+\-[01]+\b`, Literal, nil}, - {`%b[01]+(\.[01]+)*\b`, Literal, nil}, - {`%d[0-9]+\-[0-9]+\b`, Literal, nil}, - {`%d[0-9]+(\.[0-9]+)*\b`, Literal, nil}, - {`%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b`, Literal, nil}, - {`%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b`, Literal, nil}, - {`\b[0-9]+\*[0-9]+`, Operator, nil}, - {`\b[0-9]+\*`, Operator, nil}, - {`\b[0-9]+`, Operator, nil}, - {`\*`, Operator, nil}, - {Words(``, `\b`, `ALPHA`, `BIT`, `CHAR`, `CR`, `CRLF`, `CTL`, `DIGIT`, `DQUOTE`, `HEXDIG`, `HTAB`, `LF`, `LWSP`, `OCTET`, `SP`, `VCHAR`, `WSP`), Keyword, nil}, - {`[a-zA-Z][a-zA-Z0-9-]+\b`, NameClass, nil}, - {`(=/|=|/)`, Operator, nil}, - {`[\[\]()]`, Punctuation, nil}, - {`\s+`, Text, nil}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go deleted file mode 100644 index df55d6daca94..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go +++ /dev/null @@ -1,43 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Actionscript lexer. 
-var Actionscript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ActionScript", - Aliases: []string{"as", "actionscript"}, - Filenames: []string{"*.as"}, - MimeTypes: []string{"application/x-actionscript", "text/x-actionscript", "text/actionscript"}, - NotMultiline: true, - DotAll: true, - }, - actionscriptRules, -)) - -func actionscriptRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, nil}, - {`[~^*!%&<>|+=:;,/?\\-]+`, Operator, nil}, - {`[{}\[\]();.]+`, Punctuation, nil}, - {Words(``, `\b`, `case`, `default`, `for`, `each`, `in`, `while`, `do`, `break`, `return`, `continue`, `if`, `else`, `throw`, `try`, `catch`, `var`, `with`, `new`, `typeof`, `arguments`, `instanceof`, `this`, `switch`), Keyword, nil}, - {Words(``, `\b`, `class`, `public`, `final`, `internal`, `native`, `override`, `private`, `protected`, `static`, `import`, `extends`, `implements`, `interface`, `intrinsic`, `return`, `super`, `dynamic`, `function`, `const`, `get`, `namespace`, `package`, `set`), KeywordDeclaration, nil}, - {`(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b`, KeywordConstant, nil}, - {Words(``, `\b`, `Accessibility`, `AccessibilityProperties`, `ActionScriptVersion`, `ActivityEvent`, `AntiAliasType`, `ApplicationDomain`, `AsBroadcaster`, `Array`, `AsyncErrorEvent`, `AVM1Movie`, `BevelFilter`, `Bitmap`, `BitmapData`, `BitmapDataChannel`, `BitmapFilter`, `BitmapFilterQuality`, `BitmapFilterType`, `BlendMode`, `BlurFilter`, `Boolean`, `ByteArray`, `Camera`, `Capabilities`, `CapsStyle`, `Class`, `Color`, `ColorMatrixFilter`, `ColorTransform`, `ContextMenu`, `ContextMenuBuiltInItems`, `ContextMenuEvent`, `ContextMenuItem`, `ConvultionFilter`, `CSMSettings`, `DataEvent`, `Date`, `DefinitionError`, `DeleteObjectSample`, `Dictionary`, `DisplacmentMapFilter`, `DisplayObject`, `DisplacmentMapFilterMode`, 
`DisplayObjectContainer`, `DropShadowFilter`, `Endian`, `EOFError`, `Error`, `ErrorEvent`, `EvalError`, `Event`, `EventDispatcher`, `EventPhase`, `ExternalInterface`, `FileFilter`, `FileReference`, `FileReferenceList`, `FocusDirection`, `FocusEvent`, `Font`, `FontStyle`, `FontType`, `FrameLabel`, `FullScreenEvent`, `Function`, `GlowFilter`, `GradientBevelFilter`, `GradientGlowFilter`, `GradientType`, `Graphics`, `GridFitType`, `HTTPStatusEvent`, `IBitmapDrawable`, `ID3Info`, `IDataInput`, `IDataOutput`, `IDynamicPropertyOutputIDynamicPropertyWriter`, `IEventDispatcher`, `IExternalizable`, `IllegalOperationError`, `IME`, `IMEConversionMode`, `IMEEvent`, `int`, `InteractiveObject`, `InterpolationMethod`, `InvalidSWFError`, `InvokeEvent`, `IOError`, `IOErrorEvent`, `JointStyle`, `Key`, `Keyboard`, `KeyboardEvent`, `KeyLocation`, `LineScaleMode`, `Loader`, `LoaderContext`, `LoaderInfo`, `LoadVars`, `LocalConnection`, `Locale`, `Math`, `Matrix`, `MemoryError`, `Microphone`, `MorphShape`, `Mouse`, `MouseEvent`, `MovieClip`, `MovieClipLoader`, `Namespace`, `NetConnection`, `NetStatusEvent`, `NetStream`, `NewObjectSample`, `Number`, `Object`, `ObjectEncoding`, `PixelSnapping`, `Point`, `PrintJob`, `PrintJobOptions`, `PrintJobOrientation`, `ProgressEvent`, `Proxy`, `QName`, `RangeError`, `Rectangle`, `ReferenceError`, `RegExp`, `Responder`, `Sample`, `Scene`, `ScriptTimeoutError`, `Security`, `SecurityDomain`, `SecurityError`, `SecurityErrorEvent`, `SecurityPanel`, `Selection`, `Shape`, `SharedObject`, `SharedObjectFlushStatus`, `SimpleButton`, `Socket`, `Sound`, `SoundChannel`, `SoundLoaderContext`, `SoundMixer`, `SoundTransform`, `SpreadMethod`, `Sprite`, `StackFrame`, `StackOverflowError`, `Stage`, `StageAlign`, `StageDisplayState`, `StageQuality`, `StageScaleMode`, `StaticText`, `StatusEvent`, `String`, `StyleSheet`, `SWFVersion`, `SyncEvent`, `SyntaxError`, `System`, `TextColorType`, `TextField`, `TextFieldAutoSize`, `TextFieldType`, `TextFormat`, `TextFormatAlign`, 
`TextLineMetrics`, `TextRenderer`, `TextSnapshot`, `Timer`, `TimerEvent`, `Transform`, `TypeError`, `uint`, `URIError`, `URLLoader`, `URLLoaderDataFormat`, `URLRequest`, `URLRequestHeader`, `URLRequestMethod`, `URLStream`, `URLVariabeles`, `VerifyError`, `Video`, `XML`, `XMLDocument`, `XMLList`, `XMLNode`, `XMLNodeType`, `XMLSocket`, `XMLUI`), NameBuiltin, nil}, - {Words(``, `\b`, `decodeURI`, `decodeURIComponent`, `encodeURI`, `escape`, `eval`, `isFinite`, `isNaN`, `isXMLName`, `clearInterval`, `fscommand`, `getTimer`, `getURL`, `getVersion`, `parseFloat`, `parseInt`, `setInterval`, `trace`, `updateAfterEvent`, `unescape`), NameFunction, nil}, - {`[$a-zA-Z_]\w*`, NameOther, nil}, - {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-f]+`, LiteralNumberHex, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go deleted file mode 100644 index 45596dcc1750..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go +++ /dev/null @@ -1,60 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Actionscript 3 lexer. 
-var Actionscript3 = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ActionScript 3", - Aliases: []string{"as3", "actionscript3"}, - Filenames: []string{"*.as"}, - MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"}, - DotAll: true, - }, - actionscript3Rules, -)) - -func actionscript3Rules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")}, - {`(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?)`, ByGroups(KeywordDeclaration, Text, Name, Text, Punctuation, Text, KeywordType), nil}, - {`(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)`, ByGroups(Keyword, Text, NameNamespace, Text), nil}, - {`(new)(\s+)([$a-zA-Z_]\w*(?:\.<\w+>)?)(\s*)(\()`, ByGroups(Keyword, Text, KeywordType, Text, Operator), nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`/(\\\\|\\/|[^\n])*/[gisx]*`, LiteralStringRegex, nil}, - {`(\.)([$a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, - {`(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b`, Keyword, nil}, - {`(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b`, KeywordDeclaration, nil}, - {`(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b`, KeywordConstant, nil}, - {`(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b`, NameFunction, nil}, - {`[$a-zA-Z_]\w*`, Name, nil}, - {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-f]+`, LiteralNumberHex, nil}, - {`[0-9]+`, LiteralNumberInteger, 
nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`[~^*!%&<>|+=:;,/?\\{}\[\]().-]+`, Operator, nil}, - }, - "funcparams": { - {`\s+`, Text, nil}, - {`(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)(\s*)`, ByGroups(Text, Punctuation, Name, Text, Operator, Text, KeywordType, Text), Push("defval")}, - {`\)`, Operator, Push("type")}, - }, - "type": { - {`(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)`, ByGroups(Text, Operator, Text, KeywordType), Pop(2)}, - {`\s+`, Text, Pop(2)}, - Default(Pop(2)), - }, - "defval": { - {`(=)(\s*)([^(),]+)(\s*)(,?)`, ByGroups(Operator, Text, UsingSelf("root"), Text, Operator), Pop(1)}, - {`,`, Operator, Pop(1)}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/ada.go b/vendor/github.com/alecthomas/chroma/lexers/a/ada.go deleted file mode 100644 index 916727124bfb..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/ada.go +++ /dev/null @@ -1,118 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Ada lexer. 
-var Ada = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Ada", - Aliases: []string{"ada", "ada95", "ada2005"}, - Filenames: []string{"*.adb", "*.ads", "*.ada"}, - MimeTypes: []string{"text/x-ada"}, - CaseInsensitive: true, - }, - adaRules, -)) - -func adaRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - {`--.*?\n`, CommentSingle, nil}, - {`[^\S\n]+`, Text, nil}, - {`function|procedure|entry`, KeywordDeclaration, Push("subprogram")}, - {`(subtype|type)(\s+)(\w+)`, ByGroups(KeywordDeclaration, Text, KeywordType), Push("type_def")}, - {`task|protected`, KeywordDeclaration, nil}, - {`(subtype)(\s+)`, ByGroups(KeywordDeclaration, Text), nil}, - {`(end)(\s+)`, ByGroups(KeywordReserved, Text), Push("end")}, - {`(pragma)(\s+)(\w+)`, ByGroups(KeywordReserved, Text, CommentPreproc), nil}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {Words(``, `\b`, `Address`, `Byte`, `Boolean`, `Character`, `Controlled`, `Count`, `Cursor`, `Duration`, `File_Mode`, `File_Type`, `Float`, `Generator`, `Integer`, `Long_Float`, `Long_Integer`, `Long_Long_Float`, `Long_Long_Integer`, `Natural`, `Positive`, `Reference_Type`, `Short_Float`, `Short_Integer`, `Short_Short_Float`, `Short_Short_Integer`, `String`, `Wide_Character`, `Wide_String`), KeywordType, nil}, - {`(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b`, OperatorWord, nil}, - {`generic|private`, KeywordDeclaration, nil}, - {`package`, KeywordDeclaration, Push("package")}, - {`array\b`, KeywordReserved, Push("array_def")}, - {`(with|use)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`(\w+)(\s*)(:)(\s*)(constant)`, ByGroups(NameConstant, Text, Punctuation, Text, KeywordReserved), nil}, - {`<<\w+>>`, NameLabel, nil}, - {`(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)`, ByGroups(NameLabel, Text, Punctuation, Text, KeywordReserved), nil}, - {Words(`\b`, `\b`, `abort`, `abs`, `abstract`, `accept`, `access`, `aliased`, `all`, `array`, `at`, `begin`, `body`, `case`, `constant`, `declare`, 
`delay`, `delta`, `digits`, `do`, `else`, `elsif`, `end`, `entry`, `exception`, `exit`, `interface`, `for`, `goto`, `if`, `is`, `limited`, `loop`, `new`, `null`, `of`, `or`, `others`, `out`, `overriding`, `pragma`, `protected`, `raise`, `range`, `record`, `renames`, `requeue`, `return`, `reverse`, `select`, `separate`, `subtype`, `synchronized`, `task`, `tagged`, `terminate`, `then`, `type`, `until`, `when`, `while`, `xor`), KeywordReserved, nil}, - {`"[^"]*"`, LiteralString, nil}, - Include("attribute"), - Include("numbers"), - {`'[^']'`, LiteralStringChar, nil}, - {`(\w+)(\s*|[(,])`, ByGroups(Name, UsingSelf("root")), nil}, - {`(<>|=>|:=|[()|:;,.'])`, Punctuation, nil}, - {`[*<>+=/&-]`, Operator, nil}, - {`\n+`, Text, nil}, - }, - "numbers": { - {`[0-9_]+#[0-9a-f]+#`, LiteralNumberHex, nil}, - {`[0-9_]+\.[0-9_]*`, LiteralNumberFloat, nil}, - {`[0-9_]+`, LiteralNumberInteger, nil}, - }, - "attribute": { - {`(')(\w+)`, ByGroups(Punctuation, NameAttribute), nil}, - }, - "subprogram": { - {`\(`, Punctuation, Push("#pop", "formal_part")}, - {`;`, Punctuation, Pop(1)}, - {`is\b`, KeywordReserved, Pop(1)}, - {`"[^"]+"|\w+`, NameFunction, nil}, - Include("root"), - }, - "end": { - {`(if|case|record|loop|select)`, KeywordReserved, nil}, - {`"[^"]+"|[\w.]+`, NameFunction, nil}, - {`\s+`, Text, nil}, - {`;`, Punctuation, Pop(1)}, - }, - "type_def": { - {`;`, Punctuation, Pop(1)}, - {`\(`, Punctuation, Push("formal_part")}, - {`with|and|use`, KeywordReserved, nil}, - {`array\b`, KeywordReserved, Push("#pop", "array_def")}, - {`record\b`, KeywordReserved, Push("record_def")}, - {`(null record)(;)`, ByGroups(KeywordReserved, Punctuation), Pop(1)}, - Include("root"), - }, - "array_def": { - {`;`, Punctuation, Pop(1)}, - {`(\w+)(\s+)(range)`, ByGroups(KeywordType, Text, KeywordReserved), nil}, - Include("root"), - }, - "record_def": { - {`end record`, KeywordReserved, Pop(1)}, - Include("root"), - }, - "import": { - {`[\w.]+`, NameNamespace, Pop(1)}, - Default(Pop(1)), - }, - 
"formal_part": { - {`\)`, Punctuation, Pop(1)}, - {`\w+`, NameVariable, nil}, - {`,|:[^=]`, Punctuation, nil}, - {`(in|not|null|out|access)\b`, KeywordReserved, nil}, - Include("root"), - }, - "package": { - {`body`, KeywordDeclaration, nil}, - {`is\s+new|renames`, KeywordReserved, nil}, - {`is`, KeywordReserved, Pop(1)}, - {`;`, Punctuation, Pop(1)}, - {`\(`, Punctuation, Push("package_instantiation")}, - {`([\w.]+)`, NameClass, nil}, - Include("root"), - }, - "package_instantiation": { - {`("[^"]+"|\w+)(\s+)(=>)`, ByGroups(NameVariable, Text, Punctuation), nil}, - {`[\w.\'"]`, Text, nil}, - {`\)`, Punctuation, Pop(1)}, - Include("root"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/al.go b/vendor/github.com/alecthomas/chroma/lexers/a/al.go deleted file mode 100644 index 4055f3651eba..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/al.go +++ /dev/null @@ -1,47 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Al lexer. 
-var Al = internal.Register(MustNewLazyLexer( - &Config{ - Name: "AL", - Aliases: []string{"al"}, - Filenames: []string{"*.al", "*.dal"}, - MimeTypes: []string{"text/x-al"}, - DotAll: true, - CaseInsensitive: true, - }, - alRules, -)) - -// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage -func alRules() Rules { - return Rules{ - "root": { - {`\s+`, TextWhitespace, nil}, - {`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil}, - {`(?s)//.*?\n`, CommentSingle, nil}, - {`\"([^\"])*\"`, Text, nil}, - {`'([^'])*'`, LiteralString, nil}, - {`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil}, - {`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil}, - {`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil}, - {`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b`, Keyword, nil}, - 
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil}, - {`\b([<>]=|<>|<|>)\b?`, Operator, nil}, - {`\b(\-|\+|\/|\*)\b`, Operator, nil}, - {`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil}, - {`\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil}, - {`\s*[(\.\.)&\|]\s*`, Operator, nil}, - 
{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil}, - {`[;:,]`, Punctuation, nil}, - {`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil}, - {`\w+`, Text, nil}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go b/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go deleted file mode 100644 index a947edad60a8..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go +++ /dev/null @@ -1,46 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Angular2 lexer. -var Angular2 = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Angular2", - Aliases: []string{"ng2"}, - Filenames: []string{}, - MimeTypes: []string{}, - }, - angular2Rules, -)) - -func angular2Rules() Rules { - return Rules{ - "root": { - {`[^{([*#]+`, Other, nil}, - {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")}, - {`([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text, Operator, Text), Push("attr")}, - {`([([]+)([\w:.-]+)([\])]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text), nil}, - {`([*#])([\w:.-]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Operator), Push("attr")}, - {`([*#])([\w:.-]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation), nil}, - }, - "ngExpression": { - {`\s+(\|\s+)?`, Text, nil}, - {`\}\}`, CommentPreproc, Pop(1)}, - {`:?(true|false)`, LiteralStringBoolean, nil}, - {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, - {`[a-zA-Z][\w-]*(\(.*\))?`, NameVariable, nil}, - {`\.[\w-]+(\(.*\))?`, NameVariable, nil}, - 
{`(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)`, ByGroups(Operator, Text, LiteralString, Text, Operator, Text, LiteralString, Text), nil}, - }, - "attr": { - {`".*?"`, LiteralString, Pop(1)}, - {`'.*?'`, LiteralString, Pop(1)}, - {`[^\s>]+`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go b/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go deleted file mode 100644 index c744353f1139..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go +++ /dev/null @@ -1,105 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// ANTLR lexer. -var ANTLR = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ANTLR", - Aliases: []string{"antlr"}, - Filenames: []string{}, - MimeTypes: []string{}, - }, - antlrRules, -)) - -func antlrRules() Rules { - return Rules{ - "whitespace": { - {`\s+`, TextWhitespace, nil}, - }, - "comments": { - {`//.*$`, Comment, nil}, - {`/\*(.|\n)*?\*/`, Comment, nil}, - }, - "root": { - Include("whitespace"), - Include("comments"), - {`(lexer|parser|tree)?(\s*)(grammar\b)(\s*)([A-Za-z]\w*)(;)`, ByGroups(Keyword, TextWhitespace, Keyword, TextWhitespace, NameClass, Punctuation), nil}, - {`options\b`, Keyword, Push("options")}, - {`tokens\b`, Keyword, Push("tokens")}, - {`(scope)(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(Keyword, TextWhitespace, NameVariable, TextWhitespace, Punctuation), Push("action")}, - {`(catch|finally)\b`, Keyword, Push("exception")}, - {`(@[A-Za-z]\w*)(\s*)(::)?(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, NameLabel, TextWhitespace, Punctuation), Push("action")}, - {`((?:protected|private|public|fragment)\b)?(\s*)([A-Za-z]\w*)(!)?`, ByGroups(Keyword, TextWhitespace, NameLabel, Punctuation), Push("rule-alts", "rule-prelims")}, - }, - "exception": { - {`\n`, TextWhitespace, Pop(1)}, - {`\s`, TextWhitespace, nil}, - Include("comments"), 
- {`\[`, Punctuation, Push("nested-arg-action")}, - {`\{`, Punctuation, Push("action")}, - }, - "rule-prelims": { - Include("whitespace"), - Include("comments"), - {`returns\b`, Keyword, nil}, - {`\[`, Punctuation, Push("nested-arg-action")}, - {`\{`, Punctuation, Push("action")}, - {`(throws)(\s+)([A-Za-z]\w*)`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, - {`(,)(\s*)([A-Za-z]\w*)`, ByGroups(Punctuation, TextWhitespace, NameLabel), nil}, - {`options\b`, Keyword, Push("options")}, - {`(scope)(\s+)(\{)`, ByGroups(Keyword, TextWhitespace, Punctuation), Push("action")}, - {`(scope)(\s+)([A-Za-z]\w*)(\s*)(;)`, ByGroups(Keyword, TextWhitespace, NameLabel, TextWhitespace, Punctuation), nil}, - {`(@[A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation), Push("action")}, - {`:`, Punctuation, Pop(1)}, - }, - "rule-alts": { - Include("whitespace"), - Include("comments"), - {`options\b`, Keyword, Push("options")}, - {`:`, Punctuation, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`<<([^>]|>[^>])>>`, LiteralString, nil}, - {`\$?[A-Z_]\w*`, NameConstant, nil}, - {`\$?[a-z_]\w*`, NameVariable, nil}, - {`(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)`, Operator, nil}, - {`,`, Punctuation, nil}, - {`\[`, Punctuation, Push("nested-arg-action")}, - {`\{`, Punctuation, Push("action")}, - {`;`, Punctuation, Pop(1)}, - }, - "tokens": { - Include("whitespace"), - Include("comments"), - {`\{`, Punctuation, nil}, - {`([A-Z]\w*)(\s*)(=)?(\s*)(\'(?:\\\\|\\\'|[^\']*)\')?(\s*)(;)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, LiteralString, TextWhitespace, Punctuation), nil}, - {`\}`, Punctuation, Pop(1)}, - }, - "options": { - Include("whitespace"), - Include("comments"), - {`\{`, Punctuation, nil}, - {`([A-Za-z]\w*)(\s*)(=)(\s*)([A-Za-z]\w*|\'(?:\\\\|\\\'|[^\']*)\'|[0-9]+|\*)(\s*)(;)`, ByGroups(NameVariable, TextWhitespace, Punctuation, TextWhitespace, Text, TextWhitespace, Punctuation), 
nil}, - {`\}`, Punctuation, Pop(1)}, - }, - "action": { - {`([^${}\'"/\\]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|\\(?!%)|/)+`, Other, nil}, - {`(\\)(%)`, ByGroups(Punctuation, Other), nil}, - {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "nested-arg-action": { - {`([^$\[\]\'"/]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|/)+`, Other, nil}, - {`\[`, Punctuation, Push()}, - {`\]`, Punctuation, Pop(1)}, - {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, - {`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/apache.go b/vendor/github.com/alecthomas/chroma/lexers/a/apache.go deleted file mode 100644 index 5685eb15ce06..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/apache.go +++ /dev/null @@ -1,42 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Apacheconf lexer. 
-var Apacheconf = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ApacheConf", - Aliases: []string{"apacheconf", "aconf", "apache"}, - Filenames: []string{".htaccess", "apache.conf", "apache2.conf"}, - MimeTypes: []string{"text/x-apacheconf"}, - CaseInsensitive: true, - }, - apacheconfRules, -)) - -func apacheconfRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`(#.*?)$`, Comment, nil}, - {`(<[^\s>]+)(?:(\s+)(.*?))?(>)`, ByGroups(NameTag, Text, LiteralString, NameTag), nil}, - {`([a-z]\w*)(\s+)`, ByGroups(NameBuiltin, Text), Push("value")}, - {`\.+`, Text, nil}, - }, - "value": { - {`\\\n`, Text, nil}, - {`$`, Text, Pop(1)}, - {`\\`, Text, nil}, - {`[^\S\n]+`, Text, nil}, - {`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil}, - {`\d+`, LiteralNumber, nil}, - {`/([a-z0-9][\w./-]+)`, LiteralStringOther, nil}, - {`(on|off|none|any|all|double|email|dns|min|minimal|os|productonly|full|emerg|alert|crit|error|warn|notice|info|debug|registry|script|inetd|standalone|user|group)\b`, Keyword, nil}, - {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, - {`[^\s"\\]+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/apl.go b/vendor/github.com/alecthomas/chroma/lexers/a/apl.go deleted file mode 100644 index f929d2e2b17a..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/apl.go +++ /dev/null @@ -1,40 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Apl lexer. 
-var Apl = internal.Register(MustNewLazyLexer( - &Config{ - Name: "APL", - Aliases: []string{"apl"}, - Filenames: []string{"*.apl"}, - MimeTypes: []string{}, - }, - aplRules, -)) - -func aplRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`[⍝#].*$`, CommentSingle, nil}, - {`\'((\'\')|[^\'])*\'`, LiteralStringSingle, nil}, - {`"(("")|[^"])*"`, LiteralStringDouble, nil}, - {`[⋄◇()]`, Punctuation, nil}, - {`[\[\];]`, LiteralStringRegex, nil}, - {`⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameFunction, nil}, - {`[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil}, - {`¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?`, LiteralNumber, nil}, - {`[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]`, NameAttribute, nil}, - {`[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]`, Operator, nil}, - {`⍬`, NameConstant, nil}, - {`[⎕⍞]`, NameVariableGlobal, nil}, - {`[←→]`, KeywordDeclaration, nil}, - {`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil}, - {`[{}]`, KeywordType, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go b/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go deleted file mode 100644 index b6a53c5ff824..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go +++ /dev/null @@ -1,59 +0,0 @@ -package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Applescript lexer. 
-var Applescript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "AppleScript", - Aliases: []string{"applescript"}, - Filenames: []string{"*.applescript"}, - MimeTypes: []string{}, - DotAll: true, - }, - applescriptRules, -)) - -func applescriptRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`¬\n`, LiteralStringEscape, nil}, - {`'s\s+`, Text, nil}, - {`(--|#).*?$`, Comment, nil}, - {`\(\*`, CommentMultiline, Push("comment")}, - {`[(){}!,.:]`, Punctuation, nil}, - {`(«)([^»]+)(»)`, ByGroups(Text, NameBuiltin, Text), nil}, - {`\b((?:considering|ignoring)\s*)(application responses|case|diacriticals|hyphens|numeric strings|punctuation|white space)`, ByGroups(Keyword, NameBuiltin), nil}, - {`(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)`, Operator, nil}, - {`\b(and|or|is equal|equals|(is )?equal to|is not|isn't|isn't equal( to)?|is not equal( to)?|doesn't equal|does not equal|(is )?greater than|comes after|is not less than or equal( to)?|isn't less than or equal( to)?|(is )?less than|comes before|is not greater than or equal( to)?|isn't greater than or equal( to)?|(is )?greater than or equal( to)?|is not less than|isn't less than|does not come before|doesn't come before|(is )?less than or equal( to)?|is not greater than|isn't greater than|does not come after|doesn't come after|starts? with|begins? with|ends? 
with|contains?|does not contain|doesn't contain|is in|is contained by|is not in|is not contained by|isn't contained by|div|mod|not|(a )?(ref( to)?|reference to)|is|does)\b`, OperatorWord, nil}, - {`^(\s*(?:on|end)\s+)(zoomed|write to file|will zoom|will show|will select tab view item|will resize( sub views)?|will resign active|will quit|will pop up|will open|will move|will miniaturize|will hide|will finish launching|will display outline cell|will display item cell|will display cell|will display browser cell|will dismiss|will close|will become active|was miniaturized|was hidden|update toolbar item|update parameters|update menu item|shown|should zoom|should selection change|should select tab view item|should select row|should select item|should select column|should quit( after last window closed)?|should open( untitled)?|should expand item|should end editing|should collapse item|should close|should begin editing|selection changing|selection changed|selected tab view item|scroll wheel|rows changed|right mouse up|right mouse dragged|right mouse down|resized( sub views)?|resigned main|resigned key|resigned active|read from file|prepare table drop|prepare table drag|prepare outline drop|prepare outline drag|prepare drop|plugin loaded|parameters updated|panel ended|opened|open untitled|number of rows|number of items|number of browser rows|moved|mouse up|mouse moved|mouse exited|mouse entered|mouse dragged|mouse down|miniaturized|load data representation|launched|keyboard up|keyboard down|items changed|item value changed|item value|item expandable|idle|exposed|end editing|drop|drag( (entered|exited|updated))?|double clicked|document nib name|dialog ended|deminiaturized|data representation|conclude drop|column resized|column moved|column clicked|closed|clicked toolbar item|clicked|choose menu item|child of item|changed|change item value|change cell value|cell value changed|cell value|bounds changed|begin editing|became main|became key|awake from nib|alert 
ended|activated|action|accept table drop|accept outline drop)`, ByGroups(Keyword, NameFunction), nil}, - {`^(\s*)(in|on|script|to)(\s+)`, ByGroups(Text, Keyword, Text), nil}, - {`\b(as )(alias |application |boolean |class |constant |date |file |integer |list |number |POSIX file |real |record |reference |RGB color |script |text |unit types|(?:Unicode )?text|string)\b`, ByGroups(Keyword, NameClass), nil}, - {`\b(AppleScript|current application|false|linefeed|missing value|pi|quote|result|return|space|tab|text item delimiters|true|version)\b`, NameConstant, nil}, - {`\b(ASCII (character|number)|activate|beep|choose URL|choose application|choose color|choose file( name)?|choose folder|choose from list|choose remote application|clipboard info|close( access)?|copy|count|current date|delay|delete|display (alert|dialog)|do shell script|duplicate|exists|get eof|get volume settings|info for|launch|list (disks|folder)|load script|log|make|mount volume|new|offset|open( (for access|location))?|path to|print|quit|random number|read|round|run( script)?|say|scripting components|set (eof|the clipboard to|volume)|store script|summarize|system attribute|system info|the clipboard|time to GMT|write|quoted form)\b`, NameBuiltin, nil}, - {`\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|with|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b`, Keyword, nil}, - {`\b(global|local|prop(erty)?|set|get)\b`, Keyword, nil}, - {`\b(but|put|returning|the)\b`, NameBuiltin, nil}, - {`\b(attachment|attribute run|character|day|month|paragraph|word|year)s?\b`, NameBuiltin, nil}, - {`\b(about|above|against|apart from|around|aside from|at|below|beneath|beside|between|for|given|instead of|on|onto|out of|over|since)\b`, NameBuiltin, nil}, - {`\b(accepts arrow key|action method|active|alignment|allowed identifiers|allows branch selection|allows column reordering|allows column resizing|allows column selection|allows 
customization|allows editing text attributes|allows empty selection|allows mixed state|allows multiple selection|allows reordering|allows undo|alpha( value)?|alternate image|alternate increment value|alternate title|animation delay|associated file name|associated object|auto completes|auto display|auto enables items|auto repeat|auto resizes( outline column)?|auto save expanded items|auto save name|auto save table columns|auto saves configuration|auto scroll|auto sizes all columns to fit|auto sizes cells|background color|bezel state|bezel style|bezeled|border rect|border type|bordered|bounds( rotation)?|box type|button returned|button type|can choose directories|can choose files|can draw|can hide|cell( (background color|size|type))?|characters|class|click count|clicked( data)? column|clicked data item|clicked( data)? row|closeable|collating|color( (mode|panel))|command key down|configuration|content(s| (size|view( margins)?))?|context|continuous|control key down|control size|control tint|control view|controller visible|coordinate system|copies( on scroll)?|corner view|current cell|current column|current( field)? editor|current( menu)? item|current row|current tab view item|data source|default identifiers|delta (x|y|z)|destination window|directory|display mode|displayed cell|document( (edited|rect|view))?|double value|dragged column|dragged distance|dragged items|draws( cell)? background|draws grid|dynamically scrolls|echos bullets|edge|editable|edited( data)? column|edited data item|edited( data)? 
row|enabled|enclosing scroll view|ending page|error handling|event number|event type|excluded from windows menu|executable path|expanded|fax number|field editor|file kind|file name|file type|first responder|first visible column|flipped|floating|font( panel)?|formatter|frameworks path|frontmost|gave up|grid color|has data items|has horizontal ruler|has horizontal scroller|has parent data item|has resize indicator|has shadow|has sub menu|has vertical ruler|has vertical scroller|header cell|header view|hidden|hides when deactivated|highlights by|horizontal line scroll|horizontal page scroll|horizontal ruler view|horizontally resizable|icon image|id|identifier|ignores multiple clicks|image( (alignment|dims when disabled|frame style|scaling))?|imports graphics|increment value|indentation per level|indeterminate|index|integer value|intercell spacing|item height|key( (code|equivalent( modifier)?|window))?|knob thickness|label|last( visible)? column|leading offset|leaf|level|line scroll|loaded|localized sort|location|loop mode|main( (bunde|menu|window))?|marker follows cell|matrix mode|maximum( content)? size|maximum visible columns|menu( form representation)?|miniaturizable|miniaturized|minimized image|minimized title|minimum column width|minimum( content)? 
size|modal|modified|mouse down state|movie( (controller|file|rect))?|muted|name|needs display|next state|next text|number of tick marks|only tick mark values|opaque|open panel|option key down|outline table column|page scroll|pages across|pages down|palette label|pane splitter|parent data item|parent window|pasteboard|path( (names|separator))?|playing|plays every frame|plays selection only|position|preferred edge|preferred type|pressure|previous text|prompt|properties|prototype cell|pulls down|rate|released when closed|repeated|requested print time|required file type|resizable|resized column|resource path|returns records|reuses columns|rich text|roll over|row height|rulers visible|save panel|scripts path|scrollable|selectable( identifiers)?|selected cell|selected( data)? columns?|selected data items?|selected( data)? rows?|selected item identifier|selection by rect|send action on arrow key|sends action when done editing|separates columns|separator item|sequence number|services menu|shared frameworks path|shared support path|sheet|shift key down|shows alpha|shows state by|size( mode)?|smart insert delete enabled|sort case sensitivity|sort column|sort order|sort type|sorted( data rows)?|sound|source( mask)?|spell checking enabled|starting page|state|string value|sub menu|super menu|super view|tab key traverses cells|tab state|tab type|tab view|table view|tag|target( printer)?|text color|text container insert|text container origin|text returned|tick mark position|time stamp|title(d| (cell|font|height|position|rect))?|tool tip|toolbar|trailing offset|transparent|treat packages as directories|truncated labels|types|unmodified characters|update views|use sort indicator|user defaults|uses data source|uses ruler|uses threaded animation|uses title from previous column|value wraps|version|vertical( (line scroll|page scroll|ruler view))?|vertically resizable|view|visible( document rect)?|volume|width|window|windows menu|wraps|zoomable|zoomed)\b`, NameAttribute, nil}, - 
{`\b(action cell|alert reply|application|box|browser( cell)?|bundle|button( cell)?|cell|clip view|color well|color-panel|combo box( item)?|control|data( (cell|column|item|row|source))?|default entry|dialog reply|document|drag info|drawer|event|font(-panel)?|formatter|image( (cell|view))?|matrix|menu( item)?|item|movie( view)?|open-panel|outline view|panel|pasteboard|plugin|popup button|progress indicator|responder|save-panel|scroll view|secure text field( cell)?|slider|sound|split view|stepper|tab view( item)?|table( (column|header cell|header view|view))|text( (field( cell)?|view))?|toolbar( item)?|user-defaults|view|window)s?\b`, NameBuiltin, nil}, - {`\b(animate|append|call method|center|close drawer|close panel|display|display alert|display dialog|display panel|go|hide|highlight|increment|item for|load image|load movie|load nib|load panel|load sound|localized string|lock focus|log|open drawer|path for|pause|perform action|play|register|resume|scroll|select( all)?|show|size to fit|start|step back|step forward|stop|synchronize|unlock focus|update)\b`, NameBuiltin, nil}, - {`\b((in )?back of|(in )?front of|[0-9]+(st|nd|rd|th)|first|second|third|fourth|fifth|sixth|seventh|eighth|ninth|tenth|after|back|before|behind|every|front|index|last|middle|some|that|through|thru|where|whose)\b`, NameBuiltin, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`\b([a-zA-Z]\w*)\b`, NameVariable, nil}, - {`[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?`, LiteralNumberFloat, nil}, - {`[-+]?\d+`, LiteralNumberInteger, nil}, - }, - "comment": { - {`\(\*`, CommentMultiline, Push()}, - {`\*\)`, CommentMultiline, Pop(1)}, - {`[^*(]+`, CommentMultiline, nil}, - {`[*(]`, CommentMultiline, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/arduino.go b/vendor/github.com/alecthomas/chroma/lexers/a/arduino.go deleted file mode 100644 index 0edbe3f9343c..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/a/arduino.go +++ /dev/null @@ -1,114 +0,0 @@ 
-package a - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Arduino lexer. -var Arduino = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Arduino", - Aliases: []string{"arduino"}, - Filenames: []string{"*.ino"}, - MimeTypes: []string{"text/x-arduino"}, - EnsureNL: true, - }, - arduinoRules, -)) - -func arduinoRules() Rules { - return Rules{ - "statements": { - {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil}, - {`char(16_t|32_t)\b`, KeywordType, nil}, - {`(class)\b`, ByGroups(Keyword, Text), Push("classname")}, - {`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil}, - {`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, - {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, - {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, - {`\d+[LlUu]*`, LiteralNumberInteger, nil}, - {`\*/`, Error, nil}, - {`[~!%^&*+=|?:<>/-]`, Operator, nil}, - {`[()\[\],.]`, Punctuation, nil}, - {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, 
`extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, - {`(_Bool|_Complex|_Imaginary|array|atomic_bool|atomic_char|atomic_int|atomic_llong|atomic_long|atomic_schar|atomic_short|atomic_uchar|atomic_uint|atomic_ullong|atomic_ulong|atomic_ushort|auto|bool|boolean|BooleanVariables|Byte|byte|Char|char|char16_t|char32_t|class|complex|Const|const|const_cast|delete|double|dynamic_cast|enum|explicit|extern|Float|float|friend|inline|Int|int|int16_t|int32_t|int64_t|int8_t|Long|long|new|NULL|null|operator|private|PROGMEM|protected|public|register|reinterpret_cast|short|signed|sizeof|Static|static|static_cast|String|struct|typedef|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|virtual|Void|void|Volatile|volatile|word)\b`, KeywordType, nil}, - // Start of: Arduino-specific syntax - {`(and|final|If|Loop|loop|not|or|override|setup|Setup|throw|try|xor)\b`, Keyword, nil}, // Addition to keywords already defined by C++ - {`(ANALOG_MESSAGE|BIN|CHANGE|DEC|DEFAULT|DIGITAL_MESSAGE|EXTERNAL|FALLING|FIRMATA_STRING|HALF_PI|HEX|HIGH|INPUT|INPUT_PULLUP|INTERNAL|INTERNAL1V1|INTERNAL1V1|INTERNAL2V56|INTERNAL2V56|LED_BUILTIN|LED_BUILTIN_RX|LED_BUILTIN_TX|LOW|LSBFIRST|MSBFIRST|OCT|OUTPUT|PI|REPORT_ANALOG|REPORT_DIGITAL|RISING|SET_PIN_MODE|SYSEX_START|SYSTEM_RESET|TWO_PI)\b`, KeywordConstant, nil}, - {`(boolean|const|byte|word|string|String|array)\b`, NameVariable, nil}, - {`(Keyboard|KeyboardController|MouseController|SoftwareSerial|EthernetServer|EthernetClient|LiquidCrystal|RobotControl|GSMVoiceCall|EthernetUDP|EsploraTFT|HttpClient|RobotMotor|WiFiClient|GSMScanner|FileSystem|Scheduler|GSMServer|YunClient|YunServer|IPAddress|GSMClient|GSMModem|Keyboard|Ethernet|Console|GSMBand|Esplora|Stepper|Process|WiFiUDP|GSM_SMS|Mailbox|USBHost|Firmata|PImage|Client|Server|GSMPIN|FileIO|Bridge|Serial|EEPROM|Stream|Mouse|Audio|Servo|File|Task|GPRS|WiFi|Wire|TFT|GSM|SPI|SD)\b`, NameClass, nil}, - 
{`(abs|Abs|accept|ACos|acos|acosf|addParameter|analogRead|AnalogRead|analogReadResolution|AnalogReadResolution|analogReference|AnalogReference|analogWrite|AnalogWrite|analogWriteResolution|AnalogWriteResolution|answerCall|asin|ASin|asinf|atan|ATan|atan2|ATan2|atan2f|atanf|attach|attached|attachGPRS|attachInterrupt|AttachInterrupt|autoscroll|available|availableForWrite|background|beep|begin|beginPacket|beginSD|beginSMS|beginSpeaker|beginTFT|beginTransmission|beginWrite|bit|Bit|BitClear|bitClear|bitRead|BitRead|bitSet|BitSet|BitWrite|bitWrite|blink|blinkVersion|BSSID|buffer|byte|cbrt|cbrtf|Ceil|ceil|ceilf|changePIN|char|charAt|checkPIN|checkPUK|checkReg|circle|cityNameRead|cityNameWrite|clear|clearScreen|click|close|compareTo|compassRead|concat|config|connect|connected|constrain|Constrain|copysign|copysignf|cos|Cos|cosf|cosh|coshf|countryNameRead|countryNameWrite|createChar|cursor|debugPrint|degrees|Delay|delay|DelayMicroseconds|delayMicroseconds|detach|DetachInterrupt|detachInterrupt|DigitalPinToInterrupt|digitalPinToInterrupt|DigitalRead|digitalRead|DigitalWrite|digitalWrite|disconnect|display|displayLogos|drawBMP|drawCompass|encryptionType|end|endPacket|endSMS|endsWith|endTransmission|endWrite|equals|equalsIgnoreCase|exists|exitValue|Exp|exp|expf|fabs|fabsf|fdim|fdimf|fill|find|findUntil|float|floor|Floor|floorf|flush|fma|fmaf|fmax|fmaxf|fmin|fminf|fmod|fmodf|gatewayIP|get|getAsynchronously|getBand|getButton|getBytes|getCurrentCarrier|getIMEI|getKey|getModifiers|getOemKey|getPINUsed|getResult|getSignalStrength|getSocket|getVoiceCallStatus|getXChange|getYChange|hangCall|height|highByte|HighByte|home|hypot|hypotf|image|indexOf|int|interrupts|IPAddress|IRread|isActionDone|isAlpha|isAlphaNumeric|isAscii|isControl|isDigit|isDirectory|isfinite|isGraph|isHexadecimalDigit|isinf|isListening|isLowerCase|isnan|isPIN|isPressed|isPrintable|isPunct|isSpace|isUpperCase|isValid|isWhitespace|keyboardRead|keyPressed|keyReleased|knobRead|lastIndexOf|ldexp|ldexpf|leftToRight|length|li
ne|lineFollowConfig|listen|listenOnLocalhost|loadImage|localIP|log|Log|log10|log10f|logf|long|lowByte|LowByte|lrint|lrintf|lround|lroundf|macAddress|maintain|map|Map|Max|max|messageAvailable|Micros|micros|millis|Millis|Min|min|mkdir|motorsStop|motorsWrite|mouseDragged|mouseMoved|mousePressed|mouseReleased|move|noAutoscroll|noBlink|noBuffer|noCursor|noDisplay|noFill|noInterrupts|NoInterrupts|noListenOnLocalhost|noStroke|noTone|NoTone|onReceive|onRequest|open|openNextFile|overflow|parseCommand|parseFloat|parseInt|parsePacket|pauseMode|peek|PinMode|pinMode|playFile|playMelody|point|pointTo|position|Pow|pow|powf|prepare|press|print|printFirmwareVersion|println|printVersion|process|processInput|PulseIn|pulseIn|pulseInLong|PulseInLong|put|radians|random|Random|randomSeed|RandomSeed|read|readAccelerometer|readBlue|readButton|readBytes|readBytesUntil|readGreen|readJoystickButton|readJoystickSwitch|readJoystickX|readJoystickY|readLightSensor|readMessage|readMicrophone|readNetworks|readRed|readSlider|readString|readStringUntil|readTemperature|ready|rect|release|releaseAll|remoteIP|remoteNumber|remotePort|remove|replace|requestFrom|retrieveCallingNumber|rewindDirectory|rightToLeft|rmdir|robotNameRead|robotNameWrite|round|roundf|RSSI|run|runAsynchronously|running|runShellCommand|runShellCommandAsynchronously|scanNetworks|scrollDisplayLeft|scrollDisplayRight|seek|sendAnalog|sendDigitalPortPair|sendDigitalPorts|sendString|sendSysex|Serial_Available|Serial_Begin|Serial_End|Serial_Flush|Serial_Peek|Serial_Print|Serial_Println|Serial_Read|serialEvent|setBand|setBitOrder|setCharAt|setClockDivider|setCursor|setDataMode|setDNS|setFirmwareVersion|setMode|setPINUsed|setSpeed|setTextSize|setTimeout|ShiftIn|shiftIn|ShiftOut|shiftOut|shutdown|signbit|sin|Sin|sinf|sinh|sinhf|size|sizeof|Sq|sq|Sqrt|sqrt|sqrtf|SSID|startLoop|startsWith|step|stop|stroke|subnetMask|substring|switchPIN|tan|Tan|tanf|tanh|tanhf|tempoWrite|text|toCharArray|toInt|toLowerCase|tone|Tone|toUpperCase|transfer|trim|trunc|
truncf|tuneWrite|turn|updateIR|userNameRead|userNameWrite|voiceCall|waitContinue|width|WiFiServer|word|write|writeBlue|writeGreen|writeJSON|writeMessage|writeMicroseconds|writeRed|writeRGB|yield|Yield)\b`, NameFunction, nil}, - // End of: Arduino-specific syntax - {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, - {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, - {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, - {`(true|false|NULL)\b`, NameBuiltin, nil}, - {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "root": { - Include("whitespace"), - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, - Default(Push("statement")), - {Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil}, - {`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil}, - }, - "classname": { - {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, - {`\s*(?=>)`, Text, Pop(1)}, - }, - "whitespace": { - {`^#if\s+0`, CommentPreproc, Push("if0")}, - {`^#`, CommentPreproc, Push("macro")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`\\\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, 
CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - }, - "statement": { - Include("whitespace"), - Include("statements"), - {`[{}]`, Punctuation, nil}, - {`;`, Punctuation, Pop(1)}, - }, - "function": { - Include("whitespace"), - Include("statements"), - {`;`, Punctuation, nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - }, - "macro": { - {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, - {`[^/\n]+`, CommentPreproc, nil}, - {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, - {`//.*?\n`, CommentSingle, Pop(1)}, - {`/`, CommentPreproc, nil}, - {`(?<=\\)\n`, CommentPreproc, nil}, - {`\n`, CommentPreproc, Pop(1)}, - }, - "if0": { - {`^\s*#if.*?(?+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, - {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, - {`[})\].]`, Punctuation, nil}, - {`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")}, - {`function\b`, KeywordDeclaration, Push("slashstartsregex")}, - {`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil}, - {`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil}, - {`[@$a-zA-Z_]\w*`, NameOther, nil}, - 
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go b/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go deleted file mode 100644 index d8916d0e6b0e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go +++ /dev/null @@ -1,50 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Ballerina lexer. -var Ballerina = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Ballerina", - Aliases: []string{"ballerina"}, - Filenames: []string{"*.bal"}, - MimeTypes: []string{"text/x-ballerina"}, - DotAll: true, - }, - ballerinaRules, -)) - -func ballerinaRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b`, Keyword, nil}, - {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, - {`@[^\W\d][\w.]*`, NameDecorator, nil}, - {`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil}, - {`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, - {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, - {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, 
nil}, - {`([^\W\d]|\$)[\w$]*`, Name, nil}, - {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil}, - {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, - {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, - {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, - {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, - {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, - {`\n`, Text, nil}, - }, - "import": { - {`[\w.]+`, NameNamespace, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bash.go b/vendor/github.com/alecthomas/chroma/lexers/b/bash.go deleted file mode 100644 index 34b1e222d3bc..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/bash.go +++ /dev/null @@ -1,100 +0,0 @@ -package b - -import ( - "regexp" - - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// TODO(moorereason): can this be factored away? -var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`) - -// Bash lexer. 
-var Bash = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Bash", - Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"}, - Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"}, - MimeTypes: []string{"application/x-sh", "application/x-shellscript"}, - }, - bashRules, -).SetAnalyser(func(text string) float32 { - if bashAnalyserRe.FindString(text) != "" { - return 1.0 - } - return 0.0 -})) - -func bashRules() Rules { - return Rules{ - "root": { - Include("basic"), - {"`", LiteralStringBacktick, Push("backticks")}, - Include("data"), - Include("interp"), - }, - "interp": { - {`\$\(\(`, Keyword, Push("math")}, - {`\$\(`, Keyword, Push("paren")}, - {`\$\{#?`, LiteralStringInterpol, Push("curly")}, - {`\$[a-zA-Z_]\w*`, NameVariable, nil}, - {`\$(?:\d+|[#$?!_*@-])`, NameVariable, nil}, - {`\$`, Text, nil}, - }, - "basic": { - {`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil}, - {"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil}, - {`\A#!.+\n`, CommentPreproc, nil}, - {`#.*(\S|$)`, CommentSingle, nil}, - {`\\[\w\W]`, LiteralStringEscape, nil}, - {`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil}, - {`[\[\]{}()=]`, Operator, nil}, - {`<<<`, Operator, nil}, - {`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, - {`&&|\|\|`, Operator, nil}, - }, - "data": { - {`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Push("string")}, - 
{`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, - {`(?s)'.*?'`, LiteralStringSingle, nil}, - {`;`, Punctuation, nil}, - {`&`, Punctuation, nil}, - {`\|`, Punctuation, nil}, - {`\s+`, Text, nil}, - {`\d+(?= |$)`, LiteralNumber, nil}, - {"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil}, - {`<`, Text, nil}, - }, - "string": { - {`"`, LiteralStringDouble, Pop(1)}, - {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, LiteralStringDouble, nil}, - Include("interp"), - }, - "curly": { - {`\}`, LiteralStringInterpol, Pop(1)}, - {`:-`, Keyword, nil}, - {`\w+`, NameVariable, nil}, - {"[^}:\"\\'`$\\\\]+", Punctuation, nil}, - {`:`, Punctuation, nil}, - Include("root"), - }, - "paren": { - {`\)`, Keyword, Pop(1)}, - Include("root"), - }, - "math": { - {`\)\)`, Keyword, Pop(1)}, - {`[-+*/%^|&]|\*\*|\|\|`, Operator, nil}, - {`\d+#\d+`, LiteralNumber, nil}, - {`\d+#(?! )`, LiteralNumber, nil}, - {`\d+`, LiteralNumber, nil}, - Include("root"), - }, - "backticks": { - {"`", LiteralStringBacktick, Pop(1)}, - Include("root"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go b/vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go deleted file mode 100644 index 055cbb37af9c..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go +++ /dev/null @@ -1,27 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// BashSession lexer. 
-var BashSession = internal.Register(MustNewLazyLexer( - &Config{ - Name: "BashSession", - Aliases: []string{"bash-session", "console", "shell-session"}, - Filenames: []string{".sh-session"}, - MimeTypes: []string{"text/x-sh"}, - EnsureNL: true, - }, - bashsessionRules, -)) - -func bashsessionRules() Rules { - return Rules{ - "root": { - {`^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)`, ByGroups(GenericPrompt, Text, Using(Bash)), nil}, - {`^.+\n?`, GenericOutput, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/batch.go b/vendor/github.com/alecthomas/chroma/lexers/b/batch.go deleted file mode 100644 index f7a86776e5a9..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/batch.go +++ /dev/null @@ -1,198 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Batchfile lexer. -var Batchfile = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Batchfile", - Aliases: []string{"bat", "batch", "dosbatch", "winbatch"}, - Filenames: []string{"*.bat", "*.cmd"}, - MimeTypes: []string{"application/x-dos-batch"}, - CaseInsensitive: true, - }, - batchfileRules, -)) - -func batchfileRules() Rules { - return Rules{ - "root": { - {`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil}, - {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")}, - {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, - Include("redirect"), - {`[\n\x1a]+`, Text, nil}, - {`\(`, Punctuation, Push("root/compound")}, - {`@+`, Punctuation, nil}, - {`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")}, - {`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r 
,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")}, - {Words(``, `(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow")}, - {`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call")}, - {`call(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, Keyword, nil}, - {`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")}, - {`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")}, - {`for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^)`, Keyword, Push("for2", "for")}, - 
{`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label")}, - {`(if(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")}, - {`rem(((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*))`, CommentSingle, Push("follow")}, - {`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic")}, - {`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow")}, - Default(Push("follow")), - }, - "follow": { - {`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, 
CommentSingle), nil}, - Include("redirect"), - {`(?=[\n\x1a])`, Text, Pop(1)}, - {`\|\|?|&&?`, Punctuation, Pop(1)}, - Include("text"), - }, - "arithmetic": { - {`0[0-7]+`, LiteralNumberOct, nil}, - {`0x[\da-f]+`, LiteralNumberHex, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`[(),]+`, Punctuation, nil}, - {`([=+\-*/!~]|%|\^\^)+`, Operator, nil}, - {`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[\w\W])+`, UsingSelf("variable"), nil}, - {`(?=[\x00|&])`, Text, Pop(1)}, - Include("follow"), - }, - "call": { - {`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))`, ByGroups(Punctuation, NameLabel), Pop(1)}, - }, - "label": { - {`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[\w\W]|[^"%^\n\x1a&<>|])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)}, - }, - "redirect": { - {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil}, - {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r 
,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil}, - }, - "root/compound": { - {`\)`, Punctuation, Pop(1)}, - {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow/compound")}, - {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, - Include("redirect/compound"), - {`[\n\x1a]+`, Text, nil}, - {`\(`, Punctuation, Push("root/compound")}, - {`@+`, Punctuation, nil}, - {`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")}, - {`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r 
,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")}, - {Words(``, `(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow/compound")}, - {`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call/compound")}, - {`call(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, Keyword, nil}, - {`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")}, - {`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", 
"for")}, - {`for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^)`, Keyword, Push("for2", "for")}, - {`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label/compound")}, - {`(if(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")}, - {`rem(((?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))(?:(?:[^\n\x1a^)]|\^[\n\x1a]?[^)])*))`, CommentSingle, Push("follow/compound")}, - {`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic/compound")}, - {`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=)]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow/compound")}, - 
Default(Push("follow/compound")), - }, - "follow/compound": { - {`(?=\))`, Text, Pop(1)}, - {`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil}, - Include("redirect/compound"), - {`(?=[\n\x1a])`, Text, Pop(1)}, - {`\|\|?|&&?`, Punctuation, Pop(1)}, - Include("text"), - }, - "arithmetic/compound": { - {`(?=\))`, Text, Pop(1)}, - {`0[0-7]+`, LiteralNumberOct, nil}, - {`0x[\da-f]+`, LiteralNumberHex, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`[(),]+`, Punctuation, nil}, - {`([=+\-*/!~]|%|\^\^)+`, Operator, nil}, - {`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[^)])+`, UsingSelf("variable"), nil}, - {`(?=[\x00|&])`, Text, Pop(1)}, - Include("follow"), - }, - "call/compound": { - {`(?=\))`, Text, Pop(1)}, - {`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))`, ByGroups(Punctuation, NameLabel), Pop(1)}, - }, - "label/compound": { - {`(?=\))`, Text, Pop(1)}, - {`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[^)]|[^"%^\n\x1a&<>|)])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)}, - }, - "redirect/compound": { - {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r 
,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil}, - {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil}, - }, - "variable-or-escape": { - {`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil}, - {`%%|\^[\n\x1a]?(\^!|[\w\W])`, LiteralStringEscape, nil}, - }, - "string": { - {`"`, LiteralStringDouble, Pop(1)}, - {`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil}, - {`\^!|%%`, LiteralStringEscape, nil}, - {`[^"%^\n\x1a]+|[%^]`, LiteralStringDouble, nil}, - Default(Pop(1)), - }, - "sqstring": { - Include("variable-or-escape"), - {`[^%]+|%`, LiteralStringSingle, nil}, - }, - "bqstring": { - Include("variable-or-escape"), - {`[^%]+|%`, LiteralStringBacktick, nil}, - }, - "text": { - {`"`, LiteralStringDouble, Push("string")}, - Include("variable-or-escape"), - {`[^"%^\n\x1a&<>|\t\v\f\r ,;=\xa0\d)]+|.`, Text, nil}, - }, - "variable": { - {`"`, LiteralStringDouble, Push("string")}, - 
Include("variable-or-escape"), - {`[^"%^\n\x1a]+|.`, NameVariable, nil}, - }, - "for": { - {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(in)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\()`, ByGroups(UsingSelf("text"), Keyword, UsingSelf("text"), Punctuation), Pop(1)}, - Include("follow"), - }, - "for2": { - {`\)`, Punctuation, nil}, - {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(do(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(UsingSelf("text"), Keyword), Pop(1)}, - {`[\n\x1a]+`, Text, nil}, - Include("follow"), - }, - "for/f": { - {`(")((?:(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"])*?")([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(LiteralStringDouble, UsingSelf("string"), Text, Punctuation), nil}, - {`"`, LiteralStringDouble, Push("#pop", "for2", "string")}, - {`('(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?')([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(UsingSelf("sqstring"), Text, Punctuation), nil}, - {"(`(?:%%|(?:(?:%(?:\\*|(?:~[a-z]*(?:\\$[^:]+:)?)?\\d|[^%:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])[^=\\n\\x1a]*=(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])*)?)?%))|(?:\\^?![^!:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])[^=\\n\\x1a]*=(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])*)?)?\\^?!))|[\\w\\W])*?`)([\\n\\x1a\\t\\v\\f\\r ,;=\\xa0]*)(\\))", ByGroups(UsingSelf("bqstring"), Text, Punctuation), nil}, - Include("for2"), - }, - "for/l": { - {`-?\d+`, LiteralNumberInteger, nil}, - 
Include("for2"), - }, - "if": { - {`((?:cmdextversion|errorlevel)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\d+)`, ByGroups(Keyword, UsingSelf("text"), LiteralNumberInteger), Pop(1)}, - {`(defined(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text"), UsingSelf("variable")), Pop(1)}, - {`(exist(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text")), Pop(1)}, - {`((?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(UsingSelf("arithmetic"), OperatorWord, UsingSelf("arithmetic")), Pop(1)}, - 
{`(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)`, UsingSelf("text"), Push("#pop", "if2")}, - }, - "if2": { - {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(==)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), Operator, UsingSelf("text")), Pop(1)}, - {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), OperatorWord, UsingSelf("text")), Pop(1)}, - }, - "(?": { - {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, - {`\(`, Punctuation, Push("#pop", "else?", "root/compound")}, - Default(Pop(1)), - }, - "else?": { - {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, - {`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)}, - Default(Pop(1)), - }, - 
} -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go b/vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go deleted file mode 100644 index d6a0ae3347d5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go +++ /dev/null @@ -1,80 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Bibtex lexer. -var Bibtex = internal.Register(MustNewLazyLexer( - &Config{ - Name: "BibTeX", - Aliases: []string{"bib", "bibtex"}, - Filenames: []string{"*.bib"}, - MimeTypes: []string{"text/x-bibtex"}, - NotMultiline: true, - CaseInsensitive: true, - }, - bibtexRules, -)) - -func bibtexRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - {`@comment`, Comment, nil}, - {`@preamble`, NameClass, Push("closing-brace", "value", "opening-brace")}, - {`@string`, NameClass, Push("closing-brace", "field", "opening-brace")}, - {"@[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameClass, Push("closing-brace", "command-body", "opening-brace")}, - {`.+`, Comment, nil}, - }, - "opening-brace": { - Include("whitespace"), - {`[{(]`, Punctuation, Pop(1)}, - }, - "closing-brace": { - Include("whitespace"), - {`[})]`, Punctuation, Pop(1)}, - }, - "command-body": { - Include("whitespace"), - {`[^\s\,\}]+`, NameLabel, Push("#pop", "fields")}, - }, - "fields": { - Include("whitespace"), - {`,`, Punctuation, Push("field")}, - Default(Pop(1)), - }, - "field": { - Include("whitespace"), - {"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameAttribute, Push("value", "=")}, - Default(Pop(1)), - }, - "=": { - Include("whitespace"), - {`=`, Punctuation, Pop(1)}, - }, - "value": { - Include("whitespace"), - {"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameVariable, nil}, - {`"`, LiteralString, Push("quoted-string")}, - {`\{`, LiteralString, Push("braced-string")}, - {`[\d]+`, LiteralNumber, 
nil}, - {`#`, Punctuation, nil}, - Default(Pop(1)), - }, - "quoted-string": { - {`\{`, LiteralString, Push("braced-string")}, - {`"`, LiteralString, Pop(1)}, - {`[^\{\"]+`, LiteralString, nil}, - }, - "braced-string": { - {`\{`, LiteralString, Push()}, - {`\}`, LiteralString, Pop(1)}, - {`[^\{\}]+`, LiteralString, nil}, - }, - "whitespace": { - {`\s+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bicep.go b/vendor/github.com/alecthomas/chroma/lexers/b/bicep.go deleted file mode 100644 index ec136fbc565e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/bicep.go +++ /dev/null @@ -1,112 +0,0 @@ -package b - -import ( - "strings" - - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Bicep lexer. -var Bicep = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Bicep", - Aliases: []string{"bicep"}, - Filenames: []string{"*.bicep"}, - }, - bicepRules, -)) - -func bicepRules() Rules { - bicepFunctions := []string{ - "any", - "array", - "concat", - "contains", - "empty", - "first", - "intersection", - "items", - "last", - "length", - "min", - "max", - "range", - "skip", - "take", - "union", - "dateTimeAdd", - "utcNow", - "deployment", - "environment", - "loadFileAsBase64", - "loadTextContent", - "int", - "json", - "extensionResourceId", - "getSecret", - "list", - "listKeys", - "listKeyValue", - "listAccountSas", - "listSecrets", - "pickZones", - "reference", - "resourceId", - "subscriptionResourceId", - "tenantResourceId", - "managementGroup", - "resourceGroup", - "subscription", - "tenant", - "base64", - "base64ToJson", - "base64ToString", - "dataUri", - "dataUriToString", - "endsWith", - "format", - "guid", - "indexOf", - "lastIndexOf", - "length", - "newGuid", - "padLeft", - "replace", - "split", - "startsWith", - "string", - "substring", - "toLower", - "toUpper", - "trim", - "uniqueString", - "uri", - "uriComponent", - "uriComponentToString", - } - - return Rules{ 
- "root": { - {`//[^\n\r]+`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`([']?\w+[']?)(:)`, ByGroups(NameProperty, Punctuation), nil}, - {`\b('(resourceGroup|subscription|managementGroup|tenant)')\b`, KeywordNamespace, nil}, - {`'[\w\$\{\(\)\}\.]{1,}?'`, LiteralStringInterpol, nil}, - {`('''|').*?('''|')`, LiteralString, nil}, - {`\b(allowed|batchSize|description|maxLength|maxValue|metadata|minLength|minValue|secure)\b`, NameDecorator, nil}, - {`\b(az|sys)\.`, NameNamespace, nil}, - {`\b(` + strings.Join(bicepFunctions, "|") + `)\b`, NameFunction, nil}, - // https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/bicep-functions-logical - {`\b(bool)(\()`, ByGroups(NameFunction, Punctuation), nil}, - {`\b(for|if|in)\b`, Keyword, nil}, - {`\b(module|output|param|resource|var)\b`, KeywordDeclaration, nil}, - {`\b(array|bool|int|object|string)\b`, KeywordType, nil}, - // https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/operators - {`(>=|>|<=|<|==|!=|=~|!~|::|&&|\?\?|!|-|%|\*|\/|\+)`, Operator, nil}, - {`[\(\)\[\]\.:\?{}@=]`, Punctuation, nil}, - {`[\w_-]+`, Text, nil}, - {`\s+`, TextWhitespace, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go b/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go deleted file mode 100644 index 119f96b09845..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go +++ /dev/null @@ -1,52 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Blitzbasic lexer. 
-var Blitzbasic = internal.Register(MustNewLazyLexer( - &Config{ - Name: "BlitzBasic", - Aliases: []string{"blitzbasic", "b3d", "bplus"}, - Filenames: []string{"*.bb", "*.decls"}, - MimeTypes: []string{"text/x-bb"}, - CaseInsensitive: true, - }, - blitzbasicRules, -)) - -func blitzbasicRules() Rules { - return Rules{ - "root": { - {`[ \t]+`, Text, nil}, - {`;.*?\n`, CommentSingle, nil}, - {`"`, LiteralStringDouble, Push("string")}, - {`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil}, - {`\.[0-9]+(?!\.)`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`\$[0-9a-f]+`, LiteralNumberHex, nil}, - {`\%[10]+`, LiteralNumberBin, nil}, - {Words(`\b`, `\b`, `Shl`, `Shr`, `Sar`, `Mod`, `Or`, `And`, `Not`, `Abs`, `Sgn`, `Handle`, `Int`, `Float`, `Str`, `First`, `Last`, `Before`, `After`), Operator, nil}, - {`([+\-*/~=<>^])`, Operator, nil}, - {`[(),:\[\]\\]`, Punctuation, nil}, - {`\.([ \t]*)([a-z]\w*)`, NameLabel, nil}, - {`\b(New)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, - {`\b(Gosub|Goto)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameLabel), nil}, - {`\b(Object)\b([ \t]*)([.])([ \t]*)([a-z]\w*)\b`, ByGroups(Operator, Text, Punctuation, Text, NameClass), nil}, - {`\b([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?\b([ \t]*)(\()`, ByGroups(NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass, Text, Punctuation), nil}, - {`\b(Function)\b([ \t]+)([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(KeywordReserved, Text, NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass), nil}, - {`\b(Type)([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, - {`\b(Pi|True|False|Null)\b`, KeywordConstant, nil}, - {`\b(Local|Global|Const|Field|Dim)\b`, KeywordDeclaration, nil}, - {Words(`\b`, `\b`, `End`, `Return`, `Exit`, `Chr`, `Len`, `Asc`, `New`, `Delete`, `Insert`, `Include`, `Function`, `Type`, `If`, `Then`, `Else`, 
`ElseIf`, `EndIf`, `For`, `To`, `Next`, `Step`, `Each`, `While`, `Wend`, `Repeat`, `Until`, `Forever`, `Select`, `Case`, `Default`, `Goto`, `Gosub`, `Data`, `Read`, `Restore`), KeywordReserved, nil}, - {`([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(NameVariable, Text, KeywordType, Text, Punctuation, Text, NameClass), nil}, - }, - "string": { - {`""`, LiteralStringDouble, nil}, - {`"C?`, LiteralStringDouble, Pop(1)}, - {`[^"]+`, LiteralStringDouble, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go b/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go deleted file mode 100644 index dcf7360c1d09..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go +++ /dev/null @@ -1,28 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Bnf lexer. -var Bnf = internal.Register(MustNewLazyLexer( - &Config{ - Name: "BNF", - Aliases: []string{"bnf"}, - Filenames: []string{"*.bnf"}, - MimeTypes: []string{"text/x-bnf"}, - }, - bnfRules, -)) - -func bnfRules() Rules { - return Rules{ - "root": { - {`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil}, - {`::=`, Operator, nil}, - {`[^<>:]+`, Text, nil}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go b/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go deleted file mode 100644 index d35e9c6db974..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go +++ /dev/null @@ -1,38 +0,0 @@ -package b - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Brainfuck lexer. 
-var Brainfuck = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Brainfuck", - Aliases: []string{"brainfuck", "bf"}, - Filenames: []string{"*.bf", "*.b"}, - MimeTypes: []string{"application/x-brainfuck"}, - }, - brainfuckRules, -)) - -func brainfuckRules() Rules { - return Rules{ - "common": { - {`[.,]+`, NameTag, nil}, - {`[+-]+`, NameBuiltin, nil}, - {`[<>]+`, NameVariable, nil}, - {`[^.,+\-<>\[\]]+`, Comment, nil}, - }, - "root": { - {`\[`, Keyword, Push("loop")}, - {`\]`, Error, nil}, - Include("common"), - }, - "loop": { - {`\[`, Keyword, Push()}, - {`\]`, Keyword, Pop(1)}, - Include("common"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/c.go b/vendor/github.com/alecthomas/chroma/lexers/c/c.go deleted file mode 100644 index 3698c4f29771..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/c.go +++ /dev/null @@ -1,96 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// C lexer. 
-var C = internal.Register(MustNewLazyLexer( - &Config{ - Name: "C", - Aliases: []string{"c"}, - Filenames: []string{"*.c", "*.h", "*.idc", "*.x[bp]m"}, - MimeTypes: []string{"text/x-chdr", "text/x-csrc", "image/x-xbitmap", "image/x-xpixmap"}, - EnsureNL: true, - }, - cRules, -)) - -func cRules() Rules { - return Rules{ - "whitespace": { - {`^#if\s+0`, CommentPreproc, Push("if0")}, - {`^#`, CommentPreproc, Push("macro")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`\\\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - }, - "statements": { - {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, - {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, - {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, - {`\d+[LlUu]*`, LiteralNumberInteger, nil}, - {`\*/`, Error, nil}, - {`[~!%^&*+=|?:<>/-]`, Operator, nil}, - {`[()\[\],.]`, Punctuation, nil}, - {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, - {`(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil}, - {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, 
nil}, - {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, - {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, - {`(true|false|NULL)\b`, NameBuiltin, nil}, - {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "root": { - Include("whitespace"), - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, - Default(Push("statement")), - }, - "statement": { - Include("whitespace"), - Include("statements"), - {`[{}]`, Punctuation, nil}, - {`;`, Punctuation, Pop(1)}, - }, - "function": { - Include("whitespace"), - Include("statements"), - {`;`, Punctuation, nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - }, - "macro": { - {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, - {`[^/\n]+`, CommentPreproc, nil}, - {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, - {`//.*?\n`, CommentSingle, Pop(1)}, - {`/`, CommentPreproc, nil}, - {`(?<=\\)\n`, CommentPreproc, nil}, - {`\n`, CommentPreproc, Pop(1)}, - }, - "if0": { - {`^\s*#if.*?(?\]`, NameDecorator, Push("matcher")}, - // These cannot have matchers but may have things that look like - // matchers in their arguments, so we just parse 
as a subdirective. - {`try_files`, Keyword, Push("subdirective")}, - // These are special, they can nest more directives - {`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")}, - // Any other directive - {`[^\s#]+`, Keyword, Push("directive")}, - Include("base"), - }, - "matcher": { - {`\{`, Punctuation, Push("block")}, - // Not can be one-liner - {`not`, Keyword, Push("deep_not_matcher")}, - // Any other same-line matcher - {`[^\s#]+`, Keyword, Push("arguments")}, - // Terminators - {`\n`, Text, Pop(1)}, - {`\}`, Punctuation, Pop(1)}, - Include("base"), - }, - "block": { - {`\}`, Punctuation, Pop(2)}, - // Not can be one-liner - {`not`, Keyword, Push("not_matcher")}, - // Any other subdirective - {`[^\s#]+`, Keyword, Push("subdirective")}, - Include("base"), - }, - "nested_block": { - {`\}`, Punctuation, Pop(2)}, - // Matcher definition - {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, - // Something that starts with literally < is probably a docs stub - {`\<[^#]+\>`, Keyword, Push("nested_directive")}, - // Any other directive - {`[^\s#]+`, Keyword, Push("nested_directive")}, - Include("base"), - }, - "not_matcher": { - {`\}`, Punctuation, Pop(2)}, - {`\{(?=\s)`, Punctuation, Push("block")}, - {`[^\s#]+`, Keyword, Push("arguments")}, - {`\s+`, Text, nil}, - }, - "deep_not_matcher": { - {`\}`, Punctuation, Pop(2)}, - {`\{(?=\s)`, Punctuation, Push("block")}, - {`[^\s#]+`, Keyword, Push("deep_subdirective")}, - {`\s+`, Text, nil}, - }, - "directive": { - {`\{(?=\s)`, Punctuation, Push("block")}, - Include("matcher_token"), - Include("comments_pop_1"), - {`\n`, Text, Pop(1)}, - Include("base"), - }, - "nested_directive": { - {`\{(?=\s)`, Punctuation, Push("nested_block")}, - Include("matcher_token"), - Include("comments_pop_1"), - {`\n`, Text, Pop(1)}, - Include("base"), - }, - "subdirective": { - {`\{(?=\s)`, Punctuation, Push("block")}, - Include("comments_pop_1"), - {`\n`, Text, Pop(1)}, - Include("base"), - }, - "arguments": { - 
{`\{(?=\s)`, Punctuation, Push("block")}, - Include("comments_pop_2"), - {`\\\n`, Text, nil}, // Skip escaped newlines - {`\n`, Text, Pop(2)}, - Include("base"), - }, - "deep_subdirective": { - {`\{(?=\s)`, Punctuation, Push("block")}, - Include("comments_pop_3"), - {`\n`, Text, Pop(3)}, - Include("base"), - }, - "matcher_token": { - {`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher - {`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher - {`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher - {`\[\\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs - }, - "comments": { - {`^#.*\n`, CommentSingle, nil}, // Comment at start of line - {`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace - }, - "comments_pop_1": { - {`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line - {`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace - }, - "comments_pop_2": { - {`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line - {`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace - }, - "comments_pop_3": { - {`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line - {`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace - }, - "base": { - Include("comments"), - {`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil}, - {`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil}, - {`[a-z-]+/[a-z-+]+`, LiteralString, nil}, - {`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, - {`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder - {`\[(?=[^#{}$]+\])`, Punctuation, nil}, - {`\]|\|`, Punctuation, nil}, - {`[^\s#{}$\]]+`, LiteralString, nil}, - {`/[^\s#]*`, Name, nil}, - {`\s+`, Text, nil}, - }, - } -} - -// Caddyfile lexer. 
-var Caddyfile = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Caddyfile", - Aliases: []string{"caddyfile", "caddy"}, - Filenames: []string{"Caddyfile*"}, - MimeTypes: []string{}, - }, - caddyfileRules, -)) - -func caddyfileRules() Rules { - return Rules{ - "root": { - Include("comments"), - // Global options block - {`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")}, - // Snippets - {`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")}, - // Site label - {`[^#{(\s,]+`, GenericHeading, Push("label")}, - // Site label with placeholder - {`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")}, - {`\s+`, Text, nil}, - }, - "globals": { - {`\}`, Punctuation, Pop(1)}, - {`[^\s#]+`, Keyword, Push("directive")}, - Include("base"), - }, - "snippet": { - {`\}`, Punctuation, Pop(1)}, - // Matcher definition - {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, - // Any directive - {`[^\s#]+`, Keyword, Push("directive")}, - Include("base"), - }, - "label": { - // Allow multiple labels, comma separated, newlines after - // a comma means another label is coming - {`,\s*\n?`, Text, nil}, - {` `, Text, nil}, - // Site label with placeholder - {`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, - // Site label - {`[^#{(\s,]+`, GenericHeading, nil}, - // Comment after non-block label (hack because comments end in \n) - {`#.*\n`, CommentSingle, Push("site_block")}, - // Note: if \n, we'll never pop out of the site_block, it's valid - {`\{(?=\s)|\n`, Punctuation, Push("site_block")}, - }, - "site_block": { - {`\}`, Punctuation, Pop(2)}, - Include("site_block_common"), - }, - }.Merge(caddyfileCommonRules()) -} - -// Caddyfile directive-only lexer. 
-var CaddyfileDirectives = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Caddyfile Directives", - Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"}, - Filenames: []string{}, - MimeTypes: []string{}, - }, - caddyfileDirectivesRules, -)) - -func caddyfileDirectivesRules() Rules { - return Rules{ - // Same as "site_block" in Caddyfile - "root": { - Include("site_block_common"), - }, - }.Merge(caddyfileCommonRules()) -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go b/vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go deleted file mode 100644 index ec0d892c0fa5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go +++ /dev/null @@ -1,65 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Cap'N'Proto Proto lexer. -var CapNProto = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Cap'n Proto", - Aliases: []string{"capnp"}, - Filenames: []string{"*.capnp"}, - MimeTypes: []string{}, - }, - capNProtoRules, -)) - -func capNProtoRules() Rules { - return Rules{ - "root": { - {`#.*?$`, CommentSingle, nil}, - {`@[0-9a-zA-Z]*`, NameDecorator, nil}, - {`=`, Literal, Push("expression")}, - {`:`, NameClass, Push("type")}, - {`\$`, NameAttribute, Push("annotation")}, - {`(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed)\b`, Keyword, nil}, - {`[\w.]+`, Name, nil}, - {`[^#@=:$\w]+`, Text, nil}, - }, - "type": { - {`[^][=;,(){}$]+`, NameClass, nil}, - {`[[(]`, NameClass, Push("parentype")}, - Default(Pop(1)), - }, - "parentype": { - {`[^][;()]+`, NameClass, nil}, - {`[[(]`, NameClass, Push()}, - {`[])]`, NameClass, Pop(1)}, - Default(Pop(1)), - }, - "expression": { - {`[^][;,(){}$]+`, Literal, nil}, - {`[[(]`, Literal, Push("parenexp")}, - Default(Pop(1)), - }, - "parenexp": { - {`[^][;()]+`, Literal, nil}, - {`[[(]`, Literal, Push()}, - {`[])]`, Literal, Pop(1)}, - 
Default(Pop(1)), - }, - "annotation": { - {`[^][;,(){}=:]+`, NameAttribute, nil}, - {`[[(]`, NameAttribute, Push("annexp")}, - Default(Pop(1)), - }, - "annexp": { - {`[^][;()]+`, NameAttribute, nil}, - {`[[(]`, NameAttribute, Push()}, - {`[])]`, NameAttribute, Pop(1)}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go b/vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go deleted file mode 100644 index 9d424d4b2818..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go +++ /dev/null @@ -1,67 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Ceylon lexer. -var Ceylon = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Ceylon", - Aliases: []string{"ceylon"}, - Filenames: []string{"*.ceylon"}, - MimeTypes: []string{"text/x-ceylon"}, - DotAll: true, - }, - ceylonRules, -)) - -func ceylonRules() Rules { - return Rules{ - "root": { - {`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, - {`[^\S\n]+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*`, CommentMultiline, Push("comment")}, - {`(shared|abstract|formal|default|actual|variable|deprecated|small|late|literal|doc|by|see|throws|optional|license|tagged|final|native|annotation|sealed)\b`, NameDecorator, nil}, - {`(break|case|catch|continue|else|finally|for|in|if|return|switch|this|throw|try|while|is|exists|dynamic|nonempty|then|outer|assert|let)\b`, Keyword, nil}, - {`(abstracts|extends|satisfies|super|given|of|out|assign)\b`, KeywordDeclaration, nil}, - {`(function|value|void|new)\b`, KeywordType, nil}, - {`(assembly|module|package)(\s+)`, ByGroups(KeywordNamespace, Text), nil}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`(class|interface|object|alias)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, - {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), 
Push("import")}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'`, LiteralStringChar, nil}, - {"\".*``.*``.*\"", LiteralStringInterpol, nil}, - {`(\.)([a-z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, - {`[a-zA-Z_]\w*:`, NameLabel, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, - {`\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil}, - {`\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil}, - {`[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil}, - {`[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil}, - {`#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+`, LiteralNumberHex, nil}, - {`#[0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`\$([01]{4})(_[01]{4})+`, LiteralNumberBin, nil}, - {`\$[01]+`, LiteralNumberBin, nil}, - {`\d{1,3}(_\d{3})+[kMGTP]?`, LiteralNumberInteger, nil}, - {`[0-9]+[kMGTP]?`, LiteralNumberInteger, nil}, - {`\n`, Text, nil}, - }, - "class": { - {`[A-Za-z_]\w*`, NameClass, Pop(1)}, - }, - "import": { - {`[a-z][\w.]*`, NameNamespace, Pop(1)}, - }, - "comment": { - {`[^*/]`, CommentMultiline, nil}, - {`/\*`, CommentMultiline, Push()}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[*/]`, CommentMultiline, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go b/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go deleted file mode 100644 index f3050346f9e0..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go +++ /dev/null @@ -1,60 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Cfengine3 lexer. 
-var Cfengine3 = internal.Register(MustNewLazyLexer( - &Config{ - Name: "CFEngine3", - Aliases: []string{"cfengine3", "cf3"}, - Filenames: []string{"*.cf"}, - MimeTypes: []string{}, - }, - cfengine3Rules, -)) - -func cfengine3Rules() Rules { - return Rules{ - "root": { - {`#.*?\n`, Comment, nil}, - {`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil}, - {`(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()`, ByGroups(Keyword, Text, Keyword, Text, NameFunction, Punctuation), Push("arglist")}, - {`(body|bundle)(\s+)(\S+)(\s+)(\w+)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, - {`(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)`, ByGroups(Punctuation, NameVariable, Punctuation, Text, KeywordType, Text, Operator, Text), nil}, - {`(\S+)(\s*)(=>)(\s*)`, ByGroups(KeywordReserved, Text, Operator, Text), nil}, - {`"`, LiteralString, Push("string")}, - {`(\w+)(\()`, ByGroups(NameFunction, Punctuation), nil}, - {`([\w.!&|()]+)(::)`, ByGroups(NameClass, Punctuation), nil}, - {`(\w+)(:)`, ByGroups(KeywordDeclaration, Punctuation), nil}, - {`@[{(][^)}]+[})]`, NameVariable, nil}, - {`[(){},;]`, Punctuation, nil}, - {`=>`, Operator, nil}, - {`->`, Operator, nil}, - {`\d+\.\d+`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`\w+`, NameFunction, nil}, - {`\s+`, Text, nil}, - }, - "string": { - {`\$[{(]`, LiteralStringInterpol, Push("interpol")}, - {`\\.`, LiteralStringEscape, nil}, - {`"`, LiteralString, Pop(1)}, - {`\n`, LiteralString, nil}, - {`.`, LiteralString, nil}, - }, - "interpol": { - {`\$[{(]`, LiteralStringInterpol, Push()}, - {`[})]`, LiteralStringInterpol, Pop(1)}, - {`[^${()}]+`, LiteralStringInterpol, nil}, - }, - "arglist": { - {`\)`, Punctuation, Pop(1)}, - {`,`, Punctuation, nil}, - {`\w+`, NameVariable, nil}, - {`\s+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go b/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go deleted file mode 100644 
index 58db9aac9fd6..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go +++ /dev/null @@ -1,67 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Chaiscript lexer. -var Chaiscript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "ChaiScript", - Aliases: []string{"chai", "chaiscript"}, - Filenames: []string{"*.chai"}, - MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"}, - DotAll: true, - }, - chaiscriptRules, -)) - -func chaiscriptRules() Rules { - return Rules{ - "commentsandwhitespace": { - {`\s+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`^\#.*?\n`, CommentSingle, nil}, - }, - "slashstartsregex": { - Include("commentsandwhitespace"), - {`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, - {`(?=/)`, Text, Push("#pop", "badregex")}, - Default(Pop(1)), - }, - "badregex": { - {`\n`, Text, Pop(1)}, - }, - "root": { - Include("commentsandwhitespace"), - {`\n`, Text, nil}, - {`[^\S\n]+`, Text, nil}, - {`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")}, - {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, - {`[})\].]`, Punctuation, nil}, - {`[=+\-*/]`, Operator, nil}, - {`(for|in|while|do|break|return|continue|if|else|throw|try|catch)\b`, Keyword, Push("slashstartsregex")}, - {`(var)\b`, KeywordDeclaration, Push("slashstartsregex")}, - {`(attr|def|fun)\b`, KeywordReserved, nil}, - {`(true|false)\b`, KeywordConstant, nil}, - {`(eval|throw)\b`, NameBuiltin, nil}, - {"`\\S+`", NameBuiltin, nil}, - {`[$a-zA-Z_]\w*`, NameOther, nil}, - {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`"`, LiteralStringDouble, Push("dqstring")}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - }, - "dqstring": 
{ - {`\$\{[^"}]+?\}`, LiteralStringInterpol, nil}, - {`\$`, LiteralStringDouble, nil}, - {`\\\\`, LiteralStringDouble, nil}, - {`\\"`, LiteralStringDouble, nil}, - {`[^\\"$]+`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go b/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go deleted file mode 100644 index bd5fb9d9cc75..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go +++ /dev/null @@ -1,41 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" - . "github.com/alecthomas/chroma/lexers/p" // nolint -) - -// Cheetah lexer. -var Cheetah = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Cheetah", - Aliases: []string{"cheetah", "spitfire"}, - Filenames: []string{"*.tmpl", "*.spt"}, - MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"}, - }, - cheetahRules, -)) - -func cheetahRules() Rules { - return Rules{ - "root": { - {`(##[^\n]*)$`, ByGroups(Comment), nil}, - {`#[*](.|\n)*?[*]#`, Comment, nil}, - {`#end[^#\n]*(?:#|$)`, CommentPreproc, nil}, - {`#slurp$`, CommentPreproc, nil}, - {`(#[a-zA-Z]+)([^#\n]*)(#|$)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, - {`(\$)([a-zA-Z_][\w.]*\w)`, ByGroups(CommentPreproc, Using(Python)), nil}, - {`(\$\{!?)(.*?)(\})(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, - {`(?sx) - (.+?) # anything, followed by: - (?: - (?=\#[#a-zA-Z]*) | # an eval comment - (?=\$[a-zA-Z_{]) | # a substitution - \Z # end of string - ) - `, Other, nil}, - {`\s+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go b/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go deleted file mode 100644 index f99f906c208e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go +++ /dev/null @@ -1,42 +0,0 @@ -package c - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Clojure lexer. -var Clojure = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Clojure", - Aliases: []string{"clojure", "clj"}, - Filenames: []string{"*.clj"}, - MimeTypes: []string{"text/x-clojure", "application/x-clojure"}, - }, - clojureRules, -)) - -func clojureRules() Rules { - return Rules{ - "root": { - {`;.*$`, CommentSingle, nil}, - {`[,\s]+`, Text, nil}, - {`-?\d+\.\d+`, LiteralNumberFloat, nil}, - {`-?\d+`, LiteralNumberInteger, nil}, - {`0x-?[abcdef\d]+`, LiteralNumberHex, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, - {`\\(.|[a-z]+)`, LiteralStringChar, nil}, - {`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, - {"~@|[`\\'#^~&@]", Operator, nil}, - {Words(``, ` `, `.`, `def`, `do`, `fn`, `if`, `let`, `new`, `quote`, `var`, `loop`), Keyword, nil}, - {Words(``, ` `, `def-`, `defn`, `defn-`, `defmacro`, `defmulti`, `defmethod`, `defstruct`, `defonce`, `declare`, `definline`, `definterface`, `defprotocol`, `defrecord`, `deftype`, `defproject`, `ns`), KeywordDeclaration, nil}, - {Words(``, ` `, `*`, `+`, `-`, `->`, `/`, `<`, `<=`, `=`, `==`, `>`, `>=`, `..`, `accessor`, `agent`, `agent-errors`, `aget`, `alength`, `all-ns`, `alter`, `and`, `append-child`, `apply`, `array-map`, `aset`, `aset-boolean`, `aset-byte`, `aset-char`, `aset-double`, `aset-float`, `aset-int`, `aset-long`, `aset-short`, `assert`, `assoc`, `await`, `await-for`, `bean`, `binding`, `bit-and`, `bit-not`, `bit-or`, `bit-shift-left`, `bit-shift-right`, `bit-xor`, `boolean`, `branch?`, `butlast`, `byte`, `cast`, `char`, `children`, `class`, `clear-agent-errors`, `comment`, `commute`, `comp`, `comparator`, `complement`, `concat`, `conj`, `cons`, `constantly`, `cond`, `if-not`, `construct-proxy`, `contains?`, `count`, `create-ns`, `create-struct`, `cycle`, `dec`, `deref`, `difference`, `disj`, `dissoc`, `distinct`, 
`doall`, `doc`, `dorun`, `doseq`, `dosync`, `dotimes`, `doto`, `double`, `down`, `drop`, `drop-while`, `edit`, `end?`, `ensure`, `eval`, `every?`, `false?`, `ffirst`, `file-seq`, `filter`, `find`, `find-doc`, `find-ns`, `find-var`, `first`, `float`, `flush`, `for`, `fnseq`, `frest`, `gensym`, `get-proxy-class`, `get`, `hash-map`, `hash-set`, `identical?`, `identity`, `if-let`, `import`, `in-ns`, `inc`, `index`, `insert-child`, `insert-left`, `insert-right`, `inspect-table`, `inspect-tree`, `instance?`, `int`, `interleave`, `intersection`, `into`, `into-array`, `iterate`, `join`, `key`, `keys`, `keyword`, `keyword?`, `last`, `lazy-cat`, `lazy-cons`, `left`, `lefts`, `line-seq`, `list*`, `list`, `load`, `load-file`, `locking`, `long`, `loop`, `macroexpand`, `macroexpand-1`, `make-array`, `make-node`, `map`, `map-invert`, `map?`, `mapcat`, `max`, `max-key`, `memfn`, `merge`, `merge-with`, `meta`, `min`, `min-key`, `name`, `namespace`, `neg?`, `new`, `newline`, `next`, `nil?`, `node`, `not`, `not-any?`, `not-every?`, `not=`, `ns-imports`, `ns-interns`, `ns-map`, `ns-name`, `ns-publics`, `ns-refers`, `ns-resolve`, `ns-unmap`, `nth`, `nthrest`, `or`, `parse`, `partial`, `path`, `peek`, `pop`, `pos?`, `pr`, `pr-str`, `print`, `print-str`, `println`, `println-str`, `prn`, `prn-str`, `project`, `proxy`, `proxy-mappings`, `quot`, `rand`, `rand-int`, `range`, `re-find`, `re-groups`, `re-matcher`, `re-matches`, `re-pattern`, `re-seq`, `read`, `read-line`, `reduce`, `ref`, `ref-set`, `refer`, `rem`, `remove`, `remove-method`, `remove-ns`, `rename`, `rename-keys`, `repeat`, `replace`, `replicate`, `resolve`, `rest`, `resultset-seq`, `reverse`, `rfirst`, `right`, `rights`, `root`, `rrest`, `rseq`, `second`, `select`, `select-keys`, `send`, `send-off`, `seq`, `seq-zip`, `seq?`, `set`, `short`, `slurp`, `some`, `sort`, `sort-by`, `sorted-map`, `sorted-map-by`, `sorted-set`, `special-symbol?`, `split-at`, `split-with`, `str`, `string?`, `struct`, `struct-map`, `subs`, `subvec`, 
`symbol`, `symbol?`, `sync`, `take`, `take-nth`, `take-while`, `test`, `time`, `to-array`, `to-array-2d`, `tree-seq`, `true?`, `union`, `up`, `update-proxy`, `val`, `vals`, `var-get`, `var-set`, `var?`, `vector`, `vector-zip`, `vector?`, `when`, `when-first`, `when-let`, `when-not`, `with-local-vars`, `with-meta`, `with-open`, `with-out-str`, `xml-seq`, `xml-zip`, `zero?`, `zipmap`, `zipper`), NameBuiltin, nil}, - {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, - {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, - {`(\[|\])`, Punctuation, nil}, - {`(\{|\})`, Punctuation, nil}, - {`(\(|\))`, Punctuation, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go b/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go deleted file mode 100644 index 0e0708db49f8..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go +++ /dev/null @@ -1,48 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Cmake lexer. 
-var Cmake = internal.Register(MustNewLazyLexer( - &Config{ - Name: "CMake", - Aliases: []string{"cmake"}, - Filenames: []string{"*.cmake", "CMakeLists.txt"}, - MimeTypes: []string{"text/x-cmake"}, - }, - cmakeRules, -)) - -func cmakeRules() Rules { - return Rules{ - "root": { - {`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")}, - Include("keywords"), - Include("ws"), - }, - "args": { - {`\(`, Punctuation, Push()}, - {`\)`, Punctuation, Pop(1)}, - {`(\$\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, - {`(\$ENV\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, - {`(\$<)(.+?)(>)`, ByGroups(Operator, NameVariable, Operator), nil}, - {`(?s)".*?"`, LiteralStringDouble, nil}, - {`\\\S+`, LiteralString, nil}, - {`[^)$"# \t\n]+`, LiteralString, nil}, - {`\n`, Text, nil}, - Include("keywords"), - Include("ws"), - }, - "string": {}, - "keywords": { - {`\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|MSVC70|MSVC71|MSVC80|MSVC90)\b`, Keyword, nil}, - }, - "ws": { - {`[ \t]+`, Text, nil}, - {`#.*\n`, Comment, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go b/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go deleted file mode 100644 index 8b2f6d9bc781..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go +++ /dev/null @@ -1,55 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Cobol lexer. 
-var Cobol = internal.Register(MustNewLazyLexer( - &Config{ - Name: "COBOL", - Aliases: []string{"cobol"}, - Filenames: []string{"*.cob", "*.COB", "*.cpy", "*.CPY"}, - MimeTypes: []string{"text/x-cobol"}, - CaseInsensitive: true, - }, - cobolRules, -)) - -func cobolRules() Rules { - return Rules{ - "root": { - Include("comment"), - Include("strings"), - Include("core"), - Include("nums"), - {`[a-z0-9]([\w\-]*[a-z0-9]+)?`, NameVariable, nil}, - {`[ \t]+`, Text, nil}, - }, - "comment": { - {`(^.{6}[*/].*\n|^.{6}|\*>.*\n)`, Comment, nil}, - }, - "core": { - {`(^|(?<=[^\w\-]))(ALL\s+)?((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)\s*($|(?=[^\w\-]))`, NameConstant, nil}, - {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCEPT`, `ADD`, `ALLOCATE`, `CALL`, `CANCEL`, `CLOSE`, `COMPUTE`, `CONFIGURATION`, `CONTINUE`, `DATA`, `DELETE`, `DISPLAY`, `DIVIDE`, `DIVISION`, `ELSE`, `END`, `END-ACCEPT`, `END-ADD`, `END-CALL`, `END-COMPUTE`, `END-DELETE`, `END-DISPLAY`, `END-DIVIDE`, `END-EVALUATE`, `END-IF`, `END-MULTIPLY`, `END-OF-PAGE`, `END-PERFORM`, `END-READ`, `END-RETURN`, `END-REWRITE`, `END-SEARCH`, `END-START`, `END-STRING`, `END-SUBTRACT`, `END-UNSTRING`, `END-WRITE`, `ENVIRONMENT`, `EVALUATE`, `EXIT`, `FD`, `FILE`, `FILE-CONTROL`, `FOREVER`, `FREE`, `GENERATE`, `GO`, `GOBACK`, `IDENTIFICATION`, `IF`, `INITIALIZE`, `INITIATE`, `INPUT-OUTPUT`, `INSPECT`, `INVOKE`, `I-O-CONTROL`, `LINKAGE`, `LOCAL-STORAGE`, `MERGE`, `MOVE`, `MULTIPLY`, `OPEN`, `PERFORM`, `PROCEDURE`, `PROGRAM-ID`, `RAISE`, `READ`, `RELEASE`, `RESUME`, `RETURN`, `REWRITE`, `SCREEN`, `SD`, `SEARCH`, `SECTION`, `SET`, `SORT`, `START`, `STOP`, `STRING`, `SUBTRACT`, `SUPPRESS`, `TERMINATE`, `THEN`, `UNLOCK`, `UNSTRING`, `USE`, `VALIDATE`, `WORKING-STORAGE`, `WRITE`), KeywordReserved, nil}, - {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCESS`, `ADDRESS`, `ADVANCING`, `AFTER`, `ALL`, `ALPHABET`, `ALPHABETIC`, `ALPHABETIC-LOWER`, `ALPHABETIC-UPPER`, `ALPHANUMERIC`, `ALPHANUMERIC-EDITED`, `ALSO`, 
`ALTER`, `ALTERNATEANY`, `ARE`, `AREA`, `AREAS`, `ARGUMENT-NUMBER`, `ARGUMENT-VALUE`, `AS`, `ASCENDING`, `ASSIGN`, `AT`, `AUTO`, `AUTO-SKIP`, `AUTOMATIC`, `AUTOTERMINATE`, `BACKGROUND-COLOR`, `BASED`, `BEEP`, `BEFORE`, `BELL`, `BLANK`, `BLINK`, `BLOCK`, `BOTTOM`, `BY`, `BYTE-LENGTH`, `CHAINING`, `CHARACTER`, `CHARACTERS`, `CLASS`, `CODE`, `CODE-SET`, `COL`, `COLLATING`, `COLS`, `COLUMN`, `COLUMNS`, `COMMA`, `COMMAND-LINE`, `COMMIT`, `COMMON`, `CONSTANT`, `CONTAINS`, `CONTENT`, `CONTROL`, `CONTROLS`, `CONVERTING`, `COPY`, `CORR`, `CORRESPONDING`, `COUNT`, `CRT`, `CURRENCY`, `CURSOR`, `CYCLE`, `DATE`, `DAY`, `DAY-OF-WEEK`, `DE`, `DEBUGGING`, `DECIMAL-POINT`, `DECLARATIVES`, `DEFAULT`, `DELIMITED`, `DELIMITER`, `DEPENDING`, `DESCENDING`, `DETAIL`, `DISK`, `DOWN`, `DUPLICATES`, `DYNAMIC`, `EBCDIC`, `ENTRY`, `ENVIRONMENT-NAME`, `ENVIRONMENT-VALUE`, `EOL`, `EOP`, `EOS`, `ERASE`, `ERROR`, `ESCAPE`, `EXCEPTION`, `EXCLUSIVE`, `EXTEND`, `EXTERNAL`, `FILE-ID`, `FILLER`, `FINAL`, `FIRST`, `FIXED`, `FLOAT-LONG`, `FLOAT-SHORT`, `FOOTING`, `FOR`, `FOREGROUND-COLOR`, `FORMAT`, `FROM`, `FULL`, `FUNCTION`, `FUNCTION-ID`, `GIVING`, `GLOBAL`, `GROUP`, `HEADING`, `HIGHLIGHT`, `I-O`, `ID`, `IGNORE`, `IGNORING`, `IN`, `INDEX`, `INDEXED`, `INDICATE`, `INITIAL`, `INITIALIZED`, `INPUT`, `INTO`, `INTRINSIC`, `INVALID`, `IS`, `JUST`, `JUSTIFIED`, `KEY`, `LABEL`, `LAST`, `LEADING`, `LEFT`, `LENGTH`, `LIMIT`, `LIMITS`, `LINAGE`, `LINAGE-COUNTER`, `LINE`, `LINES`, `LOCALE`, `LOCK`, `LOWLIGHT`, `MANUAL`, `MEMORY`, `MINUS`, `MODE`, `MULTIPLE`, `NATIONAL`, `NATIONAL-EDITED`, `NATIVE`, `NEGATIVE`, `NEXT`, `NO`, `NULL`, `NULLS`, `NUMBER`, `NUMBERS`, `NUMERIC`, `NUMERIC-EDITED`, `OBJECT-COMPUTER`, `OCCURS`, `OF`, `OFF`, `OMITTED`, `ON`, `ONLY`, `OPTIONAL`, `ORDER`, `ORGANIZATION`, `OTHER`, `OUTPUT`, `OVERFLOW`, `OVERLINE`, `PACKED-DECIMAL`, `PADDING`, `PAGE`, `PARAGRAPH`, `PLUS`, `POINTER`, `POSITION`, `POSITIVE`, `PRESENT`, `PREVIOUS`, `PRINTER`, `PRINTING`, `PROCEDURE-POINTER`, `PROCEDURES`, 
`PROCEED`, `PROGRAM`, `PROGRAM-POINTER`, `PROMPT`, `QUOTE`, `QUOTES`, `RANDOM`, `RD`, `RECORD`, `RECORDING`, `RECORDS`, `RECURSIVE`, `REDEFINES`, `REEL`, `REFERENCE`, `RELATIVE`, `REMAINDER`, `REMOVAL`, `RENAMES`, `REPLACING`, `REPORT`, `REPORTING`, `REPORTS`, `REPOSITORY`, `REQUIRED`, `RESERVE`, `RETURNING`, `REVERSE-VIDEO`, `REWIND`, `RIGHT`, `ROLLBACK`, `ROUNDED`, `RUN`, `SAME`, `SCROLL`, `SECURE`, `SEGMENT-LIMIT`, `SELECT`, `SENTENCE`, `SEPARATE`, `SEQUENCE`, `SEQUENTIAL`, `SHARING`, `SIGN`, `SIGNED`, `SIGNED-INT`, `SIGNED-LONG`, `SIGNED-SHORT`, `SIZE`, `SORT-MERGE`, `SOURCE`, `SOURCE-COMPUTER`, `SPECIAL-NAMES`, `STANDARD`, `STANDARD-1`, `STANDARD-2`, `STATUS`, `SUM`, `SYMBOLIC`, `SYNC`, `SYNCHRONIZED`, `TALLYING`, `TAPE`, `TEST`, `THROUGH`, `THRU`, `TIME`, `TIMES`, `TO`, `TOP`, `TRAILING`, `TRANSFORM`, `TYPE`, `UNDERLINE`, `UNIT`, `UNSIGNED`, `UNSIGNED-INT`, `UNSIGNED-LONG`, `UNSIGNED-SHORT`, `UNTIL`, `UP`, `UPDATE`, `UPON`, `USAGE`, `USING`, `VALUE`, `VALUES`, `VARYING`, `WAIT`, `WHEN`, `WITH`, `WORDS`, `YYYYDDD`, `YYYYMMDD`), KeywordPseudo, nil}, - {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACTIVE-CLASS`, `ALIGNED`, `ANYCASE`, `ARITHMETIC`, `ATTRIBUTE`, `B-AND`, `B-NOT`, `B-OR`, `B-XOR`, `BIT`, `BOOLEAN`, `CD`, `CENTER`, `CF`, `CH`, `CHAIN`, `CLASS-ID`, `CLASSIFICATION`, `COMMUNICATION`, `CONDITION`, `DATA-POINTER`, `DESTINATION`, `DISABLE`, `EC`, `EGI`, `EMI`, `ENABLE`, `END-RECEIVE`, `ENTRY-CONVENTION`, `EO`, `ESI`, `EXCEPTION-OBJECT`, `EXPANDS`, `FACTORY`, `FLOAT-BINARY-16`, `FLOAT-BINARY-34`, `FLOAT-BINARY-7`, `FLOAT-DECIMAL-16`, `FLOAT-DECIMAL-34`, `FLOAT-EXTENDED`, `FORMAT`, `FUNCTION-POINTER`, `GET`, `GROUP-USAGE`, `IMPLEMENTS`, `INFINITY`, `INHERITS`, `INTERFACE`, `INTERFACE-ID`, `INVOKE`, `LC_ALL`, `LC_COLLATE`, `LC_CTYPE`, `LC_MESSAGES`, `LC_MONETARY`, `LC_NUMERIC`, `LC_TIME`, `LINE-COUNTER`, `MESSAGE`, `METHOD`, `METHOD-ID`, `NESTED`, `NONE`, `NORMAL`, `OBJECT`, `OBJECT-REFERENCE`, `OPTIONS`, `OVERRIDE`, `PAGE-COUNTER`, `PF`, `PH`, 
`PROPERTY`, `PROTOTYPE`, `PURGE`, `QUEUE`, `RAISE`, `RAISING`, `RECEIVE`, `RELATION`, `REPLACE`, `REPRESENTS-NOT-A-NUMBER`, `RESET`, `RESUME`, `RETRY`, `RF`, `RH`, `SECONDS`, `SEGMENT`, `SELF`, `SEND`, `SOURCES`, `STATEMENT`, `STEP`, `STRONG`, `SUB-QUEUE-1`, `SUB-QUEUE-2`, `SUB-QUEUE-3`, `SUPER`, `SYMBOL`, `SYSTEM-DEFAULT`, `TABLE`, `TERMINAL`, `TEXT`, `TYPEDEF`, `UCS-4`, `UNIVERSAL`, `USER-DEFAULT`, `UTF-16`, `UTF-8`, `VAL-STATUS`, `VALID`, `VALIDATE`, `VALIDATE-STATUS`), Error, nil}, - {`(^|(?<=[^\w\-]))(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|BINARY-C-LONG|BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|BINARY)\s*($|(?=[^\w\-]))`, KeywordType, nil}, - {`(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)`, Operator, nil}, - {`([(),;:&%.])`, Punctuation, nil}, - {`(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*($|(?=[^\w\-]))`, NameFunction, nil}, - {`(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))`, NameBuiltin, nil}, - {`(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|greater|less|than|not|and|or)\s*($|(?=[^\w\-]))`, OperatorWord, nil}, - }, - "strings": { - {`"[^"\n]*("|\n)`, LiteralStringDouble, nil}, - {`'[^'\n]*('|\n)`, LiteralStringSingle, nil}, - }, - "nums": { - {`\d+(\s*|\.$|$)`, LiteralNumberInteger, nil}, - {`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil}, - 
{`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go b/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go deleted file mode 100644 index 381a8fed86df..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go +++ /dev/null @@ -1,95 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Coffeescript lexer. -var Coffeescript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "CoffeeScript", - Aliases: []string{"coffee-script", "coffeescript", "coffee"}, - Filenames: []string{"*.coffee"}, - MimeTypes: []string{"text/coffeescript"}, - NotMultiline: true, - DotAll: true, - }, - coffeescriptRules, -)) - -func coffeescriptRules() Rules { - return Rules{ - "commentsandwhitespace": { - {`\s+`, Text, nil}, - {`###[^#].*?###`, CommentMultiline, nil}, - {`#(?!##[^#]).*?\n`, CommentSingle, nil}, - }, - "multilineregex": { - {`[^/#]+`, LiteralStringRegex, nil}, - {`///([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, - {`#\{`, LiteralStringInterpol, Push("interpoling_string")}, - {`[/#]`, LiteralStringRegex, nil}, - }, - "slashstartsregex": { - Include("commentsandwhitespace"), - {`///`, LiteralStringRegex, Push("#pop", "multilineregex")}, - {`/(?! 
)(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, - {`/`, Operator, nil}, - Default(Pop(1)), - }, - "root": { - Include("commentsandwhitespace"), - {`^(?=\s|/)`, Text, Push("slashstartsregex")}, - {"\\+\\+|~|&&|\\band\\b|\\bor\\b|\\bis\\b|\\bisnt\\b|\\bnot\\b|\\?|:|\\|\\||\\\\(?=\\n)|(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\\|\\^/])=?", Operator, Push("slashstartsregex")}, - {`(?:\([^()]*\))?\s*[=-]>`, NameFunction, Push("slashstartsregex")}, - {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, - {`[})\].]`, Punctuation, nil}, - {`(?=|<|>|==`, Operator, nil}, - {`mod\b`, Operator, nil}, - {`(eq|lt|gt|lte|gte|not|is|and|or)\b`, Operator, nil}, - {`\|\||&&`, Operator, nil}, - {`\?`, Operator, nil}, - {`"`, LiteralStringDouble, Push("string")}, - {`'.*?'`, LiteralStringSingle, nil}, - {`\d+`, LiteralNumber, nil}, - {`(if|else|len|var|xml|default|break|switch|component|property|function|do|try|catch|in|continue|for|return|while|required|any|array|binary|boolean|component|date|guid|numeric|query|string|struct|uuid|case)\b`, Keyword, nil}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`(application|session|client|cookie|super|this|variables|arguments)\b`, NameConstant, nil}, - {`([a-z_$][\w.]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, - {`[a-z_$][\w.]*`, NameVariable, nil}, - {`[()\[\]{};:,.\\]`, Punctuation, nil}, - {`\s+`, Text, nil}, - }, - "string": { - {`""`, LiteralStringDouble, nil}, - {`#.+?#`, LiteralStringInterpol, nil}, - {`[^"#]+`, LiteralStringDouble, nil}, - {`#`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/coq.go b/vendor/github.com/alecthomas/chroma/lexers/c/coq.go deleted file mode 100644 index e0103ef66bcb..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/coq.go +++ /dev/null @@ -1,67 +0,0 @@ -package c - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Coq lexer. -var Coq = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Coq", - Aliases: []string{"coq"}, - Filenames: []string{"*.v"}, - MimeTypes: []string{"text/x-coq"}, - }, - coqRules, -)) - -func coqRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil}, - {`\(\*`, Comment, Push("comment")}, - {Words(`\b`, `\b`, `Section`, `Module`, `End`, `Require`, `Import`, `Export`, `Variable`, `Variables`, `Parameter`, `Parameters`, `Axiom`, `Hypothesis`, `Hypotheses`, `Notation`, `Local`, `Tactic`, `Reserved`, `Scope`, `Open`, `Close`, `Bind`, `Delimit`, `Definition`, `Let`, `Ltac`, `Fixpoint`, `CoFixpoint`, `Morphism`, `Relation`, `Implicit`, `Arguments`, `Set`, `Unset`, `Contextual`, `Strict`, `Prenex`, `Implicits`, `Inductive`, `CoInductive`, `Record`, `Structure`, `Canonical`, `Coercion`, `Theorem`, `Lemma`, `Corollary`, `Proposition`, `Fact`, `Remark`, `Example`, `Proof`, `Goal`, `Save`, `Qed`, `Defined`, `Hint`, `Resolve`, `Rewrite`, `View`, `Search`, `Show`, `Print`, `Printing`, `All`, `Graph`, `Projections`, `inside`, `outside`, `Check`, `Global`, `Instance`, `Class`, `Existing`, `Universe`, `Polymorphic`, `Monomorphic`, `Context`), KeywordNamespace, nil}, - {Words(`\b`, `\b`, `forall`, `exists`, `exists2`, `fun`, `fix`, `cofix`, `struct`, `match`, `end`, `in`, `return`, `let`, `if`, `is`, `then`, `else`, `for`, `of`, `nosimpl`, `with`, `as`), Keyword, nil}, - {Words(`\b`, `\b`, `Type`, `Prop`), KeywordType, nil}, - {Words(`\b`, `\b`, `pose`, `set`, `move`, `case`, `elim`, `apply`, `clear`, `hnf`, `intro`, `intros`, `generalize`, `rename`, `pattern`, `after`, `destruct`, `induction`, `using`, `refine`, `inversion`, `injection`, `rewrite`, `congr`, `unlock`, `compute`, `ring`, `field`, `replace`, `fold`, `unfold`, `change`, `cutrewrite`, `simpl`, `have`, `suff`, `wlog`, `suffices`, `without`, 
`loss`, `nat_norm`, `assert`, `cut`, `trivial`, `revert`, `bool_congr`, `nat_congr`, `symmetry`, `transitivity`, `auto`, `split`, `left`, `right`, `autorewrite`, `tauto`, `setoid_rewrite`, `intuition`, `eauto`, `eapply`, `econstructor`, `etransitivity`, `constructor`, `erewrite`, `red`, `cbv`, `lazy`, `vm_compute`, `native_compute`, `subst`), Keyword, nil}, - {Words(`\b`, `\b`, `by`, `done`, `exact`, `reflexivity`, `tauto`, `romega`, `omega`, `assumption`, `solve`, `contradiction`, `discriminate`, `congruence`), KeywordPseudo, nil}, - {Words(`\b`, `\b`, `do`, `last`, `first`, `try`, `idtac`, `repeat`), KeywordReserved, nil}, - {`\b([A-Z][\w\']*)`, Name, nil}, - {"(\u03bb|\u03a0|\\|\\}|\\{\\||\\\\/|/\\\\|=>|~|\\}|\\|]|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<->|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, - {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, - {`\b(unit|nat|bool|string|ascii|list)\b`, KeywordType, nil}, - {`[^\W\d][\w']*`, Name, nil}, - {`\d[\d_]*`, LiteralNumberInteger, nil}, - {`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, - {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, - {`0[bB][01][01_]*`, LiteralNumberBin, nil}, - {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, - {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, - {`'.'`, LiteralStringChar, nil}, - {`'`, Keyword, nil}, - {`"`, LiteralStringDouble, Push("string")}, - {`[~?][a-z][\w\']*:`, Name, nil}, - }, - "comment": { - {`[^(*)]+`, Comment, nil}, - {`\(\*`, Comment, Push()}, - {`\*\)`, Comment, Pop(1)}, - {`[(*)]`, Comment, nil}, - }, - "string": { - {`[^"]+`, LiteralStringDouble, nil}, - {`""`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - "dotted": { - {`\s+`, Text, nil}, - {`\.`, Punctuation, nil}, - {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, - {`[A-Z][\w\']*`, NameClass, Pop(1)}, - {`[a-z][a-z0-9_\']*`, Name, 
Pop(1)}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go b/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go deleted file mode 100644 index 3f4a1fde05cd..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go +++ /dev/null @@ -1,110 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// CPP lexer. -var CPP = internal.Register(MustNewLazyLexer( - &Config{ - Name: "C++", - Aliases: []string{"cpp", "c++"}, - Filenames: []string{"*.cpp", "*.hpp", "*.c++", "*.h++", "*.cc", "*.hh", "*.cxx", "*.hxx", "*.C", "*.H", "*.cp", "*.CPP"}, - MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"}, - EnsureNL: true, - }, - cppRules, -)) - -func cppRules() Rules { - return Rules{ - "statements": { - {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil}, - {`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")}, - {`(class|struct|enum|union)\b(\s*)`, ByGroups(Keyword, Text), Push("classname")}, - {`\[\[.+\]\]`, NameAttribute, nil}, - {`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil}, - {`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - 
{`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, - {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*`, LiteralNumberHex, nil}, - {`0('?[0-7]+)+[LlUu]*`, LiteralNumberOct, nil}, - {`0[Bb][01]('?[01]+)*[LlUu]*`, LiteralNumberBin, nil}, - {`[0-9]('?[0-9]+)*[LlUu]*`, LiteralNumberInteger, nil}, - {`\*/`, Error, nil}, - {`[~!%^&*+=|?:<>/-]`, Operator, nil}, - {`[()\[\],.]`, Punctuation, nil}, - {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, - {`(bool|int|long|float|short|double|char((8|16|32)_t)?|wchar_t|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil}, - {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, - {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, - {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, - {`(true|false|NULL)\b`, NameBuiltin, nil}, - {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "root": { - Include("whitespace"), - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), 
UsingSelf("root"), Punctuation), nil}, - Default(Push("statement")), - {Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil}, - {`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil}, - }, - "classname": { - {`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil}, - {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, - {`\s*(?=[>{])`, Text, Pop(1)}, - }, - "whitespace": { - {`^#if\s+0`, CommentPreproc, Push("if0")}, - {`^#`, CommentPreproc, Push("macro")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`\\\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - }, - "statement": { - Include("whitespace"), - Include("statements"), - {`[{]`, Punctuation, Push("root")}, - {`[;}]`, Punctuation, Pop(1)}, - }, - "function": { - Include("whitespace"), - Include("statements"), - {`;`, Punctuation, nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - }, - "macro": { - {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, - {`[^/\n]+`, CommentPreproc, nil}, - {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, - {`//.*?\n`, CommentSingle, Pop(1)}, - {`/`, CommentPreproc, nil}, - {`(?<=\\)\n`, CommentPreproc, nil}, - {`\n`, CommentPreproc, Pop(1)}, - }, - "if0": { - {`^\s*#if.*?(?=~!@#%^&|`?-]+", Operator, nil}, - { - 
`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`, - UsingByGroup( - internal.Get, - 1, 6, - NameBuiltin, TextWhitespace, Keyword, TextWhitespace, - LiteralStringHeredoc, LiteralStringHeredoc, LiteralStringHeredoc, - ), - nil, - }, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`0x[0-9a-f]+`, LiteralNumberHex, nil}, - {`[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, LiteralNumberHex, nil}, - {`\.[0-9]+(e[+-]?[0-9]+)?`, Error, nil}, - {`-?[0-9]+(\.[0-9])?(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`'`, LiteralStringSingle, Push("string")}, - {`"`, LiteralStringName, Push("quoted-ident")}, - {`\$\$`, LiteralStringHeredoc, Push("dollar-string")}, - {`[a-z_]\w*`, Name, nil}, - {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, - {`[;:()\[\]\{\},.]`, Punctuation, nil}, - }, - "multiline-comments": { - {`/\*`, CommentMultiline, Push("multiline-comments")}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[^/*]+`, CommentMultiline, nil}, - {`[/*]`, CommentMultiline, nil}, - }, - "string": { - {`[^']+`, LiteralStringSingle, nil}, - {`''`, LiteralStringSingle, nil}, - {`'`, LiteralStringSingle, Pop(1)}, - }, - "quoted-ident": { - {`[^"]+`, LiteralStringName, nil}, - {`""`, LiteralStringName, nil}, - {`"`, LiteralStringName, Pop(1)}, - }, - "dollar-string": { - {`[^\$]+`, LiteralStringHeredoc, nil}, - {`\$\$`, LiteralStringHeredoc, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go b/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go deleted file mode 100644 index f06830d2950a..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go +++ /dev/null @@ -1,266 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Crystal lexer. 
-var Crystal = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Crystal", - Aliases: []string{"cr", "crystal"}, - Filenames: []string{"*.cr"}, - MimeTypes: []string{"text/x-crystal"}, - DotAll: true, - }, - crystalRules, -)) - -func crystalRules() Rules { - return Rules{ - "root": { - {`#.*?$`, CommentSingle, nil}, - {Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil}, - {Words(``, `\b`, `true`, `false`, `nil`), KeywordConstant, nil}, - {`(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, - {`(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("funcname")}, - {"def(?=[*%&^`~+-/\\[<>=])", Keyword, Push("funcname")}, - {`(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("classname")}, - {`(self|out|uninitialized)\b|(is_a|responds_to)\?`, KeywordPseudo, nil}, - {Words(``, `\b`, `debugger`, `record`, `pp`, `assert_responds_to`, `spawn`, `parallel`, `getter`, `setter`, `property`, `delegate`, `def_hash`, `def_equals`, `def_equals_and_hash`, `forward_missing_to`), NameBuiltinPseudo, nil}, - {`getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b`, NameBuiltinPseudo, nil}, - {Words(`(?~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, - {`(?<=\(|,|\[)/`, LiteralStringRegex, 
Push("multiline-regex")}, - {`(\s+)(/)(?![\s=])`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, - {`(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberOct, Text, Operator), nil}, - {`(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberHex, Text, Operator), nil}, - {`(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberBin, Text, Operator), nil}, - {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, - {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, - {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?f[0-9]+))(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, - {`(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberInteger, Text, Operator), nil}, - {`@@[a-zA-Z_]\w*`, NameVariableClass, nil}, - {`@[a-zA-Z_]\w*`, NameVariableInstance, nil}, - {`\$\w+`, NameVariableGlobal, nil}, - {"\\$[!@&`\\'+~=/\\\\,;.<>_*$?:\"^-]", NameVariableGlobal, nil}, - {`\$-[0adFiIlpvw]`, NameVariableGlobal, nil}, - {`::`, Operator, nil}, - Include("strings"), - {`\?(\\[MC]-)*(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)`, LiteralStringChar, nil}, - {`[A-Z][A-Z_]+\b`, NameConstant, nil}, - {`\{%`, LiteralStringInterpol, Push("in-macro-control")}, - {`\{\{`, LiteralStringInterpol, Push("in-macro-expr")}, - {`(@\[)(\s*)([A-Z]\w*)`, ByGroups(Operator, Text, NameDecorator), Push("in-attr")}, - {Words(`(\.|::)`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), ByGroups(Operator, NameOperator), nil}, - {"(\\.|::)([a-zA-Z_]\\w*[!?]?|[*%&^`~+\\-/\\[<>=])", ByGroups(Operator, Name), nil}, - {`[a-zA-Z_]\w*(?:[!?](?!=))?`, Name, 
nil}, - {`(\[|\]\??|\*\*|<=>?|>=|<>?|=~|===|!~|&&?|\|\||\.{1,3})`, Operator, nil}, - {`[-+/*%=<>&!^|~]=?`, Operator, nil}, - {`[(){};,/?:\\]`, Punctuation, nil}, - {`\s+`, Text, nil}, - }, - "funcname": { - {"(?:([a-zA-Z_]\\w*)(\\.))?([a-zA-Z_]\\w*[!?]?|\\*\\*?|[-+]@?|[/%&|^`~]|\\[\\]=?|<<|>>|<=?>|>=?|===?)", ByGroups(NameClass, Operator, NameFunction), Pop(1)}, - Default(Pop(1)), - }, - "classname": { - {`[A-Z_]\w*`, NameClass, nil}, - {`(\()(\s*)([A-Z_]\w*)(\s*)(\))`, ByGroups(Punctuation, Text, NameClass, Text, Punctuation), nil}, - Default(Pop(1)), - }, - "in-intp": { - {`\{`, LiteralStringInterpol, Push()}, - {`\}`, LiteralStringInterpol, Pop(1)}, - Include("root"), - }, - "string-intp": { - {`#\{`, LiteralStringInterpol, Push("in-intp")}, - }, - "string-escaped": { - {`\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})`, LiteralStringEscape, nil}, - }, - "string-intp-escaped": { - Include("string-intp"), - Include("string-escaped"), - }, - "interpolated-regex": { - Include("string-intp"), - {`[\\#]`, LiteralStringRegex, nil}, - {`[^\\#]+`, LiteralStringRegex, nil}, - }, - "interpolated-string": { - Include("string-intp"), - {`[\\#]`, LiteralStringOther, nil}, - {`[^\\#]+`, LiteralStringOther, nil}, - }, - "multiline-regex": { - Include("string-intp"), - {`\\\\`, LiteralStringRegex, nil}, - {`\\/`, LiteralStringRegex, nil}, - {`[\\#]`, LiteralStringRegex, nil}, - {`[^\\/#]+`, LiteralStringRegex, nil}, - {`/[imsx]*`, LiteralStringRegex, Pop(1)}, - }, - "end-part": { - {`.+`, CommentPreproc, Pop(1)}, - }, - "in-macro-control": { - {`\{%`, LiteralStringInterpol, Push()}, - {`%\}`, LiteralStringInterpol, Pop(1)}, - {`for\b|in\b`, Keyword, nil}, - Include("root"), - }, - "in-macro-expr": { - {`\{\{`, LiteralStringInterpol, Push()}, - {`\}\}`, LiteralStringInterpol, Pop(1)}, - Include("root"), - }, - "in-attr": { - {`\[`, Operator, Push()}, - {`\]`, Operator, Pop(1)}, - Include("root"), - }, - "strings": { - {`\:@{0,2}[a-zA-Z_]\w*[!?]?`, LiteralStringSymbol, nil}, - 
{Words(`\:@{0,2}`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), LiteralStringSymbol, nil}, - {`:'(\\\\|\\'|[^'])*'`, LiteralStringSymbol, nil}, - {`'(\\\\|\\'|[^']|\\[^'\\]+)'`, LiteralStringChar, nil}, - {`:"`, LiteralStringSymbol, Push("simple-sym")}, - {`([a-zA-Z_]\w*)(:)(?!:)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, - {`"`, LiteralStringDouble, Push("simple-string")}, - {"(?&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, - {`^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, - {`(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)`, String, nil}, - }, - "simple-string": { - Include("string-intp-escaped"), - {`[^\\"#]+`, LiteralStringDouble, nil}, - {`[\\#]`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - "simple-sym": { - Include("string-escaped"), - {`[^\\"#]+`, LiteralStringSymbol, nil}, - {`[\\#]`, LiteralStringSymbol, nil}, - {`"`, LiteralStringSymbol, Pop(1)}, - }, - "simple-backtick": { - Include("string-intp-escaped"), - {"[^\\\\`#]+", LiteralStringBacktick, nil}, - {`[\\#]`, LiteralStringBacktick, nil}, - {"`", LiteralStringBacktick, Pop(1)}, - }, - "cb-intp-string": { - {`\\[\{]`, LiteralStringOther, nil}, - {`\{`, LiteralStringOther, Push()}, - {`\}`, LiteralStringOther, Pop(1)}, - Include("string-intp-escaped"), - {`[\\#{}]`, LiteralStringOther, nil}, - {`[^\\#{}]+`, LiteralStringOther, nil}, - }, - "cb-string": { - {`\\[\\{}]`, LiteralStringOther, nil}, - {`\{`, LiteralStringOther, Push()}, - {`\}`, LiteralStringOther, Pop(1)}, - {`[\\#{}]`, LiteralStringOther, nil}, - {`[^\\#{}]+`, LiteralStringOther, nil}, - }, - "cb-regex": { - {`\\[\\{}]`, LiteralStringRegex, nil}, - {`\{`, LiteralStringRegex, Push()}, - {`\}[imsx]*`, LiteralStringRegex, Pop(1)}, - Include("string-intp"), - {`[\\#{}]`, LiteralStringRegex, nil}, - 
{`[^\\#{}]+`, LiteralStringRegex, nil}, - }, - "sb-intp-string": { - {`\\[\[]`, LiteralStringOther, nil}, - {`\[`, LiteralStringOther, Push()}, - {`\]`, LiteralStringOther, Pop(1)}, - Include("string-intp-escaped"), - {`[\\#\[\]]`, LiteralStringOther, nil}, - {`[^\\#\[\]]+`, LiteralStringOther, nil}, - }, - "sb-string": { - {`\\[\\\[\]]`, LiteralStringOther, nil}, - {`\[`, LiteralStringOther, Push()}, - {`\]`, LiteralStringOther, Pop(1)}, - {`[\\#\[\]]`, LiteralStringOther, nil}, - {`[^\\#\[\]]+`, LiteralStringOther, nil}, - }, - "sb-regex": { - {`\\[\\\[\]]`, LiteralStringRegex, nil}, - {`\[`, LiteralStringRegex, Push()}, - {`\][imsx]*`, LiteralStringRegex, Pop(1)}, - Include("string-intp"), - {`[\\#\[\]]`, LiteralStringRegex, nil}, - {`[^\\#\[\]]+`, LiteralStringRegex, nil}, - }, - "pa-intp-string": { - {`\\[\(]`, LiteralStringOther, nil}, - {`\(`, LiteralStringOther, Push()}, - {`\)`, LiteralStringOther, Pop(1)}, - Include("string-intp-escaped"), - {`[\\#()]`, LiteralStringOther, nil}, - {`[^\\#()]+`, LiteralStringOther, nil}, - }, - "pa-string": { - {`\\[\\()]`, LiteralStringOther, nil}, - {`\(`, LiteralStringOther, Push()}, - {`\)`, LiteralStringOther, Pop(1)}, - {`[\\#()]`, LiteralStringOther, nil}, - {`[^\\#()]+`, LiteralStringOther, nil}, - }, - "pa-regex": { - {`\\[\\()]`, LiteralStringRegex, nil}, - {`\(`, LiteralStringRegex, Push()}, - {`\)[imsx]*`, LiteralStringRegex, Pop(1)}, - Include("string-intp"), - {`[\\#()]`, LiteralStringRegex, nil}, - {`[^\\#()]+`, LiteralStringRegex, nil}, - }, - "ab-intp-string": { - {`\\[<]`, LiteralStringOther, nil}, - {`<`, LiteralStringOther, Push()}, - {`>`, LiteralStringOther, Pop(1)}, - Include("string-intp-escaped"), - {`[\\#<>]`, LiteralStringOther, nil}, - {`[^\\#<>]+`, LiteralStringOther, nil}, - }, - "ab-string": { - {`\\[\\<>]`, LiteralStringOther, nil}, - {`<`, LiteralStringOther, Push()}, - {`>`, LiteralStringOther, Pop(1)}, - {`[\\#<>]`, LiteralStringOther, nil}, - {`[^\\#<>]+`, LiteralStringOther, nil}, - }, 
- "ab-regex": { - {`\\[\\<>]`, LiteralStringRegex, nil}, - {`<`, LiteralStringRegex, Push()}, - {`>[imsx]*`, LiteralStringRegex, Pop(1)}, - Include("string-intp"), - {`[\\#<>]`, LiteralStringRegex, nil}, - {`[^\\#<>]+`, LiteralStringRegex, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go b/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go deleted file mode 100644 index 4776864dbff2..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go +++ /dev/null @@ -1,56 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// CSharp lexer. -var CSharp = internal.Register(MustNewLazyLexer( - &Config{ - Name: "C#", - Aliases: []string{"csharp", "c#"}, - Filenames: []string{"*.cs"}, - MimeTypes: []string{"text/x-csharp"}, - DotAll: true, - EnsureNL: true, - }, - cSharpRules, -)) - -func cSharpRules() Rules { - return Rules{ - "root": { - {`^\s*\[.*?\]`, NameAttribute, nil}, - {`[^\S\n]+`, Text, nil}, - {`\\\n`, Text, nil}, - {`///[^\n\r]+`, CommentSpecial, nil}, - {`//[^\n\r]+`, CommentSingle, nil}, - {`/[*].*?[*]/`, CommentMultiline, nil}, - {`\n`, Text, nil}, - {`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil}, - {`[{}]`, Punctuation, nil}, - {`@"(""|[^"])*"`, LiteralString, nil}, - {`\$@?"(""|[^"])*"`, LiteralString, nil}, - {`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, - {`'\\.'|'[^\\]'`, LiteralStringChar, nil}, - {`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil}, - {`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b[^\n\r]+`, CommentPreproc, nil}, - {`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil}, - 
{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|init|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil}, - {`(global)(::)`, ByGroups(Keyword, Punctuation), nil}, - {`(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??`, KeywordType, nil}, - {`(class|struct|record|interface)(\s+)`, ByGroups(Keyword, Text), Push("class")}, - {`(namespace|using)(\s+)`, ByGroups(Keyword, Text), Push("namespace")}, - {`@?[_a-zA-Z]\w*`, Name, nil}, - }, - "class": { - {`@?[_a-zA-Z]\w*`, NameClass, Pop(1)}, - Default(Pop(1)), - }, - "namespace": { - {`(?=\()`, Text, Pop(1)}, - {`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/css.go b/vendor/github.com/alecthomas/chroma/lexers/c/css.go deleted file mode 100644 index 9f3a01d5455d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/css.go +++ /dev/null @@ -1,121 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// CSS lexer. 
-var CSS = internal.Register(MustNewLazyLexer( - &Config{ - Name: "CSS", - Aliases: []string{"css"}, - Filenames: []string{"*.css"}, - MimeTypes: []string{"text/css"}, - }, - cssRules, -)) - -func cssRules() Rules { - return Rules{ - "root": { - Include("basics"), - }, - "basics": { - {`\s+`, Text, nil}, - {`/\*(?:.|\n)*?\*/`, Comment, nil}, - {`\{`, Punctuation, Push("content")}, - {`(\:{1,2})([\w-]+)`, ByGroups(Punctuation, NameDecorator), nil}, - {`(\.)([\w-]+)`, ByGroups(Punctuation, NameClass), nil}, - {`(\#)([\w-]+)`, ByGroups(Punctuation, NameNamespace), nil}, - {`(@)([\w-]+)`, ByGroups(Punctuation, Keyword), Push("atrule")}, - {`[\w-]+`, NameTag, nil}, - {`[~^*!%&$\[\]()<>|+=@:;,./?-]`, Operator, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - }, - "atrule": { - {`\{`, Punctuation, Push("atcontent")}, - {`;`, Punctuation, Pop(1)}, - Include("basics"), - }, - "atcontent": { - Include("basics"), - {`\}`, Punctuation, Pop(2)}, - }, - "atparenthesis": { - Include("common-values"), - {`/\*(?:.|\n)*?\*/`, Comment, nil}, - Include("numeric-values"), - {`[*+/-]`, Operator, nil}, - {`[,]`, Punctuation, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`[a-zA-Z_-]\w*`, Name, nil}, - {`\(`, Punctuation, Push("atparenthesis")}, - {`\)`, Punctuation, Pop(1)}, - }, - "content": { - {`\s+`, Text, nil}, - {`\}`, Punctuation, Pop(1)}, - {`;`, Punctuation, nil}, - {`^@.*?$`, CommentPreproc, nil}, - {Words(``, ``, `-ms-`, `mso-`, `-moz-`, `-o-`, `-xv-`, `-atsc-`, `-wap-`, `-khtml-`, `-webkit-`, `prince-`, `-ah-`, `-hp-`, `-ro-`, `-rim-`, `-tc-`), KeywordPseudo, nil}, - 
{`(align-content|align-items|align-self|alignment-baseline|all|animation|animation-delay|animation-direction|animation-duration|animation-fill-mode|animation-iteration-count|animation-name|animation-play-state|animation-timing-function|appearance|azimuth|backface-visibility|background|background-attachment|background-blend-mode|background-clip|background-color|background-image|background-origin|background-position|background-repeat|background-size|baseline-shift|bookmark-label|bookmark-level|bookmark-state|border|border-bottom|border-bottom-color|border-bottom-left-radius|border-bottom-right-radius|border-bottom-style|border-bottom-width|border-boundary|border-collapse|border-color|border-image|border-image-outset|border-image-repeat|border-image-slice|border-image-source|border-image-width|border-left|border-left-color|border-left-style|border-left-width|border-radius|border-right|border-right-color|border-right-style|border-right-width|border-spacing|border-style|border-top|border-top-color|border-top-left-radius|border-top-right-radius|border-top-style|border-top-width|border-width|bottom|box-decoration-break|box-shadow|box-sizing|box-snap|box-suppress|break-after|break-before|break-inside|caption-side|caret|caret-animation|caret-color|caret-shape|chains|clear|clip|clip-path|clip-rule|color|color-interpolation-filters|column-count|column-fill|column-gap|column-rule|column-rule-color|column-rule-style|column-rule-width|column-span|column-width|columns|content|counter-increment|counter-reset|counter-set|crop|cue|cue-after|cue-before|cursor|direction|display|dominant-baseline|elevation|empty-cells|filter|flex|flex-basis|flex-direction|flex-flow|flex-grow|flex-shrink|flex-wrap|float|float-defer|float-offset|float-reference|flood-color|flood-opacity|flow|flow-from|flow-into|font|font-family|font-feature-settings|font-kerning|font-language-override|font-size|font-size-adjust|font-stretch|font-style|font-synthesis|font-variant|font-variant-alternates|font-variant-caps|f
ont-variant-east-asian|font-variant-ligatures|font-variant-numeric|font-variant-position|font-weight|footnote-display|footnote-policy|glyph-orientation-vertical|grid|grid-area|grid-auto-columns|grid-auto-flow|grid-auto-rows|grid-column|grid-column-end|grid-column-gap|grid-column-start|grid-gap|grid-row|grid-row-end|grid-row-gap|grid-row-start|grid-template|grid-template-areas|grid-template-columns|grid-template-rows|hanging-punctuation|height|hyphenate-character|hyphenate-limit-chars|hyphenate-limit-last|hyphenate-limit-lines|hyphenate-limit-zone|hyphens|image-orientation|image-resolution|initial-letter|initial-letter-align|initial-letter-wrap|isolation|justify-content|justify-items|justify-self|left|letter-spacing|lighting-color|line-break|line-grid|line-height|line-snap|list-style|list-style-image|list-style-position|list-style-type|margin|margin-bottom|margin-left|margin-right|margin-top|marker-side|marquee-direction|marquee-loop|marquee-speed|marquee-style|mask|mask-border|mask-border-mode|mask-border-outset|mask-border-repeat|mask-border-slice|mask-border-source|mask-border-width|mask-clip|mask-composite|mask-image|mask-mode|mask-origin|mask-position|mask-repeat|mask-size|mask-type|max-height|max-lines|max-width|min-height|min-width|mix-blend-mode|motion|motion-offset|motion-path|motion-rotation|move-to|nav-down|nav-left|nav-right|nav-up|object-fit|object-position|offset-after|offset-before|offset-end|offset-start|opacity|order|orphans|outline|outline-color|outline-offset|outline-style|outline-width|overflow|overflow-style|overflow-wrap|overflow-x|overflow-y|padding|padding-bottom|padding-left|padding-right|padding-top|page|page-break-after|page-break-before|page-break-inside|page-policy|pause|pause-after|pause-before|perspective|perspective-origin|pitch|pitch-range|play-during|polar-angle|polar-distance|position|presentation-level|quotes|region-fragment|resize|rest|rest-after|rest-before|richness|right|rotation|rotation-point|ruby-align|ruby-merge|ruby-positio
n|running|scroll-snap-coordinate|scroll-snap-destination|scroll-snap-points-x|scroll-snap-points-y|scroll-snap-type|shape-image-threshold|shape-inside|shape-margin|shape-outside|size|speak|speak-as|speak-header|speak-numeral|speak-punctuation|speech-rate|stress|string-set|tab-size|table-layout|text-align|text-align-last|text-combine-upright|text-decoration|text-decoration-color|text-decoration-line|text-decoration-skip|text-decoration-style|text-emphasis|text-emphasis-color|text-emphasis-position|text-emphasis-style|text-indent|text-justify|text-orientation|text-overflow|text-shadow|text-space-collapse|text-space-trim|text-spacing|text-transform|text-underline-position|text-wrap|top|transform|transform-origin|transform-style|transition|transition-delay|transition-duration|transition-property|transition-timing-function|unicode-bidi|user-select|vertical-align|visibility|voice-balance|voice-duration|voice-family|voice-pitch|voice-range|voice-rate|voice-stress|voice-volume|volume|white-space|widows|width|will-change|word-break|word-spacing|word-wrap|wrap-after|wrap-before|wrap-flow|wrap-inside|wrap-through|writing-mode|z-index)(\s*)(\:)`, ByGroups(Keyword, Text, Punctuation), Push("value-start")}, - {`(--[a-zA-Z_][\w-]*)(\s*)(\:)`, ByGroups(NameVariable, Text, Punctuation), Push("value-start")}, - {`([a-zA-Z_][\w-]*)(\s*)(\:)`, ByGroups(Name, Text, Punctuation), Push("value-start")}, - {`/\*(?:.|\n)*?\*/`, Comment, nil}, - }, - "value-start": { - Include("common-values"), - {Words(``, `\b`, `align-content`, `align-items`, `align-self`, `alignment-baseline`, `all`, `animation`, `animation-delay`, `animation-direction`, `animation-duration`, `animation-fill-mode`, `animation-iteration-count`, `animation-name`, `animation-play-state`, `animation-timing-function`, `appearance`, `azimuth`, `backface-visibility`, `background`, `background-attachment`, `background-blend-mode`, `background-clip`, `background-color`, `background-image`, `background-origin`, 
`background-position`, `background-repeat`, `background-size`, `baseline-shift`, `bookmark-label`, `bookmark-level`, `bookmark-state`, `border`, `border-bottom`, `border-bottom-color`, `border-bottom-left-radius`, `border-bottom-right-radius`, `border-bottom-style`, `border-bottom-width`, `border-boundary`, `border-collapse`, `border-color`, `border-image`, `border-image-outset`, `border-image-repeat`, `border-image-slice`, `border-image-source`, `border-image-width`, `border-left`, `border-left-color`, `border-left-style`, `border-left-width`, `border-radius`, `border-right`, `border-right-color`, `border-right-style`, `border-right-width`, `border-spacing`, `border-style`, `border-top`, `border-top-color`, `border-top-left-radius`, `border-top-right-radius`, `border-top-style`, `border-top-width`, `border-width`, `bottom`, `box-decoration-break`, `box-shadow`, `box-sizing`, `box-snap`, `box-suppress`, `break-after`, `break-before`, `break-inside`, `caption-side`, `caret`, `caret-animation`, `caret-color`, `caret-shape`, `chains`, `clear`, `clip`, `clip-path`, `clip-rule`, `color`, `color-interpolation-filters`, `column-count`, `column-fill`, `column-gap`, `column-rule`, `column-rule-color`, `column-rule-style`, `column-rule-width`, `column-span`, `column-width`, `columns`, `content`, `counter-increment`, `counter-reset`, `counter-set`, `crop`, `cue`, `cue-after`, `cue-before`, `cursor`, `direction`, `display`, `dominant-baseline`, `elevation`, `empty-cells`, `filter`, `flex`, `flex-basis`, `flex-direction`, `flex-flow`, `flex-grow`, `flex-shrink`, `flex-wrap`, `float`, `float-defer`, `float-offset`, `float-reference`, `flood-color`, `flood-opacity`, `flow`, `flow-from`, `flow-into`, `font`, `font-family`, `font-feature-settings`, `font-kerning`, `font-language-override`, `font-size`, `font-size-adjust`, `font-stretch`, `font-style`, `font-synthesis`, `font-variant`, `font-variant-alternates`, `font-variant-caps`, `font-variant-east-asian`, 
`font-variant-ligatures`, `font-variant-numeric`, `font-variant-position`, `font-weight`, `footnote-display`, `footnote-policy`, `glyph-orientation-vertical`, `grid`, `grid-area`, `grid-auto-columns`, `grid-auto-flow`, `grid-auto-rows`, `grid-column`, `grid-column-end`, `grid-column-gap`, `grid-column-start`, `grid-gap`, `grid-row`, `grid-row-end`, `grid-row-gap`, `grid-row-start`, `grid-template`, `grid-template-areas`, `grid-template-columns`, `grid-template-rows`, `hanging-punctuation`, `height`, `hyphenate-character`, `hyphenate-limit-chars`, `hyphenate-limit-last`, `hyphenate-limit-lines`, `hyphenate-limit-zone`, `hyphens`, `image-orientation`, `image-resolution`, `initial-letter`, `initial-letter-align`, `initial-letter-wrap`, `isolation`, `justify-content`, `justify-items`, `justify-self`, `left`, `letter-spacing`, `lighting-color`, `line-break`, `line-grid`, `line-height`, `line-snap`, `list-style`, `list-style-image`, `list-style-position`, `list-style-type`, `margin`, `margin-bottom`, `margin-left`, `margin-right`, `margin-top`, `marker-side`, `marquee-direction`, `marquee-loop`, `marquee-speed`, `marquee-style`, `mask`, `mask-border`, `mask-border-mode`, `mask-border-outset`, `mask-border-repeat`, `mask-border-slice`, `mask-border-source`, `mask-border-width`, `mask-clip`, `mask-composite`, `mask-image`, `mask-mode`, `mask-origin`, `mask-position`, `mask-repeat`, `mask-size`, `mask-type`, `max-height`, `max-lines`, `max-width`, `min-height`, `min-width`, `mix-blend-mode`, `motion`, `motion-offset`, `motion-path`, `motion-rotation`, `move-to`, `nav-down`, `nav-left`, `nav-right`, `nav-up`, `object-fit`, `object-position`, `offset-after`, `offset-before`, `offset-end`, `offset-start`, `opacity`, `order`, `orphans`, `outline`, `outline-color`, `outline-offset`, `outline-style`, `outline-width`, `overflow`, `overflow-style`, `overflow-wrap`, `overflow-x`, `overflow-y`, `padding`, `padding-bottom`, `padding-left`, `padding-right`, `padding-top`, `page`, 
`page-break-after`, `page-break-before`, `page-break-inside`, `page-policy`, `pause`, `pause-after`, `pause-before`, `perspective`, `perspective-origin`, `pitch`, `pitch-range`, `play-during`, `polar-angle`, `polar-distance`, `position`, `presentation-level`, `quotes`, `region-fragment`, `resize`, `rest`, `rest-after`, `rest-before`, `richness`, `right`, `rotation`, `rotation-point`, `ruby-align`, `ruby-merge`, `ruby-position`, `running`, `scroll-snap-coordinate`, `scroll-snap-destination`, `scroll-snap-points-x`, `scroll-snap-points-y`, `scroll-snap-type`, `shape-image-threshold`, `shape-inside`, `shape-margin`, `shape-outside`, `size`, `speak`, `speak-as`, `speak-header`, `speak-numeral`, `speak-punctuation`, `speech-rate`, `stress`, `string-set`, `tab-size`, `table-layout`, `text-align`, `text-align-last`, `text-combine-upright`, `text-decoration`, `text-decoration-color`, `text-decoration-line`, `text-decoration-skip`, `text-decoration-style`, `text-emphasis`, `text-emphasis-color`, `text-emphasis-position`, `text-emphasis-style`, `text-indent`, `text-justify`, `text-orientation`, `text-overflow`, `text-shadow`, `text-space-collapse`, `text-space-trim`, `text-spacing`, `text-transform`, `text-underline-position`, `text-wrap`, `top`, `transform`, `transform-origin`, `transform-style`, `transition`, `transition-delay`, `transition-duration`, `transition-property`, `transition-timing-function`, `unicode-bidi`, `user-select`, `vertical-align`, `visibility`, `voice-balance`, `voice-duration`, `voice-family`, `voice-pitch`, `voice-range`, `voice-rate`, `voice-stress`, `voice-volume`, `volume`, `white-space`, `widows`, `width`, `will-change`, `word-break`, `word-spacing`, `word-wrap`, `wrap-after`, `wrap-before`, `wrap-flow`, `wrap-inside`, `wrap-through`, `writing-mode`, `z-index`), Keyword, nil}, - {`\!important`, CommentPreproc, nil}, - {`/\*(?:.|\n)*?\*/`, Comment, nil}, - Include("numeric-values"), - {`[~^*!%&<>|+=@:./?-]+`, Operator, nil}, - {`[\[\](),]+`, 
Punctuation, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`[a-zA-Z_][\w-]*`, Name, nil}, - {`;`, Punctuation, Pop(1)}, - {`\}`, Punctuation, Pop(2)}, - }, - "function-start": { - Include("common-values"), - {`/\*(?:.|\n)*?\*/`, Comment, nil}, - Include("numeric-values"), - {`[*+/-]`, Operator, nil}, - {`[,]`, Punctuation, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`[a-zA-Z_-]\w*`, Name, nil}, - {`\(`, Punctuation, Push("atparenthesis")}, - {`\)`, Punctuation, Pop(1)}, - }, - "common-values": { - {`\s+`, Text, nil}, - {Words(``, ``, `-ms-`, `mso-`, `-moz-`, `-o-`, `-xv-`, `-atsc-`, `-wap-`, `-khtml-`, `-webkit-`, `prince-`, `-ah-`, `-hp-`, `-ro-`, `-rim-`, `-tc-`), KeywordPseudo, nil}, - Include("urls"), - {`(attr|blackness|blend|blenda|blur|brightness|calc|circle|color-mod|contrast|counter|cubic-bezier|device-cmyk|drop-shadow|ellipse|gray|grayscale|hsl|hsla|hue|hue-rotate|hwb|image|inset|invert|lightness|linear-gradient|matrix|matrix3d|opacity|perspective|polygon|radial-gradient|rect|repeating-linear-gradient|repeating-radial-gradient|rgb|rgba|rotate|rotate3d|rotateX|rotateY|rotateZ|saturate|saturation|scale|scale3d|scaleX|scaleY|scaleZ|sepia|shade|skewX|skewY|steps|tint|toggle|translate|translate3d|translateX|translateY|translateZ|whiteness)(\()`, ByGroups(NameBuiltin, Punctuation), Push("function-start")}, - {`([a-zA-Z_][\w-]+)(\()`, ByGroups(NameFunction, Punctuation), Push("function-start")}, - {Words(``, `\b`, `absolute`, `alias`, `all`, `all-petite-caps`, `all-scroll`, `all-small-caps`, `allow-end`, `alpha`, `alternate`, `alternate-reverse`, `always`, `armenian`, `auto`, `avoid`, `avoid-column`, `avoid-page`, `backwards`, `balance`, `baseline`, `below`, `blink`, `block`, `bold`, `bolder`, `border-box`, `both`, `bottom`, `box-decoration`, `break-word`, `capitalize`, `cell`, `center`, `circle`, `clip`, `clone`, `close-quote`, 
`col-resize`, `collapse`, `color`, `color-burn`, `color-dodge`, `column`, `column-reverse`, `compact`, `condensed`, `contain`, `container`, `content-box`, `context-menu`, `copy`, `cover`, `crisp-edges`, `crosshair`, `currentColor`, `cursive`, `darken`, `dashed`, `decimal`, `decimal-leading-zero`, `default`, `descendants`, `difference`, `digits`, `disc`, `distribute`, `dot`, `dotted`, `double`, `double-circle`, `e-resize`, `each-line`, `ease`, `ease-in`, `ease-in-out`, `ease-out`, `edges`, `ellipsis`, `end`, `ew-resize`, `exclusion`, `expanded`, `extra-condensed`, `extra-expanded`, `fantasy`, `fill`, `fill-box`, `filled`, `first`, `fixed`, `flat`, `flex`, `flex-end`, `flex-start`, `flip`, `force-end`, `forwards`, `from-image`, `full-width`, `geometricPrecision`, `georgian`, `groove`, `hanging`, `hard-light`, `help`, `hidden`, `hide`, `horizontal`, `hue`, `icon`, `infinite`, `inherit`, `initial`, `ink`, `inline`, `inline-block`, `inline-flex`, `inline-table`, `inset`, `inside`, `inter-word`, `invert`, `isolate`, `italic`, `justify`, `large`, `larger`, `last`, `left`, `lighten`, `lighter`, `line-through`, `linear`, `list-item`, `local`, `loose`, `lower-alpha`, `lower-greek`, `lower-latin`, `lower-roman`, `lowercase`, `ltr`, `luminance`, `luminosity`, `mandatory`, `manipulation`, `manual`, `margin-box`, `match-parent`, `medium`, `mixed`, `monospace`, `move`, `multiply`, `n-resize`, `ne-resize`, `nesw-resize`, `no-close-quote`, `no-drop`, `no-open-quote`, `no-repeat`, `none`, `normal`, `not-allowed`, `nowrap`, `ns-resize`, `nw-resize`, `nwse-resize`, `objects`, `oblique`, `off`, `on`, `open`, `open-quote`, `optimizeLegibility`, `optimizeSpeed`, `outset`, `outside`, `over`, `overlay`, `overline`, `padding-box`, `page`, `pan-down`, `pan-left`, `pan-right`, `pan-up`, `pan-x`, `pan-y`, `paused`, `petite-caps`, `pixelated`, `pointer`, `preserve-3d`, `progress`, `proximity`, `relative`, `repeat`, `repeat no-repeat`, `repeat-x`, `repeat-y`, `reverse`, `ridge`, `right`, 
`round`, `row`, `row-resize`, `row-reverse`, `rtl`, `ruby`, `ruby-base`, `ruby-base-container`, `ruby-text`, `ruby-text-container`, `run-in`, `running`, `s-resize`, `sans-serif`, `saturation`, `scale-down`, `screen`, `scroll`, `se-resize`, `semi-condensed`, `semi-expanded`, `separate`, `serif`, `sesame`, `show`, `sideways`, `sideways-left`, `sideways-right`, `slice`, `small`, `small-caps`, `smaller`, `smooth`, `snap`, `soft-light`, `solid`, `space`, `space-around`, `space-between`, `spaces`, `square`, `start`, `static`, `step-end`, `step-start`, `sticky`, `stretch`, `strict`, `stroke-box`, `style`, `sw-resize`, `table`, `table-caption`, `table-cell`, `table-column`, `table-column-group`, `table-footer-group`, `table-header-group`, `table-row`, `table-row-group`, `text`, `thick`, `thin`, `titling-caps`, `to`, `top`, `triangle`, `ultra-condensed`, `ultra-expanded`, `under`, `underline`, `unicase`, `unset`, `upper-alpha`, `upper-latin`, `upper-roman`, `uppercase`, `upright`, `use-glyph-orientation`, `vertical`, `vertical-text`, `view-box`, `visible`, `w-resize`, `wait`, `wavy`, `weight`, `weight style`, `wrap`, `wrap-reverse`, `x-large`, `x-small`, `xx-large`, `xx-small`, `zoom-in`, `zoom-out`), KeywordConstant, nil}, - {Words(``, `\b`, `above`, `aural`, `behind`, `bidi-override`, `center-left`, `center-right`, `cjk-ideographic`, `continuous`, `crop`, `cross`, `embed`, `far-left`, `far-right`, `fast`, `faster`, `hebrew`, `high`, `higher`, `hiragana`, `hiragana-iroha`, `katakana`, `katakana-iroha`, `landscape`, `left-side`, `leftwards`, `level`, `loud`, `low`, `lower`, `message-box`, `middle`, `mix`, `narrower`, `once`, `portrait`, `right-side`, `rightwards`, `silent`, `slow`, `slower`, `small-caption`, `soft`, `spell-out`, `status-bar`, `super`, `text-bottom`, `text-top`, `wider`, `x-fast`, `x-high`, `x-loud`, `x-low`, `x-soft`, `yes`, `pre`, `pre-wrap`, `pre-line`), KeywordConstant, nil}, - {Words(``, `\b`, `aliceblue`, `antiquewhite`, `aqua`, `aquamarine`, `azure`, 
`beige`, `bisque`, `black`, `blanchedalmond`, `blue`, `blueviolet`, `brown`, `burlywood`, `cadetblue`, `chartreuse`, `chocolate`, `coral`, `cornflowerblue`, `cornsilk`, `crimson`, `cyan`, `darkblue`, `darkcyan`, `darkgoldenrod`, `darkgray`, `darkgreen`, `darkgrey`, `darkkhaki`, `darkmagenta`, `darkolivegreen`, `darkorange`, `darkorchid`, `darkred`, `darksalmon`, `darkseagreen`, `darkslateblue`, `darkslategray`, `darkslategrey`, `darkturquoise`, `darkviolet`, `deeppink`, `deepskyblue`, `dimgray`, `dimgrey`, `dodgerblue`, `firebrick`, `floralwhite`, `forestgreen`, `fuchsia`, `gainsboro`, `ghostwhite`, `gold`, `goldenrod`, `gray`, `green`, `greenyellow`, `grey`, `honeydew`, `hotpink`, `indianred`, `indigo`, `ivory`, `khaki`, `lavender`, `lavenderblush`, `lawngreen`, `lemonchiffon`, `lightblue`, `lightcoral`, `lightcyan`, `lightgoldenrodyellow`, `lightgray`, `lightgreen`, `lightgrey`, `lightpink`, `lightsalmon`, `lightseagreen`, `lightskyblue`, `lightslategray`, `lightslategrey`, `lightsteelblue`, `lightyellow`, `lime`, `limegreen`, `linen`, `magenta`, `maroon`, `mediumaquamarine`, `mediumblue`, `mediumorchid`, `mediumpurple`, `mediumseagreen`, `mediumslateblue`, `mediumspringgreen`, `mediumturquoise`, `mediumvioletred`, `midnightblue`, `mintcream`, `mistyrose`, `moccasin`, `navajowhite`, `navy`, `oldlace`, `olive`, `olivedrab`, `orange`, `orangered`, `orchid`, `palegoldenrod`, `palegreen`, `paleturquoise`, `palevioletred`, `papayawhip`, `peachpuff`, `peru`, `pink`, `plum`, `powderblue`, `purple`, `rebeccapurple`, `red`, `rosybrown`, `royalblue`, `saddlebrown`, `salmon`, `sandybrown`, `seagreen`, `seashell`, `sienna`, `silver`, `skyblue`, `slateblue`, `slategray`, `slategrey`, `snow`, `springgreen`, `steelblue`, `tan`, `teal`, `thistle`, `tomato`, `turquoise`, `violet`, `wheat`, `white`, `whitesmoke`, `yellow`, `yellowgreen`, `transparent`), KeywordConstant, nil}, - }, - "urls": { - {`(url)(\()(".*?")(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringDouble, 
Punctuation), nil}, - {`(url)(\()('.*?')(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringSingle, Punctuation), nil}, - {`(url)(\()(.*?)(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringOther, Punctuation), nil}, - }, - "numeric-values": { - {`\#[a-zA-Z0-9]{1,6}`, LiteralNumberHex, nil}, - {`[+\-]?[0-9]*[.][0-9]+`, LiteralNumberFloat, Push("numeric-end")}, - {`[+\-]?[0-9]+`, LiteralNumberInteger, Push("numeric-end")}, - }, - "numeric-end": { - {Words(``, `\b`, `deg`, `grad`, `rad`, `turn`, `Hz`, `kHz`, `em`, `ex`, `ch`, `rem`, `vh`, `vw`, `vmin`, `vmax`, `px`, `mm`, `cm`, `in`, `pt`, `pc`, `q`, `dpi`, `dpcm`, `dppx`, `s`, `ms`), KeywordType, nil}, - {`%`, KeywordType, nil}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cython.go b/vendor/github.com/alecthomas/chroma/lexers/c/cython.go deleted file mode 100644 index 0cce20410d4b..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/c/cython.go +++ /dev/null @@ -1,139 +0,0 @@ -package c - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Cython lexer. 
-var Cython = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Cython", - Aliases: []string{"cython", "pyx", "pyrex"}, - Filenames: []string{"*.pyx", "*.pxd", "*.pxi"}, - MimeTypes: []string{"text/x-cython", "application/x-cython"}, - }, - cythonRules, -)) - -func cythonRules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - {`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, - {`^(\s*)('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil}, - {`[^\S\n]+`, Text, nil}, - {`#.*$`, Comment, nil}, - {`[]{}:(),;[]`, Punctuation, nil}, - {`\\\n`, Text, nil}, - {`\\`, Text, nil}, - {`(in|is|and|or|not)\b`, OperatorWord, nil}, - {`(<)([a-zA-Z0-9.?]+)(>)`, ByGroups(Punctuation, KeywordType, Punctuation), nil}, - {`!=|==|<<|>>|[-~+/*%=<>&^|.?]`, Operator, nil}, - {`(from)(\d+)(<=)(\s+)(<)(\d+)(:)`, ByGroups(Keyword, LiteralNumberInteger, Operator, Name, Operator, Name, Punctuation), nil}, - Include("keywords"), - {`(def|property)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, - {`(cp?def)(\s+)`, ByGroups(Keyword, Text), Push("cdef")}, - {`(cdef)(:)`, ByGroups(Keyword, Punctuation), nil}, - {`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, - {`(from)(\s+)`, ByGroups(Keyword, Text), Push("fromimport")}, - {`(c?import)(\s+)`, ByGroups(Keyword, Text), Push("import")}, - Include("builtins"), - Include("backtick"), - {`(?:[rR]|[uU][rR]|[rR][uU])"""`, LiteralString, Push("tdqs")}, - {`(?:[rR]|[uU][rR]|[rR][uU])'''`, LiteralString, Push("tsqs")}, - {`(?:[rR]|[uU][rR]|[rR][uU])"`, LiteralString, Push("dqs")}, - {`(?:[rR]|[uU][rR]|[rR][uU])'`, LiteralString, Push("sqs")}, - {`[uU]?"""`, LiteralString, Combined("stringescape", "tdqs")}, - {`[uU]?'''`, LiteralString, Combined("stringescape", "tsqs")}, - {`[uU]?"`, LiteralString, Combined("stringescape", "dqs")}, - {`[uU]?'`, LiteralString, Combined("stringescape", "sqs")}, - Include("name"), - Include("numbers"), - }, - "keywords": { - {Words(``, `\b`, `assert`, `break`, 
`by`, `continue`, `ctypedef`, `del`, `elif`, `else`, `except`, `except?`, `exec`, `finally`, `for`, `fused`, `gil`, `global`, `if`, `include`, `lambda`, `nogil`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `as`, `with`), Keyword, nil}, - {`(DEF|IF|ELIF|ELSE)\b`, CommentPreproc, nil}, - }, - "builtins": { - {Words(`(?`, CommentPreproc, Pop(1)}, - {`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil}, - {`\s+`, Text, nil}, - {`#.*?\n`, CommentSingle, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*\*/`, CommentMultiline, nil}, - {`/\*\*.*?\*/`, LiteralStringDoc, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil}, - {`[~!%^&*+=|:.<>/@-]+`, Operator, nil}, - {`\?`, Operator, nil}, - {`[\[\]{}();,]+`, Punctuation, nil}, - {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, - {`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil}, - {`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")}, - {`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil}, - {`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil}, - {`(true|false|null)\b`, KeywordConstant, nil}, - Include("magicconstants"), - 
{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil}, - {`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil}, - {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil}, - {`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil}, - {`0[0-7]+`, LiteralNumberOct, nil}, - {`0x[a-f0-9_]+`, LiteralNumberHex, nil}, - {`\d[\d_]*`, LiteralNumberInteger, nil}, - {`0b[01]+`, LiteralNumberBin, nil}, - {`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil}, - {"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil}, - {`"`, LiteralStringDouble, Push("string")}, - }, - "magicfuncs": { - {Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil}, - }, - "magicconstants": { - {Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil}, - }, - "classname": { - {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)}, - }, - "functionname": { - Include("magicfuncs"), - {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)}, - Default(Pop(1)), - }, - "string": { - {`"`, LiteralStringDouble, Pop(1)}, - {`[^{$"\\]+`, LiteralStringDouble, nil}, - {`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil}, - {`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil}, - {`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, - {`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, - {`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil}, - 
{`[${\\]`, LiteralStringDouble, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/d.go b/vendor/github.com/alecthomas/chroma/lexers/d/d.go deleted file mode 100644 index ac12d2e23631..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/d.go +++ /dev/null @@ -1,73 +0,0 @@ -package d - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// D lexer. https://dlang.org/spec/lex.html -var D = internal.Register(MustNewLazyLexer( - &Config{ - Name: "D", - Aliases: []string{"d"}, - Filenames: []string{"*.d", "*.di"}, - MimeTypes: []string{"text/x-d"}, - EnsureNL: true, - }, - dRules, -)) - -func dRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - - // https://dlang.org/spec/lex.html#comment - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`/\+.*?\+/`, CommentMultiline, nil}, - - // https://dlang.org/spec/lex.html#keywords - {`(asm|assert|body|break|case|cast|catch|continue|default|debug|delete|deprecated|do|else|finally|for|foreach|foreach_reverse|goto|if|in|invariant|is|macro|mixin|new|out|pragma|return|super|switch|this|throw|try|version|while|with)\b`, Keyword, nil}, - {`__(FILE|FILE_FULL_PATH|MODULE|LINE|FUNCTION|PRETTY_FUNCTION|DATE|EOF|TIME|TIMESTAMP|VENDOR|VERSION)__\b`, NameBuiltin, nil}, - {`__(traits|vector|parameters)\b`, NameBuiltin, nil}, - {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, - - // https://dlang.org/spec/attribute.html#uda - {`@[\w.]*`, NameDecorator, nil}, - {`(abstract|auto|alias|align|const|delegate|enum|export|final|function|inout|lazy|nothrow|override|package|private|protected|public|pure|static|synchronized|template|volatile|__gshared)\b`, KeywordDeclaration, nil}, - - // https://dlang.org/spec/type.html#basic-data-types - 
{`(void|bool|byte|ubyte|short|ushort|int|uint|long|ulong|cent|ucent|float|double|real|ifloat|idouble|ireal|cfloat|cdouble|creal|char|wchar|dchar|string|wstring|dstring)\b`, KeywordType, nil}, - {`(module)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`(class|interface|struct|template|union)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, - {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - - // https://dlang.org/spec/lex.html#string_literals - // TODO support delimited strings - {`[qr]?"(\\\\|\\"|[^"])*"[cwd]?`, LiteralString, nil}, - {"(`)([^`]*)(`)[cwd]?", LiteralString, nil}, - {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, - {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, - {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, - - // https://dlang.org/spec/lex.html#floatliteral - {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFL]?i?|[0-9][eE][+\-]?[0-9][0-9_]*[fFL]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFL]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFL]?`, LiteralNumberFloat, nil}, - // https://dlang.org/spec/lex.html#integerliteral - {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, - {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, - {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, - {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, - {`([~^*!%&\[\](){}<>|+=:;,./?-]|q{)`, Operator, nil}, - {`([^\W\d]|\$)[\w$]*`, Name, nil}, - {`\n`, Text, nil}, - }, - "class": { - {`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)}, - }, - "import": { - {`[\w.]+\*?`, NameNamespace, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dart.go b/vendor/github.com/alecthomas/chroma/lexers/d/dart.go deleted file mode 100644 index c1dbb5cd5e8d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/dart.go +++ /dev/null @@ -1,95 +0,0 
@@ -package d - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Dart lexer. -var Dart = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Dart", - Aliases: []string{"dart"}, - Filenames: []string{"*.dart"}, - MimeTypes: []string{"text/x-dart"}, - DotAll: true, - }, - dartRules, -)) - -func dartRules() Rules { - return Rules{ - "root": { - Include("string_literal"), - {`#!(.*?)$`, CommentPreproc, nil}, - {`\b(import|export)\b`, Keyword, Push("import_decl")}, - {`\b(library|source|part of|part)\b`, Keyword, nil}, - {`[^\S\n]+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`\b(class)\b(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, - {`\b(assert|break|case|catch|continue|default|do|else|finally|for|if|in|is|new|return|super|switch|this|throw|try|while)\b`, Keyword, nil}, - {`\b(abstract|async|await|const|extends|factory|final|get|implements|native|operator|set|static|sync|typedef|var|with|yield)\b`, KeywordDeclaration, nil}, - {`\b(bool|double|dynamic|int|num|Object|String|void)\b`, KeywordType, nil}, - {`\b(false|null|true)\b`, KeywordConstant, nil}, - {`[~!%^&*+=|?:<>/-]|as\b`, Operator, nil}, - {`[a-zA-Z_$]\w*:`, NameLabel, nil}, - {`[a-zA-Z_$]\w*`, Name, nil}, - {`[(){}\[\],.;]`, Punctuation, nil}, - {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`\d+(\.\d*)?([eE][+-]?\d+)?`, LiteralNumber, nil}, - {`\.\d+([eE][+-]?\d+)?`, LiteralNumber, nil}, - {`\n`, Text, nil}, - }, - "class": { - {`[a-zA-Z_$]\w*`, NameClass, Pop(1)}, - }, - "import_decl": { - Include("string_literal"), - {`\s+`, Text, nil}, - {`\b(as|show|hide)\b`, Keyword, nil}, - {`[a-zA-Z_$]\w*`, Name, nil}, - {`\,`, Punctuation, nil}, - {`\;`, Punctuation, Pop(1)}, - }, - "string_literal": { - {`r"""([\w\W]*?)"""`, LiteralStringDouble, nil}, - {`r'''([\w\W]*?)'''`, LiteralStringSingle, nil}, - {`r"(.*?)"`, LiteralStringDouble, nil}, - {`r'(.*?)'`, LiteralStringSingle, nil}, - 
{`"""`, LiteralStringDouble, Push("string_double_multiline")}, - {`'''`, LiteralStringSingle, Push("string_single_multiline")}, - {`"`, LiteralStringDouble, Push("string_double")}, - {`'`, LiteralStringSingle, Push("string_single")}, - }, - "string_common": { - {`\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])`, LiteralStringEscape, nil}, - {`(\$)([a-zA-Z_]\w*)`, ByGroups(LiteralStringInterpol, Name), nil}, - {`(\$\{)(.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, - }, - "string_double": { - {`"`, LiteralStringDouble, Pop(1)}, - {`[^"$\\\n]+`, LiteralStringDouble, nil}, - Include("string_common"), - {`\$+`, LiteralStringDouble, nil}, - }, - "string_double_multiline": { - {`"""`, LiteralStringDouble, Pop(1)}, - {`[^"$\\]+`, LiteralStringDouble, nil}, - Include("string_common"), - {`(\$|\")+`, LiteralStringDouble, nil}, - }, - "string_single": { - {`'`, LiteralStringSingle, Pop(1)}, - {`[^'$\\\n]+`, LiteralStringSingle, nil}, - Include("string_common"), - {`\$+`, LiteralStringSingle, nil}, - }, - "string_single_multiline": { - {`'''`, LiteralStringSingle, Pop(1)}, - {`[^\'$\\]+`, LiteralStringSingle, nil}, - Include("string_common"), - {`(\$|\')+`, LiteralStringSingle, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/diff.go b/vendor/github.com/alecthomas/chroma/lexers/d/diff.go deleted file mode 100644 index d4d6db450ec5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/diff.go +++ /dev/null @@ -1,33 +0,0 @@ -package d - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Diff lexer. 
-var Diff = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Diff", - Aliases: []string{"diff", "udiff"}, - EnsureNL: true, - Filenames: []string{"*.diff", "*.patch"}, - MimeTypes: []string{"text/x-diff", "text/x-patch"}, - }, - diffRules, -)) - -func diffRules() Rules { - return Rules{ - "root": { - {` .*\n`, Text, nil}, - {`\+.*\n`, GenericInserted, nil}, - {`-.*\n`, GenericDeleted, nil}, - {`!.*\n`, GenericStrong, nil}, - {`@.*\n`, GenericSubheading, nil}, - {`([Ii]ndex|diff).*\n`, GenericHeading, nil}, - {`=.*\n`, GenericHeading, nil}, - {`.*\n`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/django.go b/vendor/github.com/alecthomas/chroma/lexers/d/django.go deleted file mode 100644 index 5d3baa916391..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/django.go +++ /dev/null @@ -1,57 +0,0 @@ -package d - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Django/Jinja lexer. 
-var DjangoJinja = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Django/Jinja", - Aliases: []string{"django", "jinja"}, - Filenames: []string{}, - MimeTypes: []string{"application/x-django-templating", "application/x-jinja"}, - DotAll: true, - }, - djangoJinjaRules, -)) - -func djangoJinjaRules() Rules { - return Rules{ - "root": { - {`[^{]+`, Other, nil}, - {`\{\{`, CommentPreproc, Push("var")}, - {`\{[*#].*?[*#]\}`, Comment, nil}, - {`(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endcomment)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Comment, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, - {`(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Text, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, - {`(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword, Text, NameFunction), Push("block")}, - {`(\{%)(-?\s*)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword), Push("block")}, - {`\{`, Other, nil}, - }, - "varnames": { - {`(\|)(\s*)([a-zA-Z_]\w*)`, ByGroups(Operator, Text, NameFunction), nil}, - {`(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, - {`(_|true|false|none|True|False|None)\b`, KeywordPseudo, nil}, - {`(in|as|reversed|recursive|not|and|or|is|if|else|import|with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b`, Keyword, nil}, - {`(loop|block|super|forloop)\b`, NameBuiltin, nil}, - {`[a-zA-Z_][\w-]*`, NameVariable, nil}, - {`\.\w+`, NameVariable, nil}, - {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`([{}()\[\]+\-*/,:~]|[><=]=?)`, Operator, nil}, - {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, - }, - "var": { - {`\s+`, Text, nil}, - {`(-?)(\}\})`, ByGroups(Text, CommentPreproc), Pop(1)}, - Include("varnames"), - }, - 
"block": { - {`\s+`, Text, nil}, - {`(-?)(%\})`, ByGroups(Text, CommentPreproc), Pop(1)}, - Include("varnames"), - {`.`, Punctuation, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/docker.go b/vendor/github.com/alecthomas/chroma/lexers/d/docker.go deleted file mode 100644 index ea808f047dc6..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/docker.go +++ /dev/null @@ -1,35 +0,0 @@ -package d - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/b" - "github.com/alecthomas/chroma/lexers/internal" - "github.com/alecthomas/chroma/lexers/j" -) - -// Docker lexer. -var Docker = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Docker", - Aliases: []string{"docker", "dockerfile"}, - Filenames: []string{"Dockerfile", "*.docker"}, - MimeTypes: []string{"text/x-dockerfile-config"}, - CaseInsensitive: true, - }, - dockerRules, -)) - -func dockerRules() Rules { - return Rules{ - "root": { - {`#.*`, Comment, nil}, - {`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil}, - {`(HEALTHCHECK)(((?:\s*\\?\s*)--\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil}, - {`(VOLUME|ENTRYPOINT|CMD|SHELL)((?:\s*\\?\s*))(\[.*?\])`, ByGroups(Keyword, Using(b.Bash), Using(j.JSON)), nil}, - {`(LABEL|ENV|ARG)((?:(?:\s*\\?\s*)\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil}, - {`((?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)|VOLUME)\b(.*)`, ByGroups(Keyword, LiteralString), nil}, - {`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil}, - {`(.*\\\n)*.+`, Using(b.Bash), nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go b/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go deleted file mode 100644 index aeec6b1d9ec5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go +++ /dev/null @@ -1,73 +0,0 @@ -package d - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Dtd lexer. -var Dtd = internal.Register(MustNewLazyLexer( - &Config{ - Name: "DTD", - Aliases: []string{"dtd"}, - Filenames: []string{"*.dtd"}, - MimeTypes: []string{"application/xml-dtd"}, - DotAll: true, - }, - dtdRules, -)) - -func dtdRules() Rules { - return Rules{ - "root": { - Include("common"), - {`(\s]+)`, ByGroups(Keyword, Text, NameTag), nil}, - {`PUBLIC|SYSTEM`, KeywordConstant, nil}, - {`[\[\]>]`, Keyword, nil}, - }, - "common": { - {`\s+`, Text, nil}, - {`(%|&)[^;]*;`, NameEntity, nil}, - {``, Comment, Pop(1)}, - {`-`, Comment, nil}, - }, - "element": { - Include("common"), - {`EMPTY|ANY|#PCDATA`, KeywordConstant, nil}, - {`[^>\s|()?+*,]+`, NameTag, nil}, - {`>`, Keyword, Pop(1)}, - }, - "attlist": { - Include("common"), - {`CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION`, KeywordConstant, nil}, - {`#REQUIRED|#IMPLIED|#FIXED`, KeywordConstant, nil}, - {`xml:space|xml:lang`, KeywordReserved, nil}, - {`[^>\s|()?+*,]+`, NameAttribute, nil}, - {`>`, Keyword, Pop(1)}, - }, - "entity": { - Include("common"), - {`SYSTEM|PUBLIC|NDATA`, KeywordConstant, nil}, - {`[^>\s|()?+*,]+`, NameEntity, nil}, - {`>`, Keyword, Pop(1)}, - }, - "notation": { - Include("common"), - {`SYSTEM|PUBLIC`, KeywordConstant, nil}, - {`[^>\s|()?+*,]+`, NameAttribute, nil}, - {`>`, Keyword, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dylan.go b/vendor/github.com/alecthomas/chroma/lexers/d/dylan.go deleted file mode 100644 index feda74870e18..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/d/dylan.go +++ /dev/null @@ -1,76 +0,0 @@ -package d - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Dylan lexer. 
-var Dylan = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Dylan", - Aliases: []string{"dylan"}, - Filenames: []string{"*.dylan", "*.dyl", "*.intr"}, - MimeTypes: []string{"text/x-dylan"}, - CaseInsensitive: true, - }, - func() Rules { - return Rules{ - "root": { - {`\s+`, Whitespace, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil}, - Default(Push("code")), - }, - "code": { - {`\s+`, Whitespace, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*`, CommentMultiline, Push("comment")}, - {`"`, LiteralString, Push("string")}, - {`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, - {`#b[01]+`, LiteralNumberBin, nil}, - {`#o[0-7]+`, LiteralNumberOct, nil}, - {`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil}, - {`[-+]?\d+`, LiteralNumberInteger, nil}, - {`#x[0-9a-f]+`, LiteralNumberHex, nil}, - - {`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`, - ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil}, - {`(\?)(:)(token|name|variable|expression|body|case-body|\*)`, - ByGroups(Operator, Operator, NameVariable), nil}, - {`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil}, - - {`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil}, - {`:=`, Operator, nil}, - {`#[tf]`, Literal, nil}, - {`#"`, LiteralStringSymbol, Push("symbol")}, - {`#[a-z0-9-]+`, Keyword, nil}, - {`#(all-keys|include|key|next|rest)`, Keyword, nil}, - {`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil}, - {`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil}, - {`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil}, - {`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil}, - {`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(NameBuiltin, Whitespace, NameVariable), nil}, - {`(error|signal|return|break)`, NameException, nil}, - {`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, 
Name), nil}, - }, - "comment": { - {`[^*/]`, CommentMultiline, nil}, - {`/\*`, CommentMultiline, Push()}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[*/]`, CommentMultiline, nil}, - }, - "symbol": { - {`"`, LiteralStringSymbol, Pop(1)}, - {`[^\\"]+`, LiteralStringSymbol, nil}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - }, - } - }, -)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go b/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go deleted file mode 100644 index 5ccdd38ea328..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go +++ /dev/null @@ -1,55 +0,0 @@ -package e - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Ebnf lexer. -var Ebnf = internal.Register(MustNewLazyLexer( - &Config{ - Name: "EBNF", - Aliases: []string{"ebnf"}, - Filenames: []string{"*.ebnf"}, - MimeTypes: []string{"text/x-ebnf"}, - }, - ebnfRules, -)) - -func ebnfRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - Include("comment_start"), - Include("identifier"), - {`=`, Operator, Push("production")}, - }, - "production": { - Include("whitespace"), - Include("comment_start"), - Include("identifier"), - {`"[^"]*"`, LiteralStringDouble, nil}, - {`'[^']*'`, LiteralStringSingle, nil}, - {`(\?[^?]*\?)`, NameEntity, nil}, - {`[\[\]{}(),|]`, Punctuation, nil}, - {`-`, Operator, nil}, - {`;`, Punctuation, Pop(1)}, - {`\.`, Punctuation, Pop(1)}, - }, - "whitespace": { - {`\s+`, Text, nil}, - }, - "comment_start": { - {`\(\*`, CommentMultiline, Push("comment")}, - }, - "comment": { - {`[^*)]`, CommentMultiline, nil}, - Include("comment_start"), - {`\*\)`, CommentMultiline, Pop(1)}, - {`[*)]`, CommentMultiline, nil}, - }, - "identifier": { - {`([a-zA-Z][\w \-]*)`, Keyword, nil}, - }, - } -} diff 
--git a/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go b/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go deleted file mode 100644 index f283f846bef0..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go +++ /dev/null @@ -1,281 +0,0 @@ -package e - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Elixir lexer. -var Elixir = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Elixir", - Aliases: []string{"elixir", "ex", "exs"}, - Filenames: []string{"*.ex", "*.exs"}, - MimeTypes: []string{"text/x-elixir"}, - }, - elixirRules, -)) - -func elixirRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`#.*$`, CommentSingle, nil}, - {`(\?)(\\x\{)([\da-fA-F]+)(\})`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralNumberHex, LiteralStringEscape), nil}, - {`(\?)(\\x[\da-fA-F]{1,2})`, ByGroups(LiteralStringChar, LiteralStringEscape), nil}, - {`(\?)(\\[abdefnrstv])`, ByGroups(LiteralStringChar, LiteralStringEscape), nil}, - {`\?\\?.`, LiteralStringChar, nil}, - {`:::`, LiteralStringSymbol, nil}, - {`::`, Operator, nil}, - {`:(?:\.\.\.|<<>>|%\{\}|%|\{\})`, LiteralStringSymbol, nil}, - {`:(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&))`, LiteralStringSymbol, nil}, - {`:"`, LiteralStringSymbol, Push("string_double_atom")}, - {`:'`, LiteralStringSymbol, Push("string_single_atom")}, - {`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, - 
{`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil}, - {`(not|and|or|when|in)\b`, OperatorWord, nil}, - {`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil}, - {`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil}, - {`(import|require|use|alias)\b`, KeywordNamespace, nil}, - {`(nil|true|false)\b`, NameConstant, nil}, - {`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil}, - {`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil}, - {`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil}, - {`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil}, - {`\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>`, Operator, nil}, - {`\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~`, Operator, nil}, - {`\\\\|\<\<|\>\>|\=\>|\(|\)|\:|\;|\,|\[|\]`, Punctuation, nil}, - {`&\d`, NameEntity, nil}, - {`\<|\>|\+|\-|\*|\/|\!|\^|\&`, Operator, nil}, - {`0b[01](_?[01])*`, LiteralNumberBin, nil}, - {`0o[0-7](_?[0-7])*`, LiteralNumberOct, nil}, - {`0x[\da-fA-F](_?[\dA-Fa-f])*`, LiteralNumberHex, nil}, - {`\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?`, LiteralNumberFloat, nil}, - {`\d(_?\d)*`, LiteralNumberInteger, nil}, - {`"""\s*`, LiteralStringHeredoc, Push("heredoc_double")}, - {`'''\s*$`, LiteralStringHeredoc, Push("heredoc_single")}, - {`"`, LiteralStringDouble, Push("string_double")}, - {`'`, LiteralStringSingle, Push("string_single")}, - Include("sigils"), - {`%\{`, Punctuation, Push("map_key")}, - {`\{`, Punctuation, Push("tuple")}, - }, - "heredoc_double": { - {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, - Include("heredoc_interpol"), - }, - "heredoc_single": { - {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, - Include("heredoc_interpol"), - }, - "heredoc_interpol": { - {`[^#\\\n]+`, LiteralStringHeredoc, nil}, - Include("escapes"), - {`\\.`, LiteralStringHeredoc, nil}, 
- {`\n+`, LiteralStringHeredoc, nil}, - Include("interpol"), - }, - "heredoc_no_interpol": { - {`[^\\\n]+`, LiteralStringHeredoc, nil}, - {`\\.`, LiteralStringHeredoc, nil}, - {`\n+`, LiteralStringHeredoc, nil}, - }, - "escapes": { - {`(\\x\{)([\da-fA-F]+)(\})`, ByGroups(LiteralStringEscape, LiteralNumberHex, LiteralStringEscape), nil}, - {`(\\x[\da-fA-F]{1,2})`, LiteralStringEscape, nil}, - {`(\\[abdefnrstv])`, LiteralStringEscape, nil}, - }, - "interpol": { - {`#\{`, LiteralStringInterpol, Push("interpol_string")}, - }, - "interpol_string": { - {`\}`, LiteralStringInterpol, Pop(1)}, - Include("root"), - }, - "map_key": { - Include("root"), - {`:`, Punctuation, Push("map_val")}, - {`=>`, Punctuation, Push("map_val")}, - {`\}`, Punctuation, Pop(1)}, - }, - "map_val": { - Include("root"), - {`,`, Punctuation, Pop(1)}, - {`(?=\})`, Punctuation, Pop(1)}, - }, - "tuple": { - Include("root"), - {`\}`, Punctuation, Pop(1)}, - }, - "string_double": { - {`[^#"\\]+`, LiteralStringDouble, nil}, - Include("escapes"), - {`\\.`, LiteralStringDouble, nil}, - {`(")`, ByGroups(LiteralStringDouble), Pop(1)}, - Include("interpol"), - }, - "string_single": { - {`[^#'\\]+`, LiteralStringSingle, nil}, - Include("escapes"), - {`\\.`, LiteralStringSingle, nil}, - {`(')`, ByGroups(LiteralStringSingle), Pop(1)}, - Include("interpol"), - }, - "string_double_atom": { - {`[^#"\\]+`, LiteralStringSymbol, nil}, - Include("escapes"), - {`\\.`, LiteralStringSymbol, nil}, - {`(")`, ByGroups(LiteralStringSymbol), Pop(1)}, - Include("interpol"), - }, - "string_single_atom": { - {`[^#'\\]+`, LiteralStringSymbol, nil}, - Include("escapes"), - {`\\.`, LiteralStringSymbol, nil}, - {`(')`, ByGroups(LiteralStringSymbol), Pop(1)}, - Include("interpol"), - }, - "sigils": { - {`(~[a-z])(""")`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triquot-end", "triquot-intp")}, - {`(~[A-Z])(""")`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triquot-end", "triquot-no-intp")}, - 
{`(~[a-z])(''')`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triapos-end", "triapos-intp")}, - {`(~[A-Z])(''')`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triapos-end", "triapos-no-intp")}, - {`~[a-z]\{`, LiteralStringOther, Push("cb-intp")}, - {`~[A-Z]\{`, LiteralStringOther, Push("cb-no-intp")}, - {`~[a-z]\[`, LiteralStringOther, Push("sb-intp")}, - {`~[A-Z]\[`, LiteralStringOther, Push("sb-no-intp")}, - {`~[a-z]\(`, LiteralStringOther, Push("pa-intp")}, - {`~[A-Z]\(`, LiteralStringOther, Push("pa-no-intp")}, - {`~[a-z]<`, LiteralStringOther, Push("ab-intp")}, - {`~[A-Z]<`, LiteralStringOther, Push("ab-no-intp")}, - {`~[a-z]/`, LiteralStringOther, Push("slas-intp")}, - {`~[A-Z]/`, LiteralStringOther, Push("slas-no-intp")}, - {`~[a-z]\|`, LiteralStringOther, Push("pipe-intp")}, - {`~[A-Z]\|`, LiteralStringOther, Push("pipe-no-intp")}, - {`~[a-z]"`, LiteralStringOther, Push("quot-intp")}, - {`~[A-Z]"`, LiteralStringOther, Push("quot-no-intp")}, - {`~[a-z]'`, LiteralStringOther, Push("apos-intp")}, - {`~[A-Z]'`, LiteralStringOther, Push("apos-no-intp")}, - }, - "triquot-end": { - {`[a-zA-Z]+`, LiteralStringOther, Pop(1)}, - Default(Pop(1)), - }, - "triquot-intp": { - {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, - Include("heredoc_interpol"), - }, - "triquot-no-intp": { - {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, - Include("heredoc_no_interpol"), - }, - "triapos-end": { - {`[a-zA-Z]+`, LiteralStringOther, Pop(1)}, - Default(Pop(1)), - }, - "triapos-intp": { - {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, - Include("heredoc_interpol"), - }, - "triapos-no-intp": { - {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, - Include("heredoc_no_interpol"), - }, - "cb-intp": { - {`[^#\}\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`\}[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "cb-no-intp": { - {`[^\}\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - 
{`\}[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "sb-intp": { - {`[^#\]\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`\][a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "sb-no-intp": { - {`[^\]\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - {`\][a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "pa-intp": { - {`[^#\)\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`\)[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "pa-no-intp": { - {`[^\)\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - {`\)[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "ab-intp": { - {`[^#>\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`>[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "ab-no-intp": { - {`[^>\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - {`>[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "slas-intp": { - {`[^#/\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`/[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "slas-no-intp": { - {`[^/\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - {`/[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "pipe-intp": { - {`[^#\|\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`\|[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "pipe-no-intp": { - {`[^\|\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - {`\|[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "quot-intp": { - {`[^#"\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`"[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "quot-no-intp": { - {`[^"\\]+`, LiteralStringOther, nil}, - {`\\.`, 
LiteralStringOther, nil}, - {`"[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - "apos-intp": { - {`[^#'\\]+`, LiteralStringOther, nil}, - Include("escapes"), - {`\\.`, LiteralStringOther, nil}, - {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - Include("interpol"), - }, - "apos-no-intp": { - {`[^'\\]+`, LiteralStringOther, nil}, - {`\\.`, LiteralStringOther, nil}, - {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/elm.go b/vendor/github.com/alecthomas/chroma/lexers/e/elm.go deleted file mode 100644 index 0fb6689537b6..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/e/elm.go +++ /dev/null @@ -1,63 +0,0 @@ -package e - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Elm lexer. -var Elm = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Elm", - Aliases: []string{"elm"}, - Filenames: []string{"*.elm"}, - MimeTypes: []string{"text/x-elm"}, - }, - elmRules, -)) - -func elmRules() Rules { - return Rules{ - "root": { - {`\{-`, CommentMultiline, Push("comment")}, - {`--.*`, CommentSingle, nil}, - {`\s+`, Text, nil}, - {`"`, LiteralString, Push("doublequote")}, - {`^\s*module\s*`, KeywordNamespace, Push("imports")}, - {`^\s*import\s*`, KeywordNamespace, Push("imports")}, - {`\[glsl\|.*`, NameEntity, Push("shader")}, - {Words(``, `\b`, `alias`, `as`, `case`, `else`, `if`, `import`, `in`, `let`, `module`, `of`, `port`, `then`, `type`, `where`), KeywordReserved, nil}, - {`[A-Z]\w*`, KeywordType, nil}, - {`^main `, KeywordReserved, nil}, - {Words(`\(`, `\)`, `~`, `||`, `|>`, `|`, "`", `^`, `\`, `'`, `>>`, `>=`, `>`, `==`, `=`, `<~`, `<|`, `<=`, `<<`, `<-`, `<`, `::`, `:`, `/=`, `//`, `/`, `..`, `.`, `->`, `-`, `++`, `+`, `*`, `&&`, `%`), NameFunction, nil}, - {Words(``, ``, `~`, `||`, `|>`, `|`, "`", `^`, `\`, `'`, `>>`, `>=`, `>`, `==`, `=`, `<~`, `<|`, `<=`, `<<`, `<-`, `<`, `::`, `:`, `/=`, `//`, `/`, `..`, `.`, `->`, `-`, `++`, 
`+`, `*`, `&&`, `%`), NameFunction, nil}, - Include("numbers"), - {`[a-z_][a-zA-Z_\']*`, NameVariable, nil}, - {`[,()\[\]{}]`, Punctuation, nil}, - }, - "comment": { - {`-(?!\})`, CommentMultiline, nil}, - {`\{-`, CommentMultiline, Push("comment")}, - {`[^-}]`, CommentMultiline, nil}, - {`-\}`, CommentMultiline, Pop(1)}, - }, - "doublequote": { - {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, - {`\\[nrfvb\\"]`, LiteralStringEscape, nil}, - {`[^"]`, LiteralString, nil}, - {`"`, LiteralString, Pop(1)}, - }, - "imports": { - {`\w+(\.\w+)*`, NameClass, Pop(1)}, - }, - "numbers": { - {`_?\d+\.(?=\d+)`, LiteralNumberFloat, nil}, - {`_?\d+`, LiteralNumberInteger, nil}, - }, - "shader": { - {`\|(?!\])`, NameEntity, nil}, - {`\|\]`, NameEntity, Pop(1)}, - {`.*\n`, NameEntity, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go b/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go deleted file mode 100644 index 5f5c9caadc70..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go +++ /dev/null @@ -1,70 +0,0 @@ -package e - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Erlang lexer. 
-var Erlang = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Erlang", - Aliases: []string{"erlang"}, - Filenames: []string{"*.erl", "*.hrl", "*.es", "*.escript"}, - MimeTypes: []string{"text/x-erlang"}, - }, - erlangRules, -)) - -func erlangRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`%.*\n`, Comment, nil}, - {Words(``, `\b`, `after`, `begin`, `case`, `catch`, `cond`, `end`, `fun`, `if`, `let`, `of`, `query`, `receive`, `try`, `when`), Keyword, nil}, - {Words(``, `\b`, `abs`, `append_element`, `apply`, `atom_to_list`, `binary_to_list`, `bitstring_to_list`, `binary_to_term`, `bit_size`, `bump_reductions`, `byte_size`, `cancel_timer`, `check_process_code`, `delete_module`, `demonitor`, `disconnect_node`, `display`, `element`, `erase`, `exit`, `float`, `float_to_list`, `fun_info`, `fun_to_list`, `function_exported`, `garbage_collect`, `get`, `get_keys`, `group_leader`, `hash`, `hd`, `integer_to_list`, `iolist_to_binary`, `iolist_size`, `is_atom`, `is_binary`, `is_bitstring`, `is_boolean`, `is_builtin`, `is_float`, `is_function`, `is_integer`, `is_list`, `is_number`, `is_pid`, `is_port`, `is_process_alive`, `is_record`, `is_reference`, `is_tuple`, `length`, `link`, `list_to_atom`, `list_to_binary`, `list_to_bitstring`, `list_to_existing_atom`, `list_to_float`, `list_to_integer`, `list_to_pid`, `list_to_tuple`, `load_module`, `localtime_to_universaltime`, `make_tuple`, `md5`, `md5_final`, `md5_update`, `memory`, `module_loaded`, `monitor`, `monitor_node`, `node`, `nodes`, `open_port`, `phash`, `phash2`, `pid_to_list`, `port_close`, `port_command`, `port_connect`, `port_control`, `port_call`, `port_info`, `port_to_list`, `process_display`, `process_flag`, `process_info`, `purge_module`, `put`, `read_timer`, `ref_to_list`, `register`, `resume_process`, `round`, `send`, `send_after`, `send_nosuspend`, `set_cookie`, `setelement`, `size`, `spawn`, `spawn_link`, `spawn_monitor`, `spawn_opt`, `split_binary`, `start_timer`, `statistics`, 
`suspend_process`, `system_flag`, `system_info`, `system_monitor`, `system_profile`, `term_to_binary`, `tl`, `trace`, `trace_delivered`, `trace_info`, `trace_pattern`, `trunc`, `tuple_size`, `tuple_to_list`, `universaltime_to_localtime`, `unlink`, `unregister`, `whereis`), NameBuiltin, nil}, - {Words(``, `\b`, `and`, `andalso`, `band`, `bnot`, `bor`, `bsl`, `bsr`, `bxor`, `div`, `not`, `or`, `orelse`, `rem`, `xor`), OperatorWord, nil}, - {`^-`, Punctuation, Push("directive")}, - {`(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)`, Operator, nil}, - {`"`, LiteralString, Push("string")}, - {`<<`, NameLabel, nil}, - {`>>`, NameLabel, nil}, - {`((?:[a-z]\w*|'[^\n']*[^\\]'))(:)`, ByGroups(NameNamespace, Punctuation), nil}, - {`(?:^|(?<=:))((?:[a-z]\w*|'[^\n']*[^\\]'))(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, - {`[+-]?(?:[2-9]|[12][0-9]|3[0-6])#[0-9a-zA-Z]+`, LiteralNumberInteger, nil}, - {`[+-]?\d+`, LiteralNumberInteger, nil}, - {`[+-]?\d+.\d+`, LiteralNumberFloat, nil}, - {`[]\[:_@\".{}()|;,]`, Punctuation, nil}, - {`(?:[A-Z_]\w*)`, NameVariable, nil}, - {`(?:[a-z]\w*|'[^\n']*[^\\]')`, Name, nil}, - {`\?(?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]'))`, NameConstant, nil}, - {`\$(?:(?:\\(?:[bdefnrstv\'"\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))|\\[ %]|[^\\])`, LiteralStringChar, nil}, - {`#(?:[a-z]\w*|'[^\n']*[^\\]')(:?\.(?:[a-z]\w*|'[^\n']*[^\\]'))?`, NameLabel, nil}, - {`\A#!.+\n`, CommentHashbang, nil}, - {`#\{`, Punctuation, Push("map_key")}, - }, - "string": { - {`(?:\\(?:[bdefnrstv\'"\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))`, LiteralStringEscape, nil}, - {`"`, LiteralString, Pop(1)}, - {`~[0-9.*]*[~#+BPWXb-ginpswx]`, LiteralStringInterpol, nil}, - {`[^"\\~]+`, LiteralString, nil}, - {`~`, LiteralString, nil}, - }, - "directive": { - {`(define)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]')))`, ByGroups(NameEntity, Text, Punctuation, NameConstant), Pop(1)}, - 
{`(record)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]')))`, ByGroups(NameEntity, Text, Punctuation, NameLabel), Pop(1)}, - {`(?:[a-z]\w*|'[^\n']*[^\\]')`, NameEntity, Pop(1)}, - }, - "map_key": { - Include("root"), - {`=>`, Punctuation, Push("map_val")}, - {`:=`, Punctuation, Push("map_val")}, - {`\}`, Punctuation, Pop(1)}, - }, - "map_val": { - Include("root"), - {`,`, Punctuation, Pop(1)}, - {`(?=\})`, Punctuation, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/factor.go b/vendor/github.com/alecthomas/chroma/lexers/f/factor.go deleted file mode 100644 index d88beb231d17..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/factor.go +++ /dev/null @@ -1,119 +0,0 @@ -package f - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Factor lexer. -var Factor = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Factor", - Aliases: []string{"factor"}, - Filenames: []string{"*.factor"}, - MimeTypes: []string{"text/x-factor"}, - }, - factorRules, -)) - -func factorRules() Rules { - return Rules{ - "root": { - {`#!.*$`, CommentPreproc, nil}, - Default(Push("base")), - }, - "base": { - {`\s+`, Text, nil}, - {`((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, - {`(M:[:]?)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameFunction), nil}, - {`(C:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameClass), nil}, - {`(GENERIC:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, - {`(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameFunction), nil}, - {`\(\s`, NameFunction, Push("stackeffect")}, - {`;\s`, Keyword, nil}, - {`(USING:)(\s+)`, ByGroups(KeywordNamespace, Text), Push("vocabs")}, - {`(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameNamespace), nil}, - {`(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)`, 
ByGroups(KeywordNamespace, Text, NameNamespace, Text, NameNamespace), nil}, - {`(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)`, ByGroups(KeywordNamespace, Text, NameNamespace, Text), Push("words")}, - {`(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameNamespace, Text, NameFunction), nil}, - {`(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameFunction), nil}, - {`(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction), nil}, - {`(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), Push("slots")}, - {`(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), Push("slots")}, - {`(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, - {`(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), nil}, - {`(C:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameClass), nil}, - {`(INSTANCE:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), nil}, - {`(SLOT:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, - {`(SINGLETON:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, - {`SINGLETONS:`, Keyword, Push("classes")}, - {`(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, - {`SYMBOLS:\s`, Keyword, Push("words")}, - {`SYNTAX:\s`, Keyword, nil}, - {`ALIEN:\s`, Keyword, nil}, - {`(STRUCT:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, - {`(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)`, ByGroups(KeywordNamespace, Text, NameFunction, Text), nil}, - {`(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameFunction, Text), nil}, - {`(?:)\s`, KeywordNamespace, nil}, - {`"""\s+(?:.|\n)*?\s+"""`, LiteralString, nil}, - {`"(?:\\\\|\\"|[^"])*"`, LiteralString, nil}, - 
{`\S+"\s+(?:\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s`, LiteralStringChar, nil}, - {`!\s+.*$`, Comment, nil}, - {`#!\s+.*$`, Comment, nil}, - {`/\*\s+(?:.|\n)*?\s\*/\s`, Comment, nil}, - {`[tf]\s`, NameConstant, nil}, - {`[\\$]\s+\S+`, NameConstant, nil}, - {`M\\\s+\S+\s+\S+`, NameConstant, nil}, - {`[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s`, LiteralNumber, nil}, - {`[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s`, LiteralNumber, nil}, - {`0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s`, LiteralNumber, nil}, - {`NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s`, LiteralNumber, nil}, - {`0b[01]+\s`, LiteralNumberBin, nil}, - {`0o[0-7]+\s`, LiteralNumberOct, nil}, - {`(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s`, LiteralNumber, nil}, - {`(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s`, LiteralNumber, nil}, - {`(?:deprecated|final|foldable|flushable|inline|recursive)\s`, Keyword, nil}, - {Words(``, `\s`, `-rot`, `2bi`, `2bi@`, `2bi*`, `2curry`, `2dip`, `2drop`, `2dup`, `2keep`, `2nip`, `2over`, `2tri`, `2tri@`, `2tri*`, `3bi`, `3curry`, `3dip`, `3drop`, `3dup`, `3keep`, `3tri`, `4dip`, `4drop`, `4dup`, `4keep`, ``, `=`, `>boolean`, `clone`, `?`, `?execute`, `?if`, `and`, `assert`, `assert=`, `assert?`, `bi`, `bi-curry`, `bi-curry@`, `bi-curry*`, `bi@`, `bi*`, `boa`, `boolean`, `boolean?`, `both?`, `build`, `call`, `callstack`, `callstack>array`, `callstack?`, `clear`, `(clone)`, `compose`, `compose?`, `curry`, `curry?`, `datastack`, `die`, `dip`, `do`, `drop`, `dup`, `dupd`, `either?`, `eq?`, `equal?`, `execute`, `hashcode`, `hashcode*`, `identity-hashcode`, `identity-tuple`, `identity-tuple?`, `if`, `if*`, `keep`, `loop`, `most`, `new`, `nip`, `not`, `null`, `object`, `or`, `over`, `pick`, `prepose`, `retainstack`, `rot`, `same?`, `swap`, `swapd`, `throw`, `tri`, `tri-curry`, `tri-curry@`, `tri-curry*`, `tri@`, `tri*`, `tuple`, `tuple?`, `unless`, `unless*`, `until`, 
`when`, `when*`, `while`, `with`, `wrapper`, `wrapper?`, `xor`), NameBuiltin, nil}, - {Words(``, `\s`, `2cache`, ``, `>alist`, `?at`, `?of`, `assoc`, `assoc-all?`, `assoc-any?`, `assoc-clone-like`, `assoc-combine`, `assoc-diff`, `assoc-diff!`, `assoc-differ`, `assoc-each`, `assoc-empty?`, `assoc-filter`, `assoc-filter!`, `assoc-filter-as`, `assoc-find`, `assoc-hashcode`, `assoc-intersect`, `assoc-like`, `assoc-map`, `assoc-map-as`, `assoc-partition`, `assoc-refine`, `assoc-size`, `assoc-stack`, `assoc-subset?`, `assoc-union`, `assoc-union!`, `assoc=`, `assoc>map`, `assoc?`, `at`, `at+`, `at*`, `cache`, `change-at`, `clear-assoc`, `delete-at`, `delete-at*`, `enum`, `enum?`, `extract-keys`, `inc-at`, `key?`, `keys`, `map>assoc`, `maybe-set-at`, `new-assoc`, `of`, `push-at`, `rename-at`, `set-at`, `sift-keys`, `sift-values`, `substitute`, `unzip`, `value-at`, `value-at*`, `value?`, `values`, `zip`), NameBuiltin, nil}, - {Words(``, `\s`, `2cleave`, `2cleave>quot`, `3cleave`, `3cleave>quot`, `4cleave`, `4cleave>quot`, `alist>quot`, `call-effect`, `case`, `case-find`, `case>quot`, `cleave`, `cleave>quot`, `cond`, `cond>quot`, `deep-spread>quot`, `execute-effect`, `linear-case-quot`, `no-case`, `no-case?`, `no-cond`, `no-cond?`, `recursive-hashcode`, `shallow-spread>quot`, `spread`, `to-fixed-point`, `wrong-values`, `wrong-values?`), NameBuiltin, nil}, - {Words(``, `\s`, `-`, `/`, `/f`, `/i`, `/mod`, `2/`, `2^`, `<`, `<=`, ``, `>`, `>=`, `>bignum`, `>fixnum`, `>float`, `>integer`, `(all-integers?)`, `(each-integer)`, `(find-integer)`, `*`, `+`, `?1+`, `abs`, `align`, `all-integers?`, `bignum`, `bignum?`, `bit?`, `bitand`, `bitnot`, `bitor`, `bits>double`, `bits>float`, `bitxor`, `complex`, `complex?`, `denominator`, `double>bits`, `each-integer`, `even?`, `find-integer`, `find-last-integer`, `fixnum`, `fixnum?`, `float`, `float>bits`, `float?`, `fp-bitwise=`, `fp-infinity?`, `fp-nan-payload`, `fp-nan?`, `fp-qnan?`, `fp-sign`, `fp-snan?`, `fp-special?`, `if-zero`, 
`imaginary-part`, `integer`, `integer>fixnum`, `integer>fixnum-strict`, `integer?`, `log2`, `log2-expects-positive`, `log2-expects-positive?`, `mod`, `neg`, `neg?`, `next-float`, `next-power-of-2`, `number`, `number=`, `number?`, `numerator`, `odd?`, `out-of-fixnum-range`, `out-of-fixnum-range?`, `power-of-2?`, `prev-float`, `ratio`, `ratio?`, `rational`, `rational?`, `real`, `real-part`, `real?`, `recip`, `rem`, `sgn`, `shift`, `sq`, `times`, `u<`, `u<=`, `u>`, `u>=`, `unless-zero`, `unordered?`, `when-zero`, `zero?`), NameBuiltin, nil}, - {Words(``, `\s`, `1sequence`, `2all?`, `2each`, `2map`, `2map-as`, `2map-reduce`, `2reduce`, `2selector`, `2sequence`, `3append`, `3append-as`, `3each`, `3map`, `3map-as`, `3sequence`, `4sequence`, ``, ``, ``, `?first`, `?last`, `?nth`, `?second`, `?set-nth`, `accumulate`, `accumulate!`, `accumulate-as`, `all?`, `any?`, `append`, `append!`, `append-as`, `assert-sequence`, `assert-sequence=`, `assert-sequence?`, `binary-reduce`, `bounds-check`, `bounds-check?`, `bounds-error`, `bounds-error?`, `but-last`, `but-last-slice`, `cartesian-each`, `cartesian-map`, `cartesian-product`, `change-nth`, `check-slice`, `check-slice-error`, `clone-like`, `collapse-slice`, `collector`, `collector-for`, `concat`, `concat-as`, `copy`, `count`, `cut`, `cut-slice`, `cut*`, `delete-all`, `delete-slice`, `drop-prefix`, `each`, `each-from`, `each-index`, `empty?`, `exchange`, `filter`, `filter!`, `filter-as`, `find`, `find-from`, `find-index`, `find-index-from`, `find-last`, `find-last-from`, `first`, `first2`, `first3`, `first4`, `flip`, `follow`, `fourth`, `glue`, `halves`, `harvest`, `head`, `head-slice`, `head-slice*`, `head*`, `head?`, `if-empty`, `immutable`, `immutable-sequence`, `immutable-sequence?`, `immutable?`, `index`, `index-from`, `indices`, `infimum`, `infimum-by`, `insert-nth`, `interleave`, `iota`, `iota-tuple`, `iota-tuple?`, `join`, `join-as`, `last`, `last-index`, `last-index-from`, `length`, `lengthen`, `like`, `longer`, 
`longer?`, `longest`, `map`, `map!`, `map-as`, `map-find`, `map-find-last`, `map-index`, `map-integers`, `map-reduce`, `map-sum`, `max-length`, `member-eq?`, `member?`, `midpoint@`, `min-length`, `mismatch`, `move`, `new-like`, `new-resizable`, `new-sequence`, `non-negative-integer-expected`, `non-negative-integer-expected?`, `nth`, `nths`, `pad-head`, `pad-tail`, `padding`, `partition`, `pop`, `pop*`, `prefix`, `prepend`, `prepend-as`, `produce`, `produce-as`, `product`, `push`, `push-all`, `push-either`, `push-if`, `reduce`, `reduce-index`, `remove`, `remove!`, `remove-eq`, `remove-eq!`, `remove-nth`, `remove-nth!`, `repetition`, `repetition?`, `replace-slice`, `replicate`, `replicate-as`, `rest`, `rest-slice`, `reverse`, `reverse!`, `reversed`, `reversed?`, `second`, `selector`, `selector-for`, `sequence`, `sequence-hashcode`, `sequence=`, `sequence?`, `set-first`, `set-fourth`, `set-last`, `set-length`, `set-nth`, `set-second`, `set-third`, `short`, `shorten`, `shorter`, `shorter?`, `shortest`, `sift`, `slice`, `slice-error`, `slice-error?`, `slice?`, `snip`, `snip-slice`, `start`, `start*`, `subseq`, `subseq?`, `suffix`, `suffix!`, `sum`, `sum-lengths`, `supremum`, `supremum-by`, `surround`, `tail`, `tail-slice`, `tail-slice*`, `tail*`, `tail?`, `third`, `trim`, `trim-head`, `trim-head-slice`, `trim-slice`, `trim-tail`, `trim-tail-slice`, `unclip`, `unclip-last`, `unclip-last-slice`, `unclip-slice`, `unless-empty`, `virtual-exemplar`, `virtual-sequence`, `virtual-sequence?`, `virtual@`, `when-empty`), NameBuiltin, nil}, - {Words(``, `\s`, `+@`, `change`, `change-global`, `counter`, `dec`, `get`, `get-global`, `global`, `inc`, `init-namespaces`, `initialize`, `is-global`, `make-assoc`, `namespace`, `namestack`, `off`, `on`, `set`, `set-global`, `set-namestack`, `toggle`, `with-global`, `with-scope`, `with-variable`, `with-variables`), NameBuiltin, nil}, - {Words(``, `\s`, `1array`, `2array`, `3array`, `4array`, ``, `>array`, `array`, `array?`, `pair`, `pair?`, 
`resize-array`), NameBuiltin, nil}, - {Words(``, `\s`, `(each-stream-block-slice)`, `(each-stream-block)`, `(stream-contents-by-block)`, `(stream-contents-by-element)`, `(stream-contents-by-length-or-block)`, `(stream-contents-by-length)`, `+byte+`, `+character+`, `bad-seek-type`, `bad-seek-type?`, `bl`, `contents`, `each-block`, `each-block-size`, `each-block-slice`, `each-line`, `each-morsel`, `each-stream-block`, `each-stream-block-slice`, `each-stream-line`, `error-stream`, `flush`, `input-stream`, `input-stream?`, `invalid-read-buffer`, `invalid-read-buffer?`, `lines`, `nl`, `output-stream`, `output-stream?`, `print`, `read`, `read-into`, `read-partial`, `read-partial-into`, `read-until`, `read1`, `readln`, `seek-absolute`, `seek-absolute?`, `seek-end`, `seek-end?`, `seek-input`, `seek-output`, `seek-relative`, `seek-relative?`, `stream-bl`, `stream-contents`, `stream-contents*`, `stream-copy`, `stream-copy*`, `stream-element-type`, `stream-flush`, `stream-length`, `stream-lines`, `stream-nl`, `stream-print`, `stream-read`, `stream-read-into`, `stream-read-partial`, `stream-read-partial-into`, `stream-read-partial-unsafe`, `stream-read-unsafe`, `stream-read-until`, `stream-read1`, `stream-readln`, `stream-seek`, `stream-seekable?`, `stream-tell`, `stream-write`, `stream-write1`, `tell-input`, `tell-output`, `with-error-stream`, `with-error-stream*`, `with-error>output`, `with-input-output+error-streams`, `with-input-output+error-streams*`, `with-input-stream`, `with-input-stream*`, `with-output-stream`, `with-output-stream*`, `with-output>error`, `with-output+error-stream`, `with-output+error-stream*`, `with-streams`, `with-streams*`, `write`, `write1`), NameBuiltin, nil}, - {Words(``, `\s`, `1string`, ``, `>string`, `resize-string`, `string`, `string?`), NameBuiltin, nil}, - {Words(``, `\s`, `1vector`, ``, `>vector`, `?push`, `vector`, `vector?`), NameBuiltin, nil}, - {Words(``, `\s`, ``, ``, ``, `attempt-all`, `attempt-all-error`, `attempt-all-error?`, 
`callback-error-hook`, `callcc0`, `callcc1`, `cleanup`, `compute-restarts`, `condition`, `condition?`, `continuation`, `continuation?`, `continue`, `continue-restart`, `continue-with`, `current-continuation`, `error`, `error-continuation`, `error-in-thread`, `error-thread`, `ifcc`, `ignore-errors`, `in-callback?`, `original-error`, `recover`, `restart`, `restart?`, `restarts`, `rethrow`, `rethrow-restarts`, `return`, `return-continuation`, `thread-error-hook`, `throw-continue`, `throw-restarts`, `with-datastack`, `with-return`), NameBuiltin, nil}, - {`\S+`, Text, nil}, - }, - "stackeffect": { - {`\s+`, Text, nil}, - {`\(\s+`, NameFunction, Push("stackeffect")}, - {`\)\s`, NameFunction, Pop(1)}, - {`--\s`, NameFunction, nil}, - {`\S+`, NameVariable, nil}, - }, - "slots": { - {`\s+`, Text, nil}, - {`;\s`, Keyword, Pop(1)}, - {`(\{\s+)(\S+)(\s+[^}]+\s+\}\s)`, ByGroups(Text, NameVariable, Text), nil}, - {`\S+`, NameVariable, nil}, - }, - "vocabs": { - {`\s+`, Text, nil}, - {`;\s`, Keyword, Pop(1)}, - {`\S+`, NameNamespace, nil}, - }, - "classes": { - {`\s+`, Text, nil}, - {`;\s`, Keyword, Pop(1)}, - {`\S+`, NameClass, nil}, - }, - "words": { - {`\s+`, Text, nil}, - {`;\s`, Keyword, Pop(1)}, - {`\S+`, NameFunction, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fennel.go b/vendor/github.com/alecthomas/chroma/lexers/f/fennel.go deleted file mode 100644 index 23134d4ebb2f..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/fennel.go +++ /dev/null @@ -1,66 +0,0 @@ -package f - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Fennel lexer. 
-var Fennel = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Fennel", - Aliases: []string{"fennel", "fnl"}, - Filenames: []string{"*.fennel"}, - MimeTypes: []string{"text/x-fennel", "application/x-fennel"}, - }, - fennelRules, -)) - -// Here's some Fennel code used to generate the lists of keywords: -// (local fennel (require :fennel)) -// -// (fn member? [t x] (each [_ y (ipairs t)] (when (= y x) (lua "return true")))) -// -// (local declarations [:fn :lambda :λ :local :var :global :macro :macros]) -// (local keywords []) -// (local globals []) -// -// (each [name data (pairs (fennel.syntax))] -// (if (member? declarations name) nil ; already populated -// data.special? (table.insert keywords name) -// data.macro? (table.insert keywords name) -// data.global? (table.insert globals name))) -// -// (fn quoted [tbl] -// (table.sort tbl) -// (table.concat (icollect [_ k (ipairs tbl)] -// (string.format "`%s`" k)) ", ")) -// -// (print :Keyword (quoted keywords)) -// (print :KeywordDeclaration (quoted declarations)) -// (print :NameBuiltin (quoted globals)) - -func fennelRules() Rules { - return Rules{ - "root": { - {`;.*$`, CommentSingle, nil}, - {`\s+`, Whitespace, nil}, - {`-?\d+\.\d+`, LiteralNumberFloat, nil}, - {`-?\d+`, LiteralNumberInteger, nil}, - {`0x-?[abcdef\d]+`, LiteralNumberHex, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, - {`\\(.|[a-z]+)`, LiteralStringChar, nil}, - {`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, - {"~@|[`\\'#^~&@]", Operator, nil}, - {Words(``, ` `, `#`, `%`, `*`, `+`, `-`, `->`, `->>`, `-?>`, `-?>>`, `.`, `..`, `/`, `//`, `:`, `<`, `<=`, `=`, `>`, `>=`, `?.`, `^`, `accumulate`, `and`, `band`, `bnot`, `bor`, `bxor`, `collect`, `comment`, `do`, `doc`, `doto`, `each`, `eval-compiler`, `for`, `hashfn`, `icollect`, `if`, `import-macros`, `include`, `length`, `let`, `lshift`, `lua`, `macrodebug`, `match`, `not`, `not=`, `or`, `partial`, `pick-args`, 
`pick-values`, `quote`, `require-macros`, `rshift`, `set`, `set-forcibly!`, `tset`, `values`, `when`, `while`, `with-open`, `~=`), Keyword, nil}, - {Words(``, ` `, `fn`, `global`, `lambda`, `local`, `macro`, `macros`, `var`, `λ`), KeywordDeclaration, nil}, - {Words(``, ` `, `_G`, `arg`, `assert`, `bit32`, `bit32.arshift`, `bit32.band`, `bit32.bnot`, `bit32.bor`, `bit32.btest`, `bit32.bxor`, `bit32.extract`, `bit32.lrotate`, `bit32.lshift`, `bit32.replace`, `bit32.rrotate`, `bit32.rshift`, `collectgarbage`, `coroutine`, `coroutine.create`, `coroutine.resume`, `coroutine.running`, `coroutine.status`, `coroutine.wrap`, `coroutine.yield`, `debug`, `debug.debug`, `debug.gethook`, `debug.getinfo`, `debug.getlocal`, `debug.getmetatable`, `debug.getregistry`, `debug.getupvalue`, `debug.getuservalue`, `debug.sethook`, `debug.setlocal`, `debug.setmetatable`, `debug.setupvalue`, `debug.setuservalue`, `debug.traceback`, `debug.upvalueid`, `debug.upvaluejoin`, `dofile`, `error`, `getmetatable`, `io`, `io.close`, `io.flush`, `io.input`, `io.lines`, `io.open`, `io.output`, `io.popen`, `io.read`, `io.tmpfile`, `io.type`, `io.write`, `ipairs`, `load`, `loadfile`, `loadstring`, `math`, `math.abs`, `math.acos`, `math.asin`, `math.atan`, `math.atan2`, `math.ceil`, `math.cos`, `math.cosh`, `math.deg`, `math.exp`, `math.floor`, `math.fmod`, `math.frexp`, `math.ldexp`, `math.log`, `math.log10`, `math.max`, `math.min`, `math.modf`, `math.pow`, `math.rad`, `math.random`, `math.randomseed`, `math.sin`, `math.sinh`, `math.sqrt`, `math.tan`, `math.tanh`, `module`, `next`, `os`, `os.clock`, `os.date`, `os.difftime`, `os.execute`, `os.exit`, `os.getenv`, `os.remove`, `os.rename`, `os.setlocale`, `os.time`, `os.tmpname`, `package`, `package.loadlib`, `package.searchpath`, `package.seeall`, `pairs`, `pcall`, `print`, `rawequal`, `rawget`, `rawlen`, `rawset`, `require`, `select`, `setmetatable`, `string`, `string.byte`, `string.char`, `string.dump`, `string.find`, `string.format`, `string.gmatch`, 
`string.gsub`, `string.len`, `string.lower`, `string.match`, `string.rep`, `string.reverse`, `string.sub`, `string.upper`, `table`, `table.concat`, `table.insert`, `table.maxn`, `table.pack`, `table.remove`, `table.sort`, `table.unpack`, `tonumber`, `tostring`, `type`, `unpack`, `xpcall`), NameBuiltin, nil}, - {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, - {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, - {`(\[|\])`, Punctuation, nil}, - {`(\{|\})`, Punctuation, nil}, - {`(\(|\))`, Punctuation, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fish.go b/vendor/github.com/alecthomas/chroma/lexers/f/fish.go deleted file mode 100644 index 29d5028d5dc5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/fish.go +++ /dev/null @@ -1,98 +0,0 @@ -package f - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Fish lexer. -var Fish = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Fish", - Aliases: []string{"fish", "fishshell"}, - Filenames: []string{"*.fish", "*.load"}, - MimeTypes: []string{"application/x-fish"}, - }, - fishRules, -)) - -func fishRules() Rules { - keywords := []string{ - `begin`, `end`, `if`, `else`, `while`, `break`, `for`, `return`, `function`, `block`, - `case`, `continue`, `switch`, `not`, `and`, `or`, `set`, `echo`, `exit`, `pwd`, `true`, - `false`, `cd`, `cdh`, `count`, `test`, - } - keywordsPattern := Words(`\b`, `\b`, keywords...) 
- - builtins := []string{ - `alias`, `bg`, `bind`, `breakpoint`, `builtin`, `argparse`, `abbr`, `string`, `command`, - `commandline`, `complete`, `contains`, `dirh`, `dirs`, `disown`, `emit`, `eval`, `exec`, - `fg`, `fish`, `fish_add_path`, `fish_breakpoint_prompt`, `fish_command_not_found`, - `fish_config`, `fish_git_prompt`, `fish_greeting`, `fish_hg_prompt`, `fish_indent`, - `fish_is_root_user`, `fish_key_reader`, `fish_mode_prompt`, `fish_opt`, `fish_pager`, - `fish_prompt`, `fish_right_prompt`, `fish_status_to_signal`, `fish_svn_prompt`, - `fish_title`, `fish_update_completions`, `fish_vcs_prompt`, `fishd`, `funced`, - `funcsave`, `functions`, `help`, `history`, `isatty`, `jobs`, `math`, `mimedb`, `nextd`, - `open`, `prompt_pwd`, `realpath`, `popd`, `prevd`, `psub`, `pushd`, `random`, `read`, - `set_color`, `source`, `status`, `suspend`, `trap`, `type`, `ulimit`, `umask`, `vared`, - `fc`, `getopts`, `hash`, `kill`, `printf`, `time`, `wait`, - } - - return Rules{ - "root": { - Include("basic"), - Include("interp"), - Include("data"), - }, - "interp": { - {`\$\(\(`, Keyword, Push("math")}, - {`\(`, Keyword, Push("paren")}, - {`\$#?(\w+|.)`, NameVariable, nil}, - }, - "basic": { - {Words(`(?<=(?:^|\A|;|&&|\|\||\||`+keywordsPattern+`)\s*)`, `(?=;?\b)`, keywords...), Keyword, nil}, - {`(?<=for\s+\S+\s+)in\b`, Keyword, nil}, - {Words(`\b`, `\s*\b(?!\.)`, builtins...), NameBuiltin, nil}, - {`#!.*\n`, CommentHashbang, nil}, - {`#.*\n`, Comment, nil}, - {`\\[\w\W]`, LiteralStringEscape, nil}, - {`(\b\w+)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), nil}, - {`[\[\]()={}]`, Operator, nil}, - {`(?<=\[[^\]]+)\.\.|-(?=[^\[]+\])`, Operator, nil}, - {`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, - {`(?<=set\s+(?:--?[^\d\W][\w-]*\s+)?)\w+`, NameVariable, nil}, - {`(?<=for\s+)\w[\w-]*(?=\s+in)`, NameVariable, nil}, - {`(?<=function\s+)\w(?:[^\n])*?(?= *[-\n])`, NameFunction, nil}, - {`(?<=(?:^|\b(?:and|or|sudo)\b|;|\|\||&&|\||\(|(?:\b\w+\s*=\S+\s)) *)\w[\w-]*`, 
NameFunction, nil}, - }, - "data": { - {`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Push("string")}, - {`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, - {`(?s)'.*?'`, LiteralStringSingle, nil}, - {`;`, Punctuation, nil}, - {`&&|\|\||&|\||\^|<|>`, Operator, nil}, - {`\s+`, Text, nil}, - {`\b\d+\b`, LiteralNumber, nil}, - {`(?<=\s+)--?[^\d][\w-]*`, NameAttribute, nil}, - {".+?", Text, nil}, - }, - "string": { - {`"`, LiteralStringDouble, Pop(1)}, - {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, LiteralStringDouble, nil}, - Include("interp"), - }, - "paren": { - {`\)`, Keyword, Pop(1)}, - Include("root"), - }, - "math": { - {`\)\)`, Keyword, Pop(1)}, - {`[-+*/%^|&]|\*\*|\|\|`, Operator, nil}, - {`\d+#\d+`, LiteralNumber, nil}, - {`\d+#(?! )`, LiteralNumber, nil}, - {`\d+`, LiteralNumber, nil}, - Include("root"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/forth.go b/vendor/github.com/alecthomas/chroma/lexers/f/forth.go deleted file mode 100644 index 8d66708dccf4..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/forth.go +++ /dev/null @@ -1,44 +0,0 @@ -package f - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Forth lexer. 
-var Forth = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Forth", - Aliases: []string{"forth"}, - Filenames: []string{"*.frt", "*.fth", "*.fs"}, - MimeTypes: []string{"application/x-forth"}, - CaseInsensitive: true, - }, - forthRules, -)) - -func forthRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`\\.*?\n`, CommentSingle, nil}, - {`\([\s].*?\)`, CommentSingle, nil}, - {`(:|variable|constant|value|buffer:)(\s+)`, ByGroups(KeywordNamespace, Text), Push("worddef")}, - {`([.sc]")(\s+?)`, ByGroups(LiteralString, Text), Push("stringdef")}, - {`(blk|block|buffer|evaluate|flush|load|save-buffers|update|empty-buffers|list|refill|scr|thru|\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|2\*|2\/|2@|2drop|2dup|2over|2swap|>body|>in|>number|>r|\?dup|abort|abort\"|abs|accept|align|aligned|allot|and|base|begin|bl|c!|c,|c@|cell\+|cells|char|char\+|chars|constant|count|cr|create|decimal|depth|do|does>|drop|dup|else|emit|environment\?|evaluate|execute|exit|fill|find|fm\/mod|here|hold|i|if|immediate|invert|j|key|leave|literal|loop|lshift|m\*|max|min|mod|move|negate|or|over|postpone|quit|r>|r@|recurse|repeat|rot|rshift|s\"|s>d|sign|sm\/rem|source|space|spaces|state|swap|then|type|u\.|u\<|um\*|um\/mod|unloop|until|variable|while|word|xor|\[char\]|\[\'\]|@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|\.r|0<>|0>|2>r|2r>|2r@|:noname|\?do|again|c\"|case|compile,|endcase|endof|erase|false|hex|marker|nip|of|pad|parse|pick|refill|restore-input|roll|save-input|source-id|to|true|tuck|u\.r|u>|unused|value|within|\[compile\]|\#tib|convert|expect|query|span|tib|2constant|2literal|2variable|d\+|d-|d\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|dabs|dmax|dmin|dnegate|m\*\/|m\+|2rot|du<|catch|throw|abort|abort\"|at-xy|key\?|page|ekey|ekey>char|ekey\?|emit\?|ms|time&date|BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|FILE-STATUS|FLUSH-FILE|RE
FILL|RENAME-FILE|>float|d>f|f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|falign|faligned|fconstant|fdepth|fdrop|fdup|fliteral|float\+|floats|floor|fmax|fmin|fnegate|fover|frot|fround|fswap|fvariable|represent|df!|df@|dfalign|dfaligned|dfloat\+|dfloats|f\*\*|f\.|fabs|facos|facosh|falog|fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|sfloats|\(local\)|to|locals\||allocate|free|resize|definitions|find|forth-wordlist|get-current|get-order|search-wordlist|set-current|set-order|wordlist|also|forth|only|order|previous|-trailing|\/string|blank|cmove|cmove>|compare|search|sliteral|.s|dump|see|words|;code|ahead|assembler|bye|code|cs-pick|cs-roll|editor|state|\[else\]|\[if\]|\[then\]|forget|defer|defer@|defer!|action-of|begin-structure|field:|buffer:|parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|name>interpret|name>compile|name>string|cfield:|end-structure)\s`, Keyword, nil}, - {`(\$[0-9A-F]+)`, LiteralNumberHex, nil}, - {`(\#|%|&|\-|\+)?[0-9]+`, LiteralNumberInteger, nil}, - {`(\#|%|&|\-|\+)?[0-9.]+`, KeywordType, nil}, - {`(@i|!i|@e|!e|pause|noop|turnkey|sleep|itype|icompare|sp@|sp!|rp@|rp!|up@|up!|>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|find-name|1ms|sp0|rp0|\(evaluate\)|int-trap|int!)\s`, NameConstant, nil}, - {`(do-recognizer|r:fail|recognizer:|get-recognizers|set-recognizers|r:float|r>comp|r>int|r>post|r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|rec:num|rec:float|rec:word)\s`, NameDecorator, nil}, - {`(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)`, ByGroups(KeywordNamespace, Text), Push("worddef")}, - {`[^\s]+(?=[\s])`, NameFunction, nil}, - }, - "worddef": { - {`\S+`, NameClass, Pop(1)}, - }, - "stringdef": { - {`[^"]+`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go b/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go deleted file mode 100644 index 
af4a969ceec8..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go +++ /dev/null @@ -1,51 +0,0 @@ -package f - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Fortran lexer. -var Fortran = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Fortran", - Aliases: []string{"fortran"}, - Filenames: []string{"*.f03", "*.f90", "*.F03", "*.F90"}, - MimeTypes: []string{"text/x-fortran"}, - CaseInsensitive: true, - }, - fortranRules, -)) - -func fortranRules() Rules { - return Rules{ - "root": { - {`^#.*\n`, CommentPreproc, nil}, - {`!.*\n`, Comment, nil}, - Include("strings"), - Include("core"), - {`[a-z][\w$]*`, Name, nil}, - Include("nums"), - {`[\s]+`, Text, nil}, - }, - "core": { - {Words(`\b`, `\s*\b`, `ABSTRACT`, `ACCEPT`, `ALL`, `ALLSTOP`, `ALLOCATABLE`, `ALLOCATE`, `ARRAY`, `ASSIGN`, `ASSOCIATE`, `ASYNCHRONOUS`, `BACKSPACE`, `BIND`, `BLOCK`, `BLOCKDATA`, `BYTE`, `CALL`, `CASE`, `CLASS`, `CLOSE`, `CODIMENSION`, `COMMON`, `CONCURRRENT`, `CONTIGUOUS`, `CONTAINS`, `CONTINUE`, `CRITICAL`, `CYCLE`, `DATA`, `DEALLOCATE`, `DECODE`, `DEFERRED`, `DIMENSION`, `DO`, `ELEMENTAL`, `ELSE`, `ENCODE`, `END`, `ENTRY`, `ENUM`, `ENUMERATOR`, `EQUIVALENCE`, `EXIT`, `EXTENDS`, `EXTERNAL`, `EXTRINSIC`, `FILE`, `FINAL`, `FORALL`, `FORMAT`, `FUNCTION`, `GENERIC`, `GOTO`, `IF`, `IMAGES`, `IMPLICIT`, `IMPORT`, `IMPURE`, `INCLUDE`, `INQUIRE`, `INTENT`, `INTERFACE`, `INTRINSIC`, `IS`, `LOCK`, `MEMORY`, `MODULE`, `NAMELIST`, `NULLIFY`, `NONE`, `NON_INTRINSIC`, `NON_OVERRIDABLE`, `NOPASS`, `OPEN`, `OPTIONAL`, `OPTIONS`, `PARAMETER`, `PASS`, `PAUSE`, `POINTER`, `PRINT`, `PRIVATE`, `PROGRAM`, `PROCEDURE`, `PROTECTED`, `PUBLIC`, `PURE`, `READ`, `RECURSIVE`, `RESULT`, `RETURN`, `REWIND`, `SAVE`, `SELECT`, `SEQUENCE`, `STOP`, `SUBMODULE`, `SUBROUTINE`, `SYNC`, `SYNCALL`, `SYNCIMAGES`, `SYNCMEMORY`, `TARGET`, `THEN`, `TYPE`, `UNLOCK`, `USE`, `VALUE`, `VOLATILE`, `WHERE`, `WRITE`, `WHILE`), Keyword, nil}, - 
{Words(`\b`, `\s*\b`, `CHARACTER`, `COMPLEX`, `DOUBLE PRECISION`, `DOUBLE COMPLEX`, `INTEGER`, `LOGICAL`, `REAL`, `C_INT`, `C_SHORT`, `C_LONG`, `C_LONG_LONG`, `C_SIGNED_CHAR`, `C_SIZE_T`, `C_INT8_T`, `C_INT16_T`, `C_INT32_T`, `C_INT64_T`, `C_INT_LEAST8_T`, `C_INT_LEAST16_T`, `C_INT_LEAST32_T`, `C_INT_LEAST64_T`, `C_INT_FAST8_T`, `C_INT_FAST16_T`, `C_INT_FAST32_T`, `C_INT_FAST64_T`, `C_INTMAX_T`, `C_INTPTR_T`, `C_FLOAT`, `C_DOUBLE`, `C_LONG_DOUBLE`, `C_FLOAT_COMPLEX`, `C_DOUBLE_COMPLEX`, `C_LONG_DOUBLE_COMPLEX`, `C_BOOL`, `C_CHAR`, `C_PTR`, `C_FUNPTR`), KeywordType, nil}, - {`(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)`, Operator, nil}, - {`(::)`, KeywordDeclaration, nil}, - {`[()\[\],:&%;.]`, Punctuation, nil}, - {Words(`\b`, `\s*\b`, `Abort`, `Abs`, `Access`, `AChar`, `ACos`, `ACosH`, `AdjustL`, `AdjustR`, `AImag`, `AInt`, `Alarm`, `All`, `Allocated`, `ALog`, `AMax`, `AMin`, `AMod`, `And`, `ANInt`, `Any`, `ASin`, `ASinH`, `Associated`, `ATan`, `ATanH`, `Atomic_Define`, `Atomic_Ref`, `BesJ`, `BesJN`, `Bessel_J0`, `Bessel_J1`, `Bessel_JN`, `Bessel_Y0`, `Bessel_Y1`, `Bessel_YN`, `BesY`, `BesYN`, `BGE`, `BGT`, `BLE`, `BLT`, `Bit_Size`, `BTest`, `CAbs`, `CCos`, `Ceiling`, `CExp`, `Char`, `ChDir`, `ChMod`, `CLog`, `Cmplx`, `Command_Argument_Count`, `Complex`, `Conjg`, `Cos`, `CosH`, `Count`, `CPU_Time`, `CShift`, `CSin`, `CSqRt`, `CTime`, `C_Loc`, `C_Associated`, `C_Null_Ptr`, `C_Null_Funptr`, `C_F_Pointer`, `C_F_ProcPointer`, `C_Null_Char`, `C_Alert`, `C_Backspace`, `C_Form_Feed`, `C_FunLoc`, `C_Sizeof`, `C_New_Line`, `C_Carriage_Return`, `C_Horizontal_Tab`, `C_Vertical_Tab`, `DAbs`, `DACos`, `DASin`, `DATan`, `Date_and_Time`, `DbesJ`, `DbesJN`, `DbesY`, `DbesYN`, `Dble`, `DCos`, `DCosH`, `DDiM`, `DErF`, `DErFC`, `DExp`, `Digits`, `DiM`, `DInt`, `DLog`, `DMax`, `DMin`, `DMod`, `DNInt`, `Dot_Product`, `DProd`, `DSign`, `DSinH`, `DShiftL`, `DShiftR`, `DSin`, `DSqRt`, `DTanH`, `DTan`, `DTime`, `EOShift`, `Epsilon`, `ErF`, `ErFC`, `ErFC_Scaled`, `ETime`, `Execute_Command_Line`, 
`Exit`, `Exp`, `Exponent`, `Extends_Type_Of`, `FDate`, `FGet`, `FGetC`, `FindLoc`, `Float`, `Floor`, `Flush`, `FNum`, `FPutC`, `FPut`, `Fraction`, `FSeek`, `FStat`, `FTell`, `Gamma`, `GError`, `GetArg`, `Get_Command`, `Get_Command_Argument`, `Get_Environment_Variable`, `GetCWD`, `GetEnv`, `GetGId`, `GetLog`, `GetPId`, `GetUId`, `GMTime`, `HostNm`, `Huge`, `Hypot`, `IAbs`, `IAChar`, `IAll`, `IAnd`, `IAny`, `IArgC`, `IBClr`, `IBits`, `IBSet`, `IChar`, `IDate`, `IDiM`, `IDInt`, `IDNInt`, `IEOr`, `IErrNo`, `IFix`, `Imag`, `ImagPart`, `Image_Index`, `Index`, `Int`, `IOr`, `IParity`, `IRand`, `IsaTty`, `IShft`, `IShftC`, `ISign`, `Iso_C_Binding`, `Is_Contiguous`, `Is_Iostat_End`, `Is_Iostat_Eor`, `ITime`, `Kill`, `Kind`, `LBound`, `LCoBound`, `Len`, `Len_Trim`, `LGe`, `LGt`, `Link`, `LLe`, `LLt`, `LnBlnk`, `Loc`, `Log`, `Log_Gamma`, `Logical`, `Long`, `LShift`, `LStat`, `LTime`, `MaskL`, `MaskR`, `MatMul`, `Max`, `MaxExponent`, `MaxLoc`, `MaxVal`, `MClock`, `Merge`, `Merge_Bits`, `Move_Alloc`, `Min`, `MinExponent`, `MinLoc`, `MinVal`, `Mod`, `Modulo`, `MvBits`, `Nearest`, `New_Line`, `NInt`, `Norm2`, `Not`, `Null`, `Num_Images`, `Or`, `Pack`, `Parity`, `PError`, `Precision`, `Present`, `Product`, `Radix`, `Rand`, `Random_Number`, `Random_Seed`, `Range`, `Real`, `RealPart`, `Rename`, `Repeat`, `Reshape`, `RRSpacing`, `RShift`, `Same_Type_As`, `Scale`, `Scan`, `Second`, `Selected_Char_Kind`, `Selected_Int_Kind`, `Selected_Real_Kind`, `Set_Exponent`, `Shape`, `ShiftA`, `ShiftL`, `ShiftR`, `Short`, `Sign`, `Signal`, `SinH`, `Sin`, `Sleep`, `Sngl`, `Spacing`, `Spread`, `SqRt`, `SRand`, `Stat`, `Storage_Size`, `Sum`, `SymLnk`, `System`, `System_Clock`, `Tan`, `TanH`, `Time`, `This_Image`, `Tiny`, `TrailZ`, `Transfer`, `Transpose`, `Trim`, `TtyNam`, `UBound`, `UCoBound`, `UMask`, `Unlink`, `Unpack`, `Verify`, `XOr`, `ZAbs`, `ZCos`, `ZExp`, `ZLog`, `ZSin`, `ZSqRt`), NameBuiltin, nil}, - {`\.(true|false)\.`, NameBuiltin, nil}, - {`\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.`, 
OperatorWord, nil}, - }, - "strings": { - {`(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"`, LiteralStringDouble, nil}, - {`(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, - }, - "nums": { - {`\d+(?![.e])(_[a-z]\w+)?`, LiteralNumberInteger, nil}, - {`[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil}, - {`[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fortran_fixed.go b/vendor/github.com/alecthomas/chroma/lexers/f/fortran_fixed.go deleted file mode 100644 index 7c646386dce1..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/fortran_fixed.go +++ /dev/null @@ -1,39 +0,0 @@ -package f - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// FortranFixed lexer. -var FortranFixed = internal.Register(MustNewLazyLexer( - &Config{ - Name: "FortranFixed", - Aliases: []string{"fortranfixed"}, - Filenames: []string{"*.f", "*.F"}, - MimeTypes: []string{"text/x-fortran"}, - NotMultiline: true, - CaseInsensitive: true, - }, - func() Rules { - return Rules{ - "root": { - {`[C*].*\n`, Comment, nil}, - {`#.*\n`, CommentPreproc, nil}, - {`[\t ]*!.*\n`, Comment, nil}, - {`(.{5})`, NameLabel, Push("cont-char")}, - {`.*\n`, Using(Fortran), nil}, - }, - "cont-char": { - {` `, Text, Push("code")}, - {`0`, Comment, Push("code")}, - {`.`, GenericStrong, Push("code")}, - }, - "code": { - {`(.{66})(.*)(\n)`, ByGroups(Using(Fortran), Comment, Text), Push("root")}, - {`.*\n`, Using(Fortran), Push("root")}, - Default(Push("root")), - }, - } - }, -)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go b/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go deleted file mode 100644 index 44fced4d5b37..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go +++ /dev/null @@ -1,98 +0,0 @@ -package f - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Fsharp lexer. -var Fsharp = internal.Register(MustNewLazyLexer( - &Config{ - Name: "FSharp", - Aliases: []string{"fsharp"}, - Filenames: []string{"*.fs", "*.fsi"}, - MimeTypes: []string{"text/x-fsharp"}, - }, - fsharpRules, -)) - -func fsharpRules() Rules { - return Rules{ - "escape-sequence": { - {`\\[\\"\'ntbrafv]`, LiteralStringEscape, nil}, - {`\\[0-9]{3}`, LiteralStringEscape, nil}, - {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, - {`\\U[0-9a-fA-F]{8}`, LiteralStringEscape, nil}, - }, - "root": { - {`\s+`, Text, nil}, - {`\(\)|\[\]`, NameBuiltinPseudo, nil}, - {`\b(?|-|\\.\\.|\\.|::|:=|:>|:|;;|;|<-|<\\]|<|>\\]|>|\\?\\?|\\?|\\[<|\\[\\||\\[|\\]|_|`|\\{|\\|\\]|\\||\\}|~|<@@|<@|=|@>|@@>)", Operator, nil}, - {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, - {`\b(and|or|not)\b`, OperatorWord, nil}, - {`\b(sbyte|byte|char|nativeint|unativeint|float32|single|float|double|int8|uint8|int16|uint16|int32|uint32|int64|uint64|decimal|unit|bool|string|list|exn|obj|enum)\b`, KeywordType, nil}, - {`#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n`, CommentPreproc, nil}, - {`[^\W\d][\w']*`, Name, nil}, - {`\d[\d_]*[uU]?[yslLnQRZINGmM]?`, LiteralNumberInteger, nil}, - {`0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?`, LiteralNumberHex, nil}, - {`0[oO][0-7][0-7_]*[uU]?[yslLn]?`, LiteralNumberOct, nil}, - {`0[bB][01][01_]*[uU]?[yslLn]?`, LiteralNumberBin, nil}, - {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?`, LiteralNumberFloat, nil}, - {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?`, LiteralStringChar, nil}, - {`'.'`, LiteralStringChar, nil}, - {`'`, Keyword, nil}, - {`@?"`, LiteralStringDouble, Push("string")}, - {`[~?][a-z][\w\']*:`, NameVariable, nil}, - }, - "dotted": { - {`\s+`, Text, nil}, - {`\.`, Punctuation, nil}, - {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, - {`[A-Z][\w\']*`, Name, Pop(1)}, - {`[a-z_][\w\']*`, Name, Pop(1)}, - 
Default(Pop(1)), - }, - "comment": { - {`[^(*)@"]+`, Comment, nil}, - {`\(\*`, Comment, Push()}, - {`\*\)`, Comment, Pop(1)}, - {`@"`, LiteralString, Push("lstring")}, - {`"""`, LiteralString, Push("tqs")}, - {`"`, LiteralString, Push("string")}, - {`[(*)@]`, Comment, nil}, - }, - "string": { - {`[^\\"]+`, LiteralString, nil}, - Include("escape-sequence"), - {`\\\n`, LiteralString, nil}, - {`\n`, LiteralString, nil}, - {`"B?`, LiteralString, Pop(1)}, - }, - "lstring": { - {`[^"]+`, LiteralString, nil}, - {`\n`, LiteralString, nil}, - {`""`, LiteralString, nil}, - {`"B?`, LiteralString, Pop(1)}, - }, - "tqs": { - {`[^"]+`, LiteralString, nil}, - {`\n`, LiteralString, nil}, - {`"""B?`, LiteralString, Pop(1)}, - {`"`, LiteralString, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gas.go b/vendor/github.com/alecthomas/chroma/lexers/g/gas.go deleted file mode 100644 index 1f733f6ac17a..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/gas.go +++ /dev/null @@ -1,59 +0,0 @@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Gas lexer. 
-var Gas = internal.Register(MustNewLazyLexer( - &Config{ - Name: "GAS", - Aliases: []string{"gas", "asm"}, - Filenames: []string{"*.s", "*.S"}, - MimeTypes: []string{"text/x-gas"}, - }, - gasRules, -)) - -func gasRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+):`, NameLabel, nil}, - {`\.(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameAttribute, Push("directive-args")}, - {`lock|rep(n?z)?|data\d+`, NameAttribute, nil}, - {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameFunction, Push("instruction-args")}, - {`[\r\n]+`, Text, nil}, - }, - "directive-args": { - {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameConstant, nil}, - {`"(\\"|[^"])*"`, LiteralString, nil}, - {`@(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameAttribute, nil}, - {`(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, - {`[\r\n]+`, Text, Pop(1)}, - Include("punctuation"), - Include("whitespace"), - }, - "instruction-args": { - {`([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))(>)`, ByGroups(LiteralNumberHex, Text, Punctuation, NameConstant, Punctuation), nil}, - {`([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))([-+])((?:0[xX][a-zA-Z0-9]+|\d+))(>)`, ByGroups(LiteralNumberHex, Text, Punctuation, NameConstant, Punctuation, LiteralNumberInteger, Punctuation), nil}, - {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameConstant, nil}, - {`(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, - {`%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameVariable, nil}, - {`$(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, - {`$'(.|\\')'`, LiteralStringChar, nil}, - {`[\r\n]+`, Text, Pop(1)}, - Include("punctuation"), - Include("whitespace"), - }, - "whitespace": { - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`[;#].*?\n`, Comment, nil}, - }, - "punctuation": { - {`[-*,.()\[\]!:]+`, Punctuation, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go b/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go deleted file mode 
100644 index 2b6af973fbee..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go +++ /dev/null @@ -1,128 +0,0 @@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// GDScript lexer. -var GDScript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "GDScript", - Aliases: []string{"gdscript", "gd"}, - Filenames: []string{"*.gd"}, - MimeTypes: []string{"text/x-gdscript", "application/x-gdscript"}, - }, - gdscriptRules, -)) - -func gdscriptRules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, - {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, - {`[^\S\n]+`, Text, nil}, - {`#.*$`, CommentSingle, nil}, - {`[]{}:(),;[]`, Punctuation, nil}, - {`\\\n`, Text, nil}, - {`\\`, Text, nil}, - {`(in|and|or|not)\b`, OperatorWord, nil}, - {`!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]`, Operator, nil}, - Include("keywords"), - {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, - {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, - Include("builtins"), - {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, - {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, - {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, - {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, - {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, - {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, - {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", 
"dqs")}, - {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, - Include("name"), - Include("numbers"), - }, - "keywords": { - {Words(``, `\b`, - `if`, `elif`, `else`, `for`, `do`, - `while`, `switch`, `case`, `break`, `continue`, - `pass`, `return`, `class`, `extends`, `tool`, - `signal`, `func`, `static`, `const`, `enum`, - `var`, `onready`, `export`, `setget`, `breakpoint`), Keyword, nil}, - }, - "builtins": { - {Words(`(?]+>)`, NameVariable, nil}, - }, - "numbers": { - {`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralString, nil}, - }, - "string": { - Include("tableVars"), - {`(\s|.)`, LiteralString, nil}, - }, - "pyString": { - {`"""`, Keyword, Pop(1)}, - Include("string"), - }, - "stepContentRoot": { - {`$`, Keyword, Pop(1)}, - Include("stepContent"), - }, - "stepContentStack": { - {`$`, Keyword, Pop(2)}, - Include("stepContent"), - }, - "stepContent": { - {`"`, NameFunction, Push("doubleString")}, - Include("tableVars"), - Include("numbers"), - Include("comments"), - {`(\s|.)`, NameFunction, nil}, - }, - "tableContent": { - {`\s+\|\s*$`, Keyword, Pop(1)}, - Include("comments"), - {`\\\|`, LiteralString, nil}, - {`\s*\|`, Keyword, nil}, - {`"`, LiteralString, Push("doubleStringTable")}, - Include("string"), - }, - "doubleString": { - {`"`, NameFunction, Pop(1)}, - Include("string"), - }, - "doubleStringTable": { - {`"`, LiteralString, Pop(1)}, - Include("string"), - }, - "root": { - {`\n`, NameFunction, nil}, - Include("comments"), - {`"""`, Keyword, Push("pyString")}, - {`\s+\|`, Keyword, Push("tableContent")}, - {`"`, NameFunction, Push("doubleString")}, - Include("tableVars"), - Include("numbers"), - {`(\s*)(@[^@\r\n\t ]+)`, ByGroups(NameFunction, NameTag), nil}, - {stepKeywords, ByGroups(NameFunction, Keyword), Push("stepContentRoot")}, - {featureKeywords, ByGroups(Keyword, Keyword, NameFunction), Push("narrative")}, - {featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), 
Push("featureElements")}, - {examplesKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("examplesTable")}, - {`(\s|.)`, NameFunction, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/glsl.go b/vendor/github.com/alecthomas/chroma/lexers/g/glsl.go deleted file mode 100644 index 14aa58db3d9e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/glsl.go +++ /dev/null @@ -1,41 +0,0 @@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// GLSL lexer. -var GLSL = internal.Register(MustNewLazyLexer( - &Config{ - Name: "GLSL", - Aliases: []string{"glsl"}, - Filenames: []string{"*.vert", "*.frag", "*.geo"}, - MimeTypes: []string{"text/x-glslsrc"}, - }, - glslRules, -)) - -func glslRules() Rules { - return Rules{ - "root": { - {`^#.*`, CommentPreproc, nil}, - {`//.*`, CommentSingle, nil}, - {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, - {`\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?`, Operator, nil}, - {`[?:]`, Operator, nil}, - {`\bdefined\b`, Operator, nil}, - {`[;{}(),\[\]]`, Punctuation, nil}, - {`[+-]?\d*\.\d+([eE][-+]?\d+)?`, LiteralNumberFloat, nil}, - {`[+-]?\d+\.\d*([eE][-+]?\d+)?`, LiteralNumberFloat, nil}, - {`0[xX][0-9a-fA-F]*`, LiteralNumberHex, nil}, - {`0[0-7]*`, LiteralNumberOct, nil}, - {`[1-9][0-9]*`, LiteralNumberInteger, nil}, - {Words(`\b`, `\b`, `attribute`, `const`, `uniform`, `varying`, `centroid`, `break`, `continue`, `do`, `for`, `while`, `if`, `else`, `in`, `out`, `inout`, `float`, `int`, `void`, `bool`, `true`, `false`, `invariant`, `discard`, `return`, `mat2`, `mat3mat4`, `mat2x2`, `mat3x2`, `mat4x2`, `mat2x3`, `mat3x3`, `mat4x3`, `mat2x4`, `mat3x4`, `mat4x4`, `vec2`, `vec3`, `vec4`, `ivec2`, `ivec3`, `ivec4`, `bvec2`, `bvec3`, `bvec4`, `sampler1D`, `sampler2D`, `sampler3DsamplerCube`, `sampler1DShadow`, `sampler2DShadow`, `struct`), Keyword, nil}, - {Words(`\b`, `\b`, `asm`, `class`, `union`, `enum`, 
`typedef`, `template`, `this`, `packed`, `goto`, `switch`, `default`, `inline`, `noinline`, `volatile`, `public`, `static`, `extern`, `external`, `interface`, `long`, `short`, `double`, `half`, `fixed`, `unsigned`, `lowp`, `mediump`, `highp`, `precision`, `input`, `output`, `hvec2`, `hvec3`, `hvec4`, `dvec2`, `dvec3`, `dvec4`, `fvec2`, `fvec3`, `fvec4`, `sampler2DRect`, `sampler3DRect`, `sampler2DRectShadow`, `sizeof`, `cast`, `namespace`, `using`), Keyword, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - {`\.`, Punctuation, nil}, - {`\s+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go b/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go deleted file mode 100644 index a7a25094188d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go +++ /dev/null @@ -1,121 +0,0 @@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Gnuplot lexer. -var Gnuplot = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Gnuplot", - Aliases: []string{"gnuplot"}, - Filenames: []string{"*.plot", "*.plt"}, - MimeTypes: []string{"text/x-gnuplot"}, - }, - gnuplotRules, -)) - -func gnuplotRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - {`bind\b|bin\b|bi\b`, Keyword, Push("bind")}, - {`exit\b|exi\b|ex\b|quit\b|qui\b|qu\b|q\b`, Keyword, Push("quit")}, - {`fit\b|fi\b|f\b`, Keyword, Push("fit")}, - {`(if)(\s*)(\()`, ByGroups(Keyword, Text, Punctuation), Push("if")}, - {`else\b`, Keyword, nil}, - {`pause\b|paus\b|pau\b|pa\b`, Keyword, Push("pause")}, - {`plot\b|plo\b|pl\b|p\b|replot\b|replo\b|repl\b|rep\b|splot\b|splo\b|spl\b|sp\b`, Keyword, Push("plot")}, - {`save\b|sav\b|sa\b`, Keyword, Push("save")}, - {`set\b|se\b`, Keyword, Push("genericargs", "optionarg")}, - {`show\b|sho\b|sh\b|unset\b|unse\b|uns\b`, Keyword, Push("noargs", "optionarg")}, - 
{`lower\b|lowe\b|low\b|raise\b|rais\b|rai\b|ra\b|call\b|cal\b|ca\b|cd\b|clear\b|clea\b|cle\b|cl\b|help\b|hel\b|he\b|h\b|\?\b|history\b|histor\b|histo\b|hist\b|his\b|hi\b|load\b|loa\b|lo\b|l\b|print\b|prin\b|pri\b|pr\b|pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|system\b|syste\b|syst\b|sys\b|sy\b|update\b|updat\b|upda\b|upd\b|up\b`, Keyword, Push("genericargs")}, - {`pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|test\b`, Keyword, Push("noargs")}, - {`([a-zA-Z_]\w*)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), Push("genericargs")}, - {`([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)`, ByGroups(NameFunction, Text, Operator), Push("genericargs")}, - {`@[a-zA-Z_]\w*`, NameConstant, nil}, - {`;`, Keyword, nil}, - }, - "comment": { - {`[^\\\n]`, Comment, nil}, - {`\\\n`, Comment, nil}, - {`\\`, Comment, nil}, - Default(Pop(1)), - }, - "whitespace": { - {`#`, Comment, Push("comment")}, - {`[ \t\v\f]+`, Text, nil}, - }, - "noargs": { - Include("whitespace"), - {`;`, Punctuation, Pop(1)}, - {`\n`, Text, Pop(1)}, - }, - "dqstring": { - {`"`, LiteralString, Pop(1)}, - {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - {`\n`, LiteralString, Pop(1)}, - }, - "sqstring": { - {`''`, LiteralString, nil}, - {`'`, LiteralString, Pop(1)}, - {`[^\\'\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - {`\n`, LiteralString, Pop(1)}, - }, - "genericargs": { - Include("noargs"), - {`"`, LiteralString, Push("dqstring")}, - {`'`, LiteralString, Push("sqstring")}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil}, - {`(\d+\.\d*|\.\d+)`, LiteralNumberFloat, nil}, - {`-?\d+`, LiteralNumberInteger, nil}, 
- {`[,.~!%^&*+=|?:<>/-]`, Operator, nil}, - {`[{}()\[\]]`, Punctuation, nil}, - {`(eq|ne)\b`, OperatorWord, nil}, - {`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - {`@[a-zA-Z_]\w*`, NameConstant, nil}, - {`\\\n`, Text, nil}, - }, - "optionarg": { - Include("whitespace"), - {`all\b|al\b|a\b|angles\b|angle\b|angl\b|ang\b|an\b|arrow\b|arro\b|arr\b|ar\b|autoscale\b|autoscal\b|autosca\b|autosc\b|autos\b|auto\b|aut\b|au\b|bars\b|bar\b|ba\b|b\b|border\b|borde\b|bord\b|bor\b|boxwidth\b|boxwidt\b|boxwid\b|boxwi\b|boxw\b|box\b|clabel\b|clabe\b|clab\b|cla\b|cl\b|clip\b|cli\b|cl\b|c\b|cntrparam\b|cntrpara\b|cntrpar\b|cntrpa\b|cntrp\b|cntr\b|cnt\b|cn\b|contour\b|contou\b|conto\b|cont\b|con\b|co\b|data\b|dat\b|da\b|datafile\b|datafil\b|datafi\b|dataf\b|data\b|dgrid3d\b|dgrid3\b|dgrid\b|dgri\b|dgr\b|dg\b|dummy\b|dumm\b|dum\b|du\b|encoding\b|encodin\b|encodi\b|encod\b|enco\b|enc\b|decimalsign\b|decimalsig\b|decimalsi\b|decimals\b|decimal\b|decima\b|decim\b|deci\b|dec\b|fit\b|fontpath\b|fontpat\b|fontpa\b|fontp\b|font\b|format\b|forma\b|form\b|for\b|fo\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|grid\b|gri\b|gr\b|g\b|hidden3d\b|hidden3\b|hidden\b|hidde\b|hidd\b|hid\b|historysize\b|historysiz\b|historysi\b|historys\b|history\b|histor\b|histo\b|hist\b|his\b|isosamples\b|isosample\b|isosampl\b|isosamp\b|isosam\b|isosa\b|isos\b|iso\b|is\b|key\b|ke\b|k\b|keytitle\b|keytitl\b|keytit\b|keyti\b|keyt\b|label\b|labe\b|lab\b|la\b|linestyle\b|linestyl\b|linesty\b|linest\b|lines\b|line\b|lin\b|li\b|ls\b|loadpath\b|loadpat\b|loadpa\b|loadp\b|load\b|loa\b|locale\b|local\b|loca\b|loc\b|logscale\b|logscal\b|logsca\b|logsc\b|logs\b|log\b|macros\b|macro\b|macr\b|mac\b|mapping\b|mappin\b|mappi\b|mapp\b|map\b|mapping3d\b|mapping3\b|mapping\b|mappin\b|mappi\b|mapp\b|map\b|margin\b|margi\b|marg\b|mar\b|lmargin\b|lmargi\b|lmarg\b|lmar\b|rmargin\b|rmargi\b|rmarg\b|r
mar\b|tmargin\b|tmargi\b|tmarg\b|tmar\b|bmargin\b|bmargi\b|bmarg\b|bmar\b|mouse\b|mous\b|mou\b|mo\b|multiplot\b|multiplo\b|multipl\b|multip\b|multi\b|mxtics\b|mxtic\b|mxti\b|mxt\b|nomxtics\b|nomxtic\b|nomxti\b|nomxt\b|mx2tics\b|mx2tic\b|mx2ti\b|mx2t\b|nomx2tics\b|nomx2tic\b|nomx2ti\b|nomx2t\b|mytics\b|mytic\b|myti\b|myt\b|nomytics\b|nomytic\b|nomyti\b|nomyt\b|my2tics\b|my2tic\b|my2ti\b|my2t\b|nomy2tics\b|nomy2tic\b|nomy2ti\b|nomy2t\b|mztics\b|mztic\b|mzti\b|mzt\b|nomztics\b|nomztic\b|nomzti\b|nomzt\b|mcbtics\b|mcbtic\b|mcbti\b|mcbt\b|nomcbtics\b|nomcbtic\b|nomcbti\b|nomcbt\b|offsets\b|offset\b|offse\b|offs\b|off\b|of\b|origin\b|origi\b|orig\b|ori\b|or\b|output\b|outpu\b|outp\b|out\b|ou\b|o\b|parametric\b|parametri\b|parametr\b|paramet\b|parame\b|param\b|para\b|par\b|pa\b|pm3d\b|pm3\b|pm\b|palette\b|palett\b|palet\b|pale\b|pal\b|colorbox\b|colorbo\b|colorb\b|plot\b|plo\b|pl\b|p\b|pointsize\b|pointsiz\b|pointsi\b|points\b|point\b|poin\b|poi\b|polar\b|pola\b|pol\b|print\b|prin\b|pri\b|pr\b|object\b|objec\b|obje\b|obj\b|samples\b|sample\b|sampl\b|samp\b|sam\b|sa\b|size\b|siz\b|si\b|style\b|styl\b|sty\b|st\b|surface\b|surfac\b|surfa\b|surf\b|sur\b|su\b|table\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|termoptions\b|termoption\b|termoptio\b|termopti\b|termopt\b|termop\b|termo\b|tics\b|tic\b|ti\b|ticscale\b|ticscal\b|ticsca\b|ticsc\b|ticslevel\b|ticsleve\b|ticslev\b|ticsle\b|ticsl\b|timefmt\b|timefm\b|timef\b|timestamp\b|timestam\b|timesta\b|timest\b|times\b|time\b|tim\b|title\b|titl\b|tit\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b|version\b|versio\b|versi\b|vers\b|ver\b|ve\b|view\b|vie\b|vi\b|xyplane\b|xyplan\b|xypla\b|xypl\b|xyp\b|xdata\b|xdat\b|xda\b|x2data\b|x2dat\b|x2da\b|ydata\b|ydat\b|yda\b|y2data\b|y2dat\b|y2da\b|zdata\b|zdat\b|zda\b|cbdata\b|cbdat\b|cbda\b|xlabel\b|xlabe\b|xlab\b|xla\b|xl\b|x2label\b|x2labe\b|x2lab\b|x2la\b|x2l\b|ylabel\b|ylabe\b|ylab\b|yla\b|yl\b|y2label\b|y2labe\b|y2lab\b|y2la\b|y2l\b|zlabel\b|zla
be\b|zlab\b|zla\b|zl\b|cblabel\b|cblabe\b|cblab\b|cbla\b|cbl\b|xtics\b|xtic\b|xti\b|noxtics\b|noxtic\b|noxti\b|x2tics\b|x2tic\b|x2ti\b|nox2tics\b|nox2tic\b|nox2ti\b|ytics\b|ytic\b|yti\b|noytics\b|noytic\b|noyti\b|y2tics\b|y2tic\b|y2ti\b|noy2tics\b|noy2tic\b|noy2ti\b|ztics\b|ztic\b|zti\b|noztics\b|noztic\b|nozti\b|cbtics\b|cbtic\b|cbti\b|nocbtics\b|nocbtic\b|nocbti\b|xdtics\b|xdtic\b|xdti\b|noxdtics\b|noxdtic\b|noxdti\b|x2dtics\b|x2dtic\b|x2dti\b|nox2dtics\b|nox2dtic\b|nox2dti\b|ydtics\b|ydtic\b|ydti\b|noydtics\b|noydtic\b|noydti\b|y2dtics\b|y2dtic\b|y2dti\b|noy2dtics\b|noy2dtic\b|noy2dti\b|zdtics\b|zdtic\b|zdti\b|nozdtics\b|nozdtic\b|nozdti\b|cbdtics\b|cbdtic\b|cbdti\b|nocbdtics\b|nocbdtic\b|nocbdti\b|xmtics\b|xmtic\b|xmti\b|noxmtics\b|noxmtic\b|noxmti\b|x2mtics\b|x2mtic\b|x2mti\b|nox2mtics\b|nox2mtic\b|nox2mti\b|ymtics\b|ymtic\b|ymti\b|noymtics\b|noymtic\b|noymti\b|y2mtics\b|y2mtic\b|y2mti\b|noy2mtics\b|noy2mtic\b|noy2mti\b|zmtics\b|zmtic\b|zmti\b|nozmtics\b|nozmtic\b|nozmti\b|cbmtics\b|cbmtic\b|cbmti\b|nocbmtics\b|nocbmtic\b|nocbmti\b|xrange\b|xrang\b|xran\b|xra\b|xr\b|x2range\b|x2rang\b|x2ran\b|x2ra\b|x2r\b|yrange\b|yrang\b|yran\b|yra\b|yr\b|y2range\b|y2rang\b|y2ran\b|y2ra\b|y2r\b|zrange\b|zrang\b|zran\b|zra\b|zr\b|cbrange\b|cbrang\b|cbran\b|cbra\b|cbr\b|rrange\b|rrang\b|rran\b|rra\b|rr\b|trange\b|trang\b|tran\b|tra\b|tr\b|urange\b|urang\b|uran\b|ura\b|ur\b|vrange\b|vrang\b|vran\b|vra\b|vr\b|xzeroaxis\b|xzeroaxi\b|xzeroax\b|xzeroa\b|x2zeroaxis\b|x2zeroaxi\b|x2zeroax\b|x2zeroa\b|yzeroaxis\b|yzeroaxi\b|yzeroax\b|yzeroa\b|y2zeroaxis\b|y2zeroaxi\b|y2zeroax\b|y2zeroa\b|zzeroaxis\b|zzeroaxi\b|zzeroax\b|zzeroa\b|zeroaxis\b|zeroaxi\b|zeroax\b|zeroa\b|zero\b|zer\b|ze\b|z\b`, NameBuiltin, Pop(1)}, - }, - "bind": { - {`!`, Keyword, Pop(1)}, - {`allwindows\b|allwindow\b|allwindo\b|allwind\b|allwin\b|allwi\b|allw\b|all\b`, NameBuiltin, nil}, - Include("genericargs"), - }, - "quit": { - {`gnuplot\b`, Keyword, nil}, - Include("noargs"), - }, - "fit": { - {`via\b`, NameBuiltin, 
nil}, - Include("plot"), - }, - "if": { - {`\)`, Punctuation, Pop(1)}, - Include("genericargs"), - }, - "pause": { - {`(mouse|any|button1|button2|button3)\b`, NameBuiltin, nil}, - {`keypress\b|keypres\b|keypre\b|keypr\b|keyp\b|key\b`, NameBuiltin, nil}, - Include("genericargs"), - }, - "plot": { - {`axes\b|axe\b|ax\b|axis\b|axi\b|binary\b|binar\b|bina\b|bin\b|every\b|ever\b|eve\b|ev\b|index\b|inde\b|ind\b|in\b|i\b|matrix\b|matri\b|matr\b|mat\b|smooth\b|smoot\b|smoo\b|smo\b|sm\b|s\b|thru\b|title\b|titl\b|tit\b|ti\b|t\b|notitle\b|notitl\b|notit\b|noti\b|not\b|using\b|usin\b|usi\b|us\b|u\b|with\b|wit\b|wi\b|w\b`, NameBuiltin, nil}, - Include("genericargs"), - }, - "save": { - {`functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|f\b|set\b|se\b|s\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b`, NameBuiltin, nil}, - Include("genericargs"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/graphql.go b/vendor/github.com/alecthomas/chroma/lexers/g/graphql.go deleted file mode 100644 index 7d465cd4c179..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/graphql.go +++ /dev/null @@ -1,49 +0,0 @@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Go lexer. 
-var Graphql = internal.Register(MustNewLazyLexer( - &Config{ - Name: "GraphQL", - Aliases: []string{"graphql", "graphqls", "gql"}, - Filenames: []string{"*.graphql", "*.graphqls"}, - }, - graphqlRules, -)) - -func graphqlRules() Rules { - return Rules{ - "root": { - {`(query|mutation|subscription|fragment|scalar|implements|interface|union|enum|input|type)`, KeywordDeclaration, Push("type")}, - {`(on|extend|schema|directive|\.\.\.)`, KeywordDeclaration, nil}, - {`(QUERY|MUTATION|SUBSCRIPTION|FIELD|FRAGMENT_DEFINITION|FRAGMENT_SPREAD|INLINE_FRAGMENT|SCHEMA|SCALAR|OBJECT|FIELD_DEFINITION|ARGUMENT_DEFINITION|INTERFACE|UNION|ENUM|ENUM_VALUE|INPUT_OBJECT|INPUT_FIELD_DEFINITION)\b`, KeywordConstant, nil}, - {`[^\W\d]\w*`, NameProperty, nil}, - {`\@\w+`, NameDecorator, nil}, - {`:`, Punctuation, Push("type")}, - {`[\(\)\{\}\[\],!\|=]`, Punctuation, nil}, - {`\$\w+`, NameVariable, nil}, - {`\d+i`, LiteralNumber, nil}, - {`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, - {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, - {`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, - {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, - {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, - {`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, - {`"""[\x00-\x7F]*?"""`, LiteralString, nil}, - {`"(\\["\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])"`, LiteralStringChar, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`"(true|false|null)*"`, Literal, nil}, - {`[\r\n\s]+`, Whitespace, nil}, - {`#[^\r\n]*`, Comment, nil}, - }, - // Treats the next word as a class, default rules it would be a property - "type": { - {`[^\W\d]\w*`, NameClass, Pop(1)}, - Include("root"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/groff.go b/vendor/github.com/alecthomas/chroma/lexers/g/groff.go deleted file mode 100644 index 65a70934f210..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/groff.go +++ /dev/null @@ -1,47 +0,0 
@@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Groff lexer. -var Groff = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Groff", - Aliases: []string{"groff", "nroff", "man"}, - Filenames: []string{"*.[1-9]", "*.1p", "*.3pm", "*.man"}, - MimeTypes: []string{"application/x-troff", "text/troff"}, - }, - func() Rules { - return Rules{ - "root": { - {`(\.)(\w+)`, ByGroups(Text, Keyword), Push("request")}, - {`\.`, Punctuation, Push("request")}, - {`[^\\\n]+`, Text, Push("textline")}, - Default(Push("textline")), - }, - "textline": { - Include("escapes"), - {`[^\\\n]+`, Text, nil}, - {`\n`, Text, Pop(1)}, - }, - "escapes": { - {`\\"[^\n]*`, Comment, nil}, - {`\\[fn]\w`, LiteralStringEscape, nil}, - {`\\\(.{2}`, LiteralStringEscape, nil}, - {`\\.\[.*\]`, LiteralStringEscape, nil}, - {`\\.`, LiteralStringEscape, nil}, - {`\\\n`, Text, Push("request")}, - }, - "request": { - {`\n`, Text, Pop(1)}, - Include("escapes"), - {`"[^\n"]+"`, LiteralStringDouble, nil}, - {`\d+`, LiteralNumber, nil}, - {`\S+`, LiteralString, nil}, - {`\s+`, Text, nil}, - }, - } - }, -)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go b/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go deleted file mode 100644 index 8d37bd87ed77..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go +++ /dev/null @@ -1,62 +0,0 @@ -package g - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Groovy lexer. 
-var Groovy = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Groovy", - Aliases: []string{"groovy"}, - Filenames: []string{"*.groovy", "*.gradle"}, - MimeTypes: []string{"text/x-groovy"}, - DotAll: true, - }, - groovyRules, -)) - -func groovyRules() Rules { - return Rules{ - "root": { - {`#!(.*?)$`, CommentPreproc, Push("base")}, - Default(Push("base")), - }, - "base": { - {`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, - {`[^\S\n]+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`@[a-zA-Z_][\w.]*`, NameDecorator, nil}, - {`(as|assert|break|case|catch|continue|default|do|else|finally|for|if|in|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b`, Keyword, nil}, - {`(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b`, KeywordDeclaration, nil}, - {`(def|boolean|byte|char|double|float|int|long|short|void)\b`, KeywordType, nil}, - {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), nil}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`(class|interface)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, - {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`""".*?"""`, LiteralStringDouble, nil}, - {`'''.*?'''`, LiteralStringSingle, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`\$/((?!/\$).)*/\$`, LiteralString, nil}, - {`/(\\\\|\\"|[^/])*/`, LiteralString, nil}, - {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, - {`(\.)([a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, - {`[a-zA-Z_]\w*:`, NameLabel, nil}, - {`[a-zA-Z_$]\w*`, Name, nil}, - {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, - {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`[0-9]+L?`, 
LiteralNumberInteger, nil}, - {`\n`, Text, nil}, - }, - "class": { - {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, - }, - "import": { - {`[\w.]+\*?`, NameNamespace, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go b/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go deleted file mode 100644 index d34ef3a98fbe..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go +++ /dev/null @@ -1,60 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Handlebars lexer. -var Handlebars = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Handlebars", - Aliases: []string{"handlebars", "hbs"}, - Filenames: []string{"*.handlebars", "*.hbs"}, - MimeTypes: []string{}, - }, - handlebarsRules, -)) - -func handlebarsRules() Rules { - return Rules{ - "root": { - {`[^{]+`, Other, nil}, - {`\{\{!.*\}\}`, Comment, nil}, - {`(\{\{\{)(\s*)`, ByGroups(CommentSpecial, Text), Push("tag")}, - {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("tag")}, - }, - "tag": { - {`\s+`, Text, nil}, - {`\}\}\}`, CommentSpecial, Pop(1)}, - {`\}\}`, CommentPreproc, Pop(1)}, - {`([#/]*)(each|if|unless|else|with|log|in(?:line)?)`, ByGroups(Keyword, Keyword), nil}, - {`#\*inline`, Keyword, nil}, - {`([#/])([\w-]+)`, ByGroups(NameFunction, NameFunction), nil}, - {`([\w-]+)(=)`, ByGroups(NameAttribute, Operator), nil}, - {`(>)(\s*)(@partial-block)`, ByGroups(Keyword, Text, Keyword), nil}, - {`(#?>)(\s*)([\w-]+)`, ByGroups(Keyword, Text, NameVariable), nil}, - {`(>)(\s*)(\()`, ByGroups(Keyword, Text, Punctuation), Push("dynamic-partial")}, - Include("generic"), - }, - "dynamic-partial": { - {`\s+`, Text, nil}, - {`\)`, Punctuation, Pop(1)}, - {`(lookup)(\s+)(\.|this)(\s+)`, ByGroups(Keyword, Text, NameVariable, Text), nil}, - {`(lookup)(\s+)(\S+)`, ByGroups(Keyword, Text, UsingSelf("variable")), nil}, - {`[\w-]+`, NameFunction, nil}, - Include("generic"), - }, - 
"variable": { - {`[a-zA-Z][\w-]*`, NameVariable, nil}, - {`\.[\w-]+`, NameVariable, nil}, - {`(this\/|\.\/|(\.\.\/)+)[\w-]+`, NameVariable, nil}, - }, - "generic": { - Include("variable"), - {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, - {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go b/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go deleted file mode 100644 index e34f03ef16cd..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go +++ /dev/null @@ -1,103 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Haskell lexer. -var Haskell = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Haskell", - Aliases: []string{"haskell", "hs"}, - Filenames: []string{"*.hs"}, - MimeTypes: []string{"text/x-haskell"}, - }, - haskellRules, -)) - -func haskellRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil}, - {`\{-`, CommentMultiline, Push("comment")}, - {`\bimport\b`, KeywordReserved, Push("import")}, - {`\bmodule\b`, KeywordReserved, Push("module")}, - {`\berror\b`, NameException, nil}, - {`\b(case|class|data|default|deriving|do|else|family|if|in|infix[lr]?|instance|let|newtype|of|then|type|where|_)(?!\')\b`, KeywordReserved, nil}, - {`'[^\\]'`, LiteralStringChar, nil}, - {`^[_\p{Ll}][\w\']*`, NameFunction, nil}, - {`'?[_\p{Ll}][\w']*`, Name, nil}, - {`('')?[\p{Lu}][\w\']*`, KeywordType, nil}, - {`(')[\p{Lu}][\w\']*`, KeywordType, nil}, - {`(')\[[^\]]*\]`, KeywordType, nil}, - {`(')\([^)]*\)`, KeywordType, nil}, - {`\\(?![:!#$%&*+.\\/<=>?@^|~-]+)`, NameFunction, nil}, - {`(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)`, OperatorWord, nil}, - {`:[:!#$%&*+.\\/<=>?@^|~-]*`, KeywordType, nil}, - 
{`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, - {`\d+[eE][+-]?\d+`, LiteralNumberFloat, nil}, - {`\d+\.\d+([eE][+-]?\d+)?`, LiteralNumberFloat, nil}, - {`0[oO][0-7]+`, LiteralNumberOct, nil}, - {`0[xX][\da-fA-F]+`, LiteralNumberHex, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`'`, LiteralStringChar, Push("character")}, - {`"`, LiteralString, Push("string")}, - {`\[\]`, KeywordType, nil}, - {`\(\)`, NameBuiltin, nil}, - {"[][(),;`{}]", Punctuation, nil}, - }, - "import": { - {`\s+`, Text, nil}, - {`"`, LiteralString, Push("string")}, - {`\)`, Punctuation, Pop(1)}, - {`qualified\b`, Keyword, nil}, - {`([\p{Lu}][\w.]*)(\s+)(as)(\s+)([\p{Lu}][\w.]*)`, ByGroups(NameNamespace, Text, Keyword, Text, Name), Pop(1)}, - {`([\p{Lu}][\w.]*)(\s+)(hiding)(\s+)(\()`, ByGroups(NameNamespace, Text, Keyword, Text, Punctuation), Push("funclist")}, - {`([\p{Lu}][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, - {`[\w.]+`, NameNamespace, Pop(1)}, - }, - "module": { - {`\s+`, Text, nil}, - {`([\p{Lu}][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, - {`[\p{Lu}][\w.]*`, NameNamespace, Pop(1)}, - }, - "funclist": { - {`\s+`, Text, nil}, - {`[\p{Lu}]\w*`, KeywordType, nil}, - {`(_[\w\']+|[\p{Ll}][\w\']*)`, NameFunction, nil}, - {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil}, - {`\{-`, CommentMultiline, Push("comment")}, - {`,`, Punctuation, nil}, - {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, - {`\(`, Punctuation, Push("funclist", "funclist")}, - {`\)`, Punctuation, Pop(2)}, - }, - "comment": { - {`[^-{}]+`, CommentMultiline, nil}, - {`\{-`, CommentMultiline, Push()}, - {`-\}`, CommentMultiline, Pop(1)}, - {`[-{}]`, CommentMultiline, nil}, - }, - "character": { - {`[^\\']'`, LiteralStringChar, Pop(1)}, - {`\\`, LiteralStringEscape, Push("escape")}, - {`'`, LiteralStringChar, Pop(1)}, - }, - "string": { - {`[^\\"]+`, LiteralString, nil}, - {`\\`, LiteralStringEscape, Push("escape")}, - {`"`, LiteralString, 
Pop(1)}, - }, - "escape": { - {`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)}, - {`\^[][\p{Lu}@^_]`, LiteralStringEscape, Pop(1)}, - {`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)}, - {`o[0-7]+`, LiteralStringEscape, Pop(1)}, - {`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)}, - {`\d+`, LiteralStringEscape, Pop(1)}, - {`\s+\\`, LiteralStringEscape, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hcl.go b/vendor/github.com/alecthomas/chroma/lexers/h/hcl.go deleted file mode 100644 index 7206fba0868e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/hcl.go +++ /dev/null @@ -1,73 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// HCL lexer. -var HCL = internal.Register(MustNewLazyLexer( - &Config{ - Name: "HCL", - Aliases: []string{"hcl"}, - Filenames: []string{"*.hcl"}, - MimeTypes: []string{"application/x-hcl"}, - }, - hclRules, -)) - -func hclRules() Rules { - return Rules{ - "root": { - Include("string"), - Include("punctuation"), - Include("curly"), - Include("basic"), - Include("whitespace"), - {`[0-9]+`, LiteralNumber, nil}, - }, - "basic": { - {Words(`\b`, `\b`, `true`, `false`), KeywordType, nil}, - {`\s*/\*`, CommentMultiline, Push("comment")}, - {`\s*#.*\n`, CommentSingle, nil}, - {`(.*?)(\s*)(=)`, ByGroups(Name, Text, Operator), nil}, - {`\d+`, Number, nil}, - {`\b\w+\b`, Keyword, nil}, - {`\$\{`, LiteralStringInterpol, Push("var_builtin")}, - }, - "function": { - {`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil}, - Include("punctuation"), - Include("curly"), - }, - "var_builtin": { - {`\$\{`, LiteralStringInterpol, Push()}, - {Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil}, - Include("string"), - Include("punctuation"), - {`\s+`, Text, nil}, - {`\}`, LiteralStringInterpol, Pop(1)}, - }, - 
"string": { - {`(".*")`, ByGroups(LiteralStringDouble), nil}, - }, - "punctuation": { - {`[\[\](),.]`, Punctuation, nil}, - }, - "curly": { - {`\{`, TextPunctuation, nil}, - {`\}`, TextPunctuation, nil}, - }, - "comment": { - {`[^*/]`, CommentMultiline, nil}, - {`/\*`, CommentMultiline, Push()}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[*/]`, CommentMultiline, nil}, - }, - "whitespace": { - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`\\\n`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go b/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go deleted file mode 100644 index 089353749622..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go +++ /dev/null @@ -1,71 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Hexdump lexer. -var Hexdump = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Hexdump", - Aliases: []string{"hexdump"}, - Filenames: []string{}, - MimeTypes: []string{}, - }, - hexdumpRules, -)) - -func hexdumpRules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - Include("offset"), - {`([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil}, - {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, - {`(\s{2,3})(\>)(.{16})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push("bracket-strings")}, - {`(\s{2,3})(\|)(.{16})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push("piped-strings")}, - {`(\s{2,3})(\>)(.{1,15})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, - {`(\s{2,3})(\|)(.{1,15})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, - {`(\s{2,3})(.{1,15})$`, ByGroups(Text, LiteralString), nil}, - {`(\s{2,3})(.{16}|.{20})$`, ByGroups(Text, LiteralString), Push("nonpiped-strings")}, - {`\s`, Text, nil}, - {`^\*`, Punctuation, nil}, - }, - "offset": { - {`^([0-9A-Ha-h]+)(:)`, 
ByGroups(NameLabel, Punctuation), Push("offset-mode")}, - {`^[0-9A-Ha-h]+`, NameLabel, nil}, - }, - "offset-mode": { - {`\s`, Text, Pop(1)}, - {`[0-9A-Ha-h]+`, NameLabel, nil}, - {`:`, Punctuation, nil}, - }, - "piped-strings": { - {`\n`, Text, nil}, - Include("offset"), - {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, - {`(\s{2,3})(\|)(.{1,16})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, - {`\s`, Text, nil}, - {`^\*`, Punctuation, nil}, - }, - "bracket-strings": { - {`\n`, Text, nil}, - Include("offset"), - {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, - {`(\s{2,3})(\>)(.{1,16})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, - {`\s`, Text, nil}, - {`^\*`, Punctuation, nil}, - }, - "nonpiped-strings": { - {`\n`, Text, nil}, - Include("offset"), - {`([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil}, - {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, - {`(\s{19,})(.{1,20}?)$`, ByGroups(Text, LiteralString), nil}, - {`(\s{2,3})(.{1,20})$`, ByGroups(Text, LiteralString), nil}, - {`\s`, Text, nil}, - {`^\*`, Punctuation, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hlb.go b/vendor/github.com/alecthomas/chroma/lexers/h/hlb.go deleted file mode 100644 index 6c5f63771575..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/hlb.go +++ /dev/null @@ -1,58 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// HLB lexer. 
-var HLB = internal.Register(MustNewLazyLexer( - &Config{ - Name: "HLB", - Aliases: []string{"hlb"}, - Filenames: []string{"*.hlb"}, - MimeTypes: []string{}, - }, - hlbRules, -)) - -func hlbRules() Rules { - return Rules{ - "root": { - {`(#.*)`, ByGroups(CommentSingle), nil}, - {`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil}, - {`((\b(true|false)\b))`, ByGroups(NameBuiltin), nil}, - {`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil}, - {`(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()`, ByGroups(NameFunction, Punctuation), Push("params")}, - {`(\{)`, ByGroups(Punctuation), Push("block")}, - {`(\n|\r|\r\n)`, Text, nil}, - {`.`, Text, nil}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\"`, LiteralString, nil}, - {`[^\\"]+`, LiteralString, nil}, - }, - "block": { - {`(\})`, ByGroups(Punctuation), Pop(1)}, - {`(#.*)`, ByGroups(CommentSingle), nil}, - {`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil}, - {`((\b(true|false)\b))`, ByGroups(KeywordConstant), nil}, - {`"`, LiteralString, Push("string")}, - {`(with)`, ByGroups(KeywordReserved), nil}, - {`(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(KeywordReserved, Text, NameFunction), nil}, - {`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)`, ByGroups(KeywordType, Text, Punctuation), Push("block")}, - {`(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil}, - {`(\n|\r|\r\n)`, Text, nil}, - {`.`, Text, nil}, - }, - "params": { - {`(\))`, ByGroups(Punctuation), Pop(1)}, - 
{`(variadic)`, ByGroups(Keyword), nil}, - {`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil}, - {`(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil}, - {`(\n|\r|\r\n)`, Text, nil}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/html.go b/vendor/github.com/alecthomas/chroma/lexers/h/html.go deleted file mode 100644 index b9ca1e1d6f62..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/html.go +++ /dev/null @@ -1,63 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - . "github.com/alecthomas/chroma/lexers/c" // nolint - "github.com/alecthomas/chroma/lexers/internal" - . "github.com/alecthomas/chroma/lexers/j" // nolint -) - -// HTML lexer. -var HTML = internal.Register(MustNewLazyLexer( - &Config{ - Name: "HTML", - Aliases: []string{"html"}, - Filenames: []string{"*.html", "*.htm", "*.xhtml", "*.xslt"}, - MimeTypes: []string{"text/html", "application/xhtml+xml"}, - NotMultiline: true, - DotAll: true, - CaseInsensitive: true, - }, - htmlRules, -)) - -func htmlRules() Rules { - return Rules{ - "root": { - {`[^<&]+`, Text, nil}, - {`&\S*?;`, NameEntity, nil}, - {`\<\!\[CDATA\[.*?\]\]\>`, CommentPreproc, nil}, - {``, Comment, Pop(1)}, - {`-`, Comment, nil}, - }, - "tag": { - {`\s+`, Text, nil}, - {`([\w:-]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")}, - {`[\w:-]+`, NameAttribute, nil}, - {`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)}, - }, - "script-content": { - {`(<)(\s*)(/)(\s*)(script)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), Pop(1)}, - {`.+?(?=<\s*/\s*script\s*>)`, Using(Javascript), nil}, - }, - "style-content": { - {`(<)(\s*)(/)(\s*)(style)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), Pop(1)}, - {`.+?(?=<\s*/\s*style\s*>)`, Using(CSS), nil}, - }, - "attr": { - {`".*?"`, LiteralString, Pop(1)}, - {`'.*?'`, LiteralString, 
Pop(1)}, - {`[^\s>]+`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hy.go b/vendor/github.com/alecthomas/chroma/lexers/h/hy.go deleted file mode 100644 index 7a078979938f..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/h/hy.go +++ /dev/null @@ -1,55 +0,0 @@ -package h - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Hy lexer. -var Hy = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Hy", - Aliases: []string{"hylang"}, - Filenames: []string{"*.hy"}, - MimeTypes: []string{"text/x-hy", "application/x-hy"}, - }, - hyRules, -)) - -func hyRules() Rules { - return Rules{ - "root": { - {`;.*$`, CommentSingle, nil}, - {`[,\s]+`, Text, nil}, - {`-?\d+\.\d+`, LiteralNumberFloat, nil}, - {`-?\d+`, LiteralNumberInteger, nil}, - {`0[0-7]+j?`, LiteralNumberOct, nil}, - {`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, - {`\\(.|[a-z]+)`, LiteralStringChar, nil}, - {`^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, - {`^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil}, - {`::?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, - {"~@|[`\\'#^~&@]", Operator, nil}, - Include("py-keywords"), - Include("py-builtins"), - {Words(``, ` `, `cond`, `for`, `->`, `->>`, `car`, `cdr`, `first`, `rest`, `let`, `when`, `unless`, `import`, `do`, `progn`, `get`, `slice`, `assoc`, `with-decorator`, `,`, `list_comp`, `kwapply`, `~`, `is`, `in`, `is-not`, `not-in`, `quasiquote`, `unquote`, `unquote-splice`, `quote`, `|`, `<<=`, `>>=`, `foreach`, `while`, `eval-and-compile`, `eval-when-compile`), Keyword, nil}, - {Words(``, ` `, `def`, `defn`, `defun`, `defmacro`, `defclass`, `lambda`, `fn`, `setv`), KeywordDeclaration, nil}, - {Words(``, ` `, `cycle`, `dec`, `distinct`, `drop`, `even?`, `filter`, `inc`, 
`instance?`, `iterable?`, `iterate`, `iterator?`, `neg?`, `none?`, `nth`, `numeric?`, `odd?`, `pos?`, `remove`, `repeat`, `repeatedly`, `take`, `take_nth`, `take_while`, `zero?`), NameBuiltin, nil}, - {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, - {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, - {`(\[|\])`, Punctuation, nil}, - {`(\{|\})`, Punctuation, nil}, - {`(\(|\))`, Punctuation, nil}, - }, - "py-keywords": { - {Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, - }, - "py-builtins": { - {Words(`(??@^|_~:\\]).*?)$`, ByGroups(Text, CommentSingle), nil}, - {`(\s*)(\|{3}.*?)$`, ByGroups(Text, CommentSingle), nil}, - {`(\s*)(\{-)`, ByGroups(Text, CommentMultiline), Push("comment")}, - {`^(\s*)([^\s(){}]+)(\s*)(:)(\s*)`, ByGroups(Text, NameFunction, Text, OperatorWord, Text), nil}, - {`\b(case|class|data|default|using|do|else|if|in|infix[lr]?|instance|rewrite|auto|namespace|codata|mutual|private|public|abstract|total|partial|let|proof|of|then|static|where|_|with|pattern|term|syntax|prefix|postulate|parameters|record|dsl|impossible|implicit|tactics|intros|intro|compute|refine|exact|trivial)(?!\')\b`, KeywordReserved, nil}, - {`(import|module)(\s+)`, ByGroups(KeywordReserved, Text), Push("module")}, - {`('')?[A-Z][\w\']*`, KeywordType, nil}, - {`[a-z][\w\']*`, Text, nil}, - {`(<-|::|->|=>|=)`, OperatorWord, nil}, - {`([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)`, OperatorWord, nil}, - {`\d+[eE][+-]?\d+`, LiteralNumberFloat, nil}, - {`\d+\.\d+([eE][+-]?\d+)?`, LiteralNumberFloat, nil}, - {`0[xX][\da-fA-F]+`, LiteralNumberHex, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`'`, LiteralStringChar, Push("character")}, - {`"`, LiteralString, Push("string")}, - {`[^\s(){}]+`, Text, nil}, - {`\s+?`, Text, nil}, - }, - "module": { - {`\s+`, Text, nil}, - {`([A-Z][\w.]*)(\s+)(\()`, 
ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, - {`[A-Z][\w.]*`, NameNamespace, Pop(1)}, - }, - "funclist": { - {`\s+`, Text, nil}, - {`[A-Z]\w*`, KeywordType, nil}, - {`(_[\w\']+|[a-z][\w\']*)`, NameFunction, nil}, - {`--.*$`, CommentSingle, nil}, - {`\{-`, CommentMultiline, Push("comment")}, - {`,`, Punctuation, nil}, - {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, - {`\(`, Punctuation, Push("funclist", "funclist")}, - {`\)`, Punctuation, Pop(2)}, - }, - "comment": { - {`[^-{}]+`, CommentMultiline, nil}, - {`\{-`, CommentMultiline, Push()}, - {`-\}`, CommentMultiline, Pop(1)}, - {`[-{}]`, CommentMultiline, nil}, - }, - "character": { - {`[^\\']`, LiteralStringChar, nil}, - {`\\`, LiteralStringEscape, Push("escape")}, - {`'`, LiteralStringChar, Pop(1)}, - }, - "string": { - {`[^\\"]+`, LiteralString, nil}, - {`\\`, LiteralStringEscape, Push("escape")}, - {`"`, LiteralString, Pop(1)}, - }, - "escape": { - {`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)}, - {`\^[][A-Z@^_]`, LiteralStringEscape, Pop(1)}, - {`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)}, - {`o[0-7]+`, LiteralStringEscape, Pop(1)}, - {`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)}, - {`\d+`, LiteralStringEscape, Pop(1)}, - {`\s+\\`, LiteralStringEscape, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/igor.go b/vendor/github.com/alecthomas/chroma/lexers/i/igor.go deleted file mode 100644 index bbb1d7230bf6..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/i/igor.go +++ /dev/null @@ -1,36 +0,0 @@ -package i - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Igor lexer. 
-var Igor = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Igor", - Aliases: []string{"igor", "igorpro"}, - Filenames: []string{"*.ipf"}, - MimeTypes: []string{"text/ipf"}, - CaseInsensitive: true, - }, - igorRules, -)) - -func igorRules() Rules { - return Rules{ - "root": { - {`//.*$`, CommentSingle, nil}, - {`"([^"\\]|\\.)*"`, LiteralString, nil}, - {Words(`\b`, `\b`, `if`, `else`, `elseif`, `endif`, `for`, `endfor`, `strswitch`, `switch`, `case`, `default`, `endswitch`, `do`, `while`, `try`, `catch`, `endtry`, `break`, `continue`, `return`, `AbortOnRTE`, `AbortOnValue`), Keyword, nil}, - {Words(`\b`, `\b`, `variable`, `string`, `constant`, `strconstant`, `NVAR`, `SVAR`, `WAVE`, `STRUCT`, `dfref`, `funcref`, `char`, `uchar`, `int16`, `uint16`, `int32`, `uint32`, `int64`, `uint64`, `float`, `double`), KeywordType, nil}, - {Words(`\b`, `\b`, `override`, `ThreadSafe`, `MultiThread`, `static`, `Proc`, `Picture`, `Prompt`, `DoPrompt`, `macro`, `window`, `function`, `end`, `Structure`, `EndStructure`, `EndMacro`, `Menu`, `SubMenu`), KeywordReserved, nil}, - {Words(`\b`, `\b`, `Abort`, `AddFIFOData`, `AddFIFOVectData`, `AddMovieAudio`, `AddMovieFrame`, `AddWavesToBoxPlot`, `AddWavesToViolinPlot`, `AdoptFiles`, `APMath`, `Append`, `AppendBoxPlot`, `AppendImage`, `AppendLayoutObject`, `AppendMatrixContour`, `AppendText`, `AppendToGizmo`, `AppendToGraph`, `AppendToLayout`, `AppendToTable`, `AppendViolinPlot`, `AppendXYZContour`, `AutoPositionWindow`, `AxonTelegraphFindServers`, `BackgroundInfo`, `Beep`, `BoundingBall`, `BoxSmooth`, `BrowseURL`, `BuildMenu`, `Button`, `cd`, `Chart`, `CheckBox`, `CheckDisplayed`, `ChooseColor`, `Close`, `CloseHelp`, `CloseMovie`, `CloseProc`, `ColorScale`, `ColorTab2Wave`, `Concatenate`, `ControlBar`, `ControlInfo`, `ControlUpdate`, `ConvertGlobalStringTextEncoding`, `ConvexHull`, `Convolve`, `CopyDimLabels`, `CopyFile`, `CopyFolder`, `CopyScales`, `Correlate`, `CreateAliasShortcut`, `CreateBrowser`, `Cross`, `CtrlBackground`, 
`CtrlFIFO`, `CtrlNamedBackground`, `Cursor`, `CurveFit`, `CustomControl`, `CWT`, `DAQmx_AI_SetupReader`, `DAQmx_AO_SetOutputs`, `DAQmx_CTR_CountEdges`, `DAQmx_CTR_OutputPulse`, `DAQmx_CTR_Period`, `DAQmx_CTR_PulseWidth`, `DAQmx_DIO_Config`, `DAQmx_DIO_WriteNewData`, `DAQmx_Scan`, `DAQmx_WaveformGen`, `Debugger`, `DebuggerOptions`, `DefaultFont`, `DefaultGuiControls`, `DefaultGuiFont`, `DefaultTextEncoding`, `DefineGuide`, `DelayUpdate`, `DeleteAnnotations`, `DeleteFile`, `DeleteFolder`, `DeletePoints`, `Differentiate`, `dir`, `Display`, `DisplayHelpTopic`, `DisplayProcedure`, `DoAlert`, `DoIgorMenu`, `DoUpdate`, `DoWindow`, `DoXOPIdle`, `DPSS`, `DrawAction`, `DrawArc`, `DrawBezier`, `DrawLine`, `DrawOval`, `DrawPICT`, `DrawPoly`, `DrawRect`, `DrawRRect`, `DrawText`, `DrawUserShape`, `DSPDetrend`, `DSPPeriodogram`, `Duplicate`, `DuplicateDataFolder`, `DWT`, `EdgeStats`, `Edit`, `ErrorBars`, `EstimatePeakSizes`, `Execute`, `ExecuteScriptText`, `ExperimentInfo`, `ExperimentModified`, `ExportGizmo`, `Extract`, `FastGaussTransform`, `FastOp`, `FBinRead`, `FBinWrite`, `FFT`, `FGetPos`, `FIFOStatus`, `FIFO2Wave`, `FilterFIR`, `FilterIIR`, `FindAPeak`, `FindContour`, `FindDuplicates`, `FindLevel`, `FindLevels`, `FindPeak`, `FindPointsInPoly`, `FindRoots`, `FindSequence`, `FindValue`, `FMaxFlat`, `FPClustering`, `fprintf`, `FReadLine`, `FSetPos`, `FStatus`, `FTPCreateDirectory`, `FTPDelete`, `FTPDownload`, `FTPUpload`, `FuncFit`, `FuncFitMD`, `GBLoadWave`, `GetAxis`, `GetCamera`, `GetFileFolderInfo`, `GetGizmo`, `GetLastUserMenuInfo`, `GetMarquee`, `GetMouse`, `GetSelection`, `GetWindow`, `GISCreateVectorLayer`, `GISGetRasterInfo`, `GISGetRegisteredFileInfo`, `GISGetVectorLayerInfo`, `GISLoadRasterData`, `GISLoadVectorData`, `GISRasterizeVectorData`, `GISRegisterFile`, `GISTransformCoords`, `GISUnRegisterFile`, `GISWriteFieldData`, `GISWriteGeometryData`, `GISWriteRaster`, `GPIBReadBinaryWave2`, `GPIBReadBinary2`, `GPIBReadWave2`, `GPIBRead2`, `GPIBWriteBinaryWave2`, 
`GPIBWriteBinary2`, `GPIBWriteWave2`, `GPIBWrite2`, `GPIB2`, `GraphNormal`, `GraphWaveDraw`, `GraphWaveEdit`, `Grep`, `GroupBox`, `Hanning`, `HDFInfo`, `HDFReadImage`, `HDFReadSDS`, `HDFReadVset`, `HDF5CloseFile`, `HDF5CloseGroup`, `HDF5ConvertColors`, `HDF5CreateFile`, `HDF5CreateGroup`, `HDF5CreateLink`, `HDF5Dump`, `HDF5DumpErrors`, `HDF5DumpState`, `HDF5FlushFile`, `HDF5ListAttributes`, `HDF5ListGroup`, `HDF5LoadData`, `HDF5LoadGroup`, `HDF5LoadImage`, `HDF5OpenFile`, `HDF5OpenGroup`, `HDF5SaveData`, `HDF5SaveGroup`, `HDF5SaveImage`, `HDF5TestOperation`, `HDF5UnlinkObject`, `HideIgorMenus`, `HideInfo`, `HideProcedures`, `HideTools`, `HilbertTransform`, `Histogram`, `ICA`, `IFFT`, `ImageAnalyzeParticles`, `ImageBlend`, `ImageBoundaryToMask`, `ImageComposite`, `ImageEdgeDetection`, `ImageFileInfo`, `ImageFilter`, `ImageFocus`, `ImageFromXYZ`, `ImageGenerateROIMask`, `ImageGLCM`, `ImageHistModification`, `ImageHistogram`, `ImageInterpolate`, `ImageLineProfile`, `ImageLoad`, `ImageMorphology`, `ImageRegistration`, `ImageRemoveBackground`, `ImageRestore`, `ImageRotate`, `ImageSave`, `ImageSeedFill`, `ImageSkeleton3d`, `ImageSnake`, `ImageStats`, `ImageThreshold`, `ImageTransform`, `ImageUnwrapPhase`, `ImageWindow`, `IndexSort`, `InsertPoints`, `Integrate`, `IntegrateODE`, `Integrate2D`, `Interpolate2`, `Interpolate3D`, `Interp3DPath`, `ITCCloseAll2`, `ITCCloseDevice2`, `ITCConfigAllChannels2`, `ITCConfigChannelReset2`, `ITCConfigChannelUpload2`, `ITCConfigChannel2`, `ITCFIFOAvailableAll2`, `ITCFIFOAvailable2`, `ITCGetAllChannelsConfig2`, `ITCGetChannelConfig2`, `ITCGetCurrentDevice2`, `ITCGetDeviceInfo2`, `ITCGetDevices2`, `ITCGetErrorString2`, `ITCGetSerialNumber2`, `ITCGetState2`, `ITCGetVersions2`, `ITCInitialize2`, `ITCOpenDevice2`, `ITCReadADC2`, `ITCReadDigital2`, `ITCReadTimer2`, `ITCSelectDevice2`, `ITCSetDAC2`, `ITCSetGlobals2`, `ITCSetModes2`, `ITCSetState2`, `ITCStartAcq2`, `ITCStopAcq2`, `ITCUpdateFIFOPositionAll2`, `ITCUpdateFIFOPosition2`, 
`ITCWriteDigital2`, `JCAMPLoadWave`, `JointHistogram`, `KillBackground`, `KillControl`, `KillDataFolder`, `KillFIFO`, `KillFreeAxis`, `KillPath`, `KillPICTs`, `KillStrings`, `KillVariables`, `KillWaves`, `KillWindow`, `KMeans`, `Label`, `Layout`, `LayoutPageAction`, `LayoutSlideShow`, `Legend`, `LinearFeedbackShiftRegister`, `ListBox`, `LoadData`, `LoadPackagePreferences`, `LoadPICT`, `LoadWave`, `Loess`, `LombPeriodogram`, `Make`, `MakeIndex`, `MarkPerfTestTime`, `MatrixConvolve`, `MatrixCorr`, `MatrixEigenV`, `MatrixFilter`, `MatrixGaussJ`, `MatrixGLM`, `MatrixInverse`, `MatrixLinearSolve`, `MatrixLinearSolveTD`, `MatrixLLS`, `MatrixLUBkSub`, `MatrixLUD`, `MatrixLUDTD`, `MatrixMultiply`, `MatrixOP`, `MatrixSchur`, `MatrixSolve`, `MatrixSVBkSub`, `MatrixSVD`, `MatrixTranspose`, `MCC_FindServers`, `MeasureStyledText`, `MFR_CheckForNewBricklets`, `MFR_CloseResultFile`, `MFR_CreateOverviewTable`, `MFR_GetBrickletCount`, `MFR_GetBrickletData`, `MFR_GetBrickletDeployData`, `MFR_GetBrickletMetaData`, `MFR_GetBrickletRawData`, `MFR_GetReportTemplate`, `MFR_GetResultFileMetaData`, `MFR_GetResultFileName`, `MFR_GetVernissageVersion`, `MFR_GetVersion`, `MFR_GetXOPErrorMessage`, `MFR_OpenResultFile`, `MLLoadWave`, `Modify`, `ModifyBoxPlot`, `ModifyBrowser`, `ModifyCamera`, `ModifyContour`, `ModifyControl`, `ModifyControlList`, `ModifyFreeAxis`, `ModifyGizmo`, `ModifyGraph`, `ModifyImage`, `ModifyLayout`, `ModifyPanel`, `ModifyTable`, `ModifyViolinPlot`, `ModifyWaterfall`, `MoveDataFolder`, `MoveFile`, `MoveFolder`, `MoveString`, `MoveSubwindow`, `MoveVariable`, `MoveWave`, `MoveWindow`, `MultiTaperPSD`, `MultiThreadingControl`, `NC_CloseFile`, `NC_DumpErrors`, `NC_Inquire`, `NC_ListAttributes`, `NC_ListObjects`, `NC_LoadData`, `NC_OpenFile`, `NeuralNetworkRun`, `NeuralNetworkTrain`, `NewCamera`, `NewDataFolder`, `NewFIFO`, `NewFIFOChan`, `NewFreeAxis`, `NewGizmo`, `NewImage`, `NewLayout`, `NewMovie`, `NewNotebook`, `NewPanel`, `NewPath`, `NewWaterfall`, `NILoadWave`, 
`NI4882`, `Note`, `Notebook`, `NotebookAction`, `Open`, `OpenHelp`, `OpenNotebook`, `Optimize`, `ParseOperationTemplate`, `PathInfo`, `PauseForUser`, `PauseUpdate`, `PCA`, `PlayMovie`, `PlayMovieAction`, `PlaySound`, `PopupContextualMenu`, `PopupMenu`, `Preferences`, `PrimeFactors`, `Print`, `printf`, `PrintGraphs`, `PrintLayout`, `PrintNotebook`, `PrintSettings`, `PrintTable`, `Project`, `PulseStats`, `PutScrapText`, `pwd`, `Quit`, `RatioFromNumber`, `Redimension`, `Remez`, `Remove`, `RemoveContour`, `RemoveFromGizmo`, `RemoveFromGraph`, `RemoveFromLayout`, `RemoveFromTable`, `RemoveImage`, `RemoveLayoutObjects`, `RemovePath`, `Rename`, `RenameDataFolder`, `RenamePath`, `RenamePICT`, `RenameWindow`, `ReorderImages`, `ReorderTraces`, `ReplaceText`, `ReplaceWave`, `Resample`, `ResumeUpdate`, `Reverse`, `Rotate`, `Save`, `SaveData`, `SaveExperiment`, `SaveGizmoCopy`, `SaveGraphCopy`, `SaveNotebook`, `SavePackagePreferences`, `SavePICT`, `SaveTableCopy`, `SetActiveSubwindow`, `SetAxis`, `SetBackground`, `SetDashPattern`, `SetDataFolder`, `SetDimLabel`, `SetDrawEnv`, `SetDrawLayer`, `SetFileFolderInfo`, `SetFormula`, `SetIdlePeriod`, `SetIgorHook`, `SetIgorMenuMode`, `SetIgorOption`, `SetMarquee`, `SetProcessSleep`, `SetRandomSeed`, `SetScale`, `SetVariable`, `SetWaveLock`, `SetWaveTextEncoding`, `SetWindow`, `ShowIgorMenus`, `ShowInfo`, `ShowTools`, `Silent`, `Sleep`, `Slider`, `Smooth`, `SmoothCustom`, `Sort`, `SortColumns`, `SoundInRecord`, `SoundInSet`, `SoundInStartChart`, `SoundInStatus`, `SoundInStopChart`, `SoundLoadWave`, `SoundSaveWave`, `SphericalInterpolate`, `SphericalTriangulate`, `SplitString`, `SplitWave`, `sprintf`, `SQLHighLevelOp`, `sscanf`, `Stack`, `StackWindows`, `StatsAngularDistanceTest`, `StatsANOVA1Test`, `StatsANOVA2NRTest`, `StatsANOVA2RMTest`, `StatsANOVA2Test`, `StatsChiTest`, `StatsCircularCorrelationTest`, `StatsCircularMeans`, `StatsCircularMoments`, `StatsCircularTwoSampleTest`, `StatsCochranTest`, `StatsContingencyTable`, 
`StatsDIPTest`, `StatsDunnettTest`, `StatsFriedmanTest`, `StatsFTest`, `StatsHodgesAjneTest`, `StatsJBTest`, `StatsKDE`, `StatsKendallTauTest`, `StatsKSTest`, `StatsKWTest`, `StatsLinearCorrelationTest`, `StatsLinearRegression`, `StatsMultiCorrelationTest`, `StatsNPMCTest`, `StatsNPNominalSRTest`, `StatsQuantiles`, `StatsRankCorrelationTest`, `StatsResample`, `StatsSample`, `StatsScheffeTest`, `StatsShapiroWilkTest`, `StatsSignTest`, `StatsSRTest`, `StatsTTest`, `StatsTukeyTest`, `StatsVariancesTest`, `StatsWatsonUSquaredTest`, `StatsWatsonWilliamsTest`, `StatsWheelerWatsonTest`, `StatsWilcoxonRankTest`, `StatsWRCorrelationTest`, `STFT`, `String`, `StructFill`, `StructGet`, `StructPut`, `SumDimension`, `SumSeries`, `TabControl`, `Tag`, `TDMLoadData`, `TDMSaveData`, `TextBox`, `ThreadGroupPutDF`, `ThreadStart`, `TickWavesFromAxis`, `Tile`, `TileWindows`, `TitleBox`, `ToCommandLine`, `ToolsGrid`, `Triangulate3d`, `Unwrap`, `URLRequest`, `ValDisplay`, `Variable`, `VDTClosePort2`, `VDTGetPortList2`, `VDTGetStatus2`, `VDTOpenPort2`, `VDTOperationsPort2`, `VDTReadBinaryWave2`, `VDTReadBinary2`, `VDTReadHexWave2`, `VDTReadHex2`, `VDTReadWave2`, `VDTRead2`, `VDTTerminalPort2`, `VDTWriteBinaryWave2`, `VDTWriteBinary2`, `VDTWriteHexWave2`, `VDTWriteHex2`, `VDTWriteWave2`, `VDTWrite2`, `VDT2`, `VISAControl`, `VISARead`, `VISAReadBinary`, `VISAReadBinaryWave`, `VISAReadWave`, `VISAWrite`, `VISAWriteBinary`, `VISAWriteBinaryWave`, `VISAWriteWave`, `WaveMeanStdv`, `WaveStats`, `WaveTransform`, `wfprintf`, `WignerTransform`, `WindowFunction`, `XLLoadWave`), NameClass, nil}, - {Words(`\b`, `\b`, `abs`, `acos`, `acosh`, `AddListItem`, `AiryA`, `AiryAD`, `AiryB`, `AiryBD`, `alog`, `AnnotationInfo`, `AnnotationList`, `area`, `areaXY`, `asin`, `asinh`, `atan`, `atanh`, `atan2`, `AxisInfo`, `AxisList`, `AxisValFromPixel`, `AxonTelegraphAGetDataNum`, `AxonTelegraphAGetDataString`, `AxonTelegraphAGetDataStruct`, `AxonTelegraphGetDataNum`, `AxonTelegraphGetDataString`, 
`AxonTelegraphGetDataStruct`, `AxonTelegraphGetTimeoutMs`, `AxonTelegraphSetTimeoutMs`, `Base64Decode`, `Base64Encode`, `Besseli`, `Besselj`, `Besselk`, `Bessely`, `beta`, `betai`, `BinarySearch`, `BinarySearchInterp`, `binomial`, `binomialln`, `binomialNoise`, `cabs`, `CaptureHistory`, `CaptureHistoryStart`, `ceil`, `cequal`, `char2num`, `chebyshev`, `chebyshevU`, `CheckName`, `ChildWindowList`, `CleanupName`, `cmplx`, `cmpstr`, `conj`, `ContourInfo`, `ContourNameList`, `ContourNameToWaveRef`, `ContourZ`, `ControlNameList`, `ConvertTextEncoding`, `cos`, `cosh`, `cosIntegral`, `cot`, `coth`, `CountObjects`, `CountObjectsDFR`, `cpowi`, `CreationDate`, `csc`, `csch`, `CsrInfo`, `CsrWave`, `CsrWaveRef`, `CsrXWave`, `CsrXWaveRef`, `CTabList`, `DataFolderDir`, `DataFolderExists`, `DataFolderRefsEqual`, `DataFolderRefStatus`, `date`, `datetime`, `DateToJulian`, `date2secs`, `Dawson`, `defined`, `deltax`, `digamma`, `dilogarithm`, `DimDelta`, `DimOffset`, `DimSize`, `ei`, `enoise`, `equalWaves`, `erf`, `erfc`, `erfcw`, `exists`, `exp`, `expInt`, `expIntegralE1`, `expNoise`, `factorial`, `Faddeeva`, `fakedata`, `faverage`, `faverageXY`, `fDAQmx_AI_GetReader`, `fDAQmx_AO_UpdateOutputs`, `fDAQmx_ConnectTerminals`, `fDAQmx_CTR_Finished`, `fDAQmx_CTR_IsFinished`, `fDAQmx_CTR_IsPulseFinished`, `fDAQmx_CTR_ReadCounter`, `fDAQmx_CTR_ReadWithOptions`, `fDAQmx_CTR_SetPulseFrequency`, `fDAQmx_CTR_Start`, `fDAQmx_DeviceNames`, `fDAQmx_DIO_Finished`, `fDAQmx_DIO_PortWidth`, `fDAQmx_DIO_Read`, `fDAQmx_DIO_Write`, `fDAQmx_DisconnectTerminals`, `fDAQmx_ErrorString`, `fDAQmx_ExternalCalDate`, `fDAQmx_NumAnalogInputs`, `fDAQmx_NumAnalogOutputs`, `fDAQmx_NumCounters`, `fDAQmx_NumDIOPorts`, `fDAQmx_ReadChan`, `fDAQmx_ReadNamedChan`, `fDAQmx_ResetDevice`, `fDAQmx_ScanGetAvailable`, `fDAQmx_ScanGetNextIndex`, `fDAQmx_ScanStart`, `fDAQmx_ScanStop`, `fDAQmx_ScanWait`, `fDAQmx_ScanWaitWithTimeout`, `fDAQmx_SelfCalDate`, `fDAQmx_SelfCalibration`, `fDAQmx_WaveformStart`, `fDAQmx_WaveformStop`, 
`fDAQmx_WF_IsFinished`, `fDAQmx_WF_WaitUntilFinished`, `fDAQmx_WriteChan`, `FetchURL`, `FindDimLabel`, `FindListItem`, `floor`, `FontList`, `FontSizeHeight`, `FontSizeStringWidth`, `FresnelCos`, `FresnelSin`, `FuncRefInfo`, `FunctionInfo`, `FunctionList`, `FunctionPath`, `gamma`, `gammaEuler`, `gammaInc`, `gammaNoise`, `gammln`, `gammp`, `gammq`, `Gauss`, `Gauss1D`, `Gauss2D`, `gcd`, `GetBrowserLine`, `GetBrowserSelection`, `GetDataFolder`, `GetDataFolderDFR`, `GetDefaultFont`, `GetDefaultFontSize`, `GetDefaultFontStyle`, `GetDimLabel`, `GetEnvironmentVariable`, `GetErrMessage`, `GetFormula`, `GetIndependentModuleName`, `GetIndexedObjName`, `GetIndexedObjNameDFR`, `GetKeyState`, `GetRTErrMessage`, `GetRTError`, `GetRTLocation`, `GetRTLocInfo`, `GetRTStackInfo`, `GetScrapText`, `GetUserData`, `GetWavesDataFolder`, `GetWavesDataFolderDFR`, `GISGetAllFileFormats`, `GISSRefsAreEqual`, `GizmoInfo`, `GizmoScale`, `gnoise`, `GrepList`, `GrepString`, `GuideInfo`, `GuideNameList`, `Hash`, `hcsr`, `HDF5AttributeInfo`, `HDF5DatasetInfo`, `HDF5LibraryInfo`, `HDF5TypeInfo`, `hermite`, `hermiteGauss`, `HyperGNoise`, `HyperGPFQ`, `HyperG0F1`, `HyperG1F1`, `HyperG2F1`, `IgorInfo`, `IgorVersion`, `imag`, `ImageInfo`, `ImageNameList`, `ImageNameToWaveRef`, `IndependentModuleList`, `IndexedDir`, `IndexedFile`, `IndexToScale`, `Inf`, `Integrate1D`, `interp`, `Interp2D`, `Interp3D`, `inverseERF`, `inverseERFC`, `ItemsInList`, `JacobiCn`, `JacobiSn`, `JulianToDate`, `Laguerre`, `LaguerreA`, `LaguerreGauss`, `LambertW`, `LayoutInfo`, `leftx`, `LegendreA`, `limit`, `ListMatch`, `ListToTextWave`, `ListToWaveRefWave`, `ln`, `log`, `logNormalNoise`, `lorentzianNoise`, `LowerStr`, `MacroList`, `magsqr`, `MandelbrotPoint`, `MarcumQ`, `MatrixCondition`, `MatrixDet`, `MatrixDot`, `MatrixRank`, `MatrixTrace`, `max`, `MCC_AutoBridgeBal`, `MCC_AutoFastComp`, `MCC_AutoPipetteOffset`, `MCC_AutoSlowComp`, `MCC_AutoWholeCellComp`, `MCC_GetBridgeBalEnable`, `MCC_GetBridgeBalResist`, 
`MCC_GetFastCompCap`, `MCC_GetFastCompTau`, `MCC_GetHolding`, `MCC_GetHoldingEnable`, `MCC_GetMode`, `MCC_GetNeutralizationCap`, `MCC_GetNeutralizationEnable`, `MCC_GetOscKillerEnable`, `MCC_GetPipetteOffset`, `MCC_GetPrimarySignalGain`, `MCC_GetPrimarySignalHPF`, `MCC_GetPrimarySignalLPF`, `MCC_GetRsCompBandwidth`, `MCC_GetRsCompCorrection`, `MCC_GetRsCompEnable`, `MCC_GetRsCompPrediction`, `MCC_GetSecondarySignalGain`, `MCC_GetSecondarySignalLPF`, `MCC_GetSlowCompCap`, `MCC_GetSlowCompTau`, `MCC_GetSlowCompTauX20Enable`, `MCC_GetSlowCurrentInjEnable`, `MCC_GetSlowCurrentInjLevel`, `MCC_GetSlowCurrentInjSetlTime`, `MCC_GetWholeCellCompCap`, `MCC_GetWholeCellCompEnable`, `MCC_GetWholeCellCompResist`, `MCC_SelectMultiClamp700B`, `MCC_SetBridgeBalEnable`, `MCC_SetBridgeBalResist`, `MCC_SetFastCompCap`, `MCC_SetFastCompTau`, `MCC_SetHolding`, `MCC_SetHoldingEnable`, `MCC_SetMode`, `MCC_SetNeutralizationCap`, `MCC_SetNeutralizationEnable`, `MCC_SetOscKillerEnable`, `MCC_SetPipetteOffset`, `MCC_SetPrimarySignalGain`, `MCC_SetPrimarySignalHPF`, `MCC_SetPrimarySignalLPF`, `MCC_SetRsCompBandwidth`, `MCC_SetRsCompCorrection`, `MCC_SetRsCompEnable`, `MCC_SetRsCompPrediction`, `MCC_SetSecondarySignalGain`, `MCC_SetSecondarySignalLPF`, `MCC_SetSlowCompCap`, `MCC_SetSlowCompTau`, `MCC_SetSlowCompTauX20Enable`, `MCC_SetSlowCurrentInjEnable`, `MCC_SetSlowCurrentInjLevel`, `MCC_SetSlowCurrentInjSetlTime`, `MCC_SetTimeoutMs`, `MCC_SetWholeCellCompCap`, `MCC_SetWholeCellCompEnable`, `MCC_SetWholeCellCompResist`, `mean`, `median`, `min`, `mod`, `ModDate`, `MPFXEMGPeak`, `MPFXExpConvExpPeak`, `MPFXGaussPeak`, `MPFXLorenzianPeak`, `MPFXVoigtPeak`, `NameOfWave`, `NaN`, `NewFreeDataFolder`, `NewFreeWave`, `norm`, `NormalizeUnicode`, `note`, `NumberByKey`, `numpnts`, `numtype`, `NumVarOrDefault`, `num2char`, `num2istr`, `num2str`, `NVAR_Exists`, `OperationList`, `PadString`, `PanelResolution`, `ParamIsDefault`, `ParseFilePath`, `PathList`, `pcsr`, `Pi`, `PICTInfo`, `PICTList`, 
`PixelFromAxisVal`, `pnt2x`, `poissonNoise`, `poly`, `PolygonArea`, `poly2D`, `PossiblyQuoteName`, `ProcedureText`, `p2rect`, `qcsr`, `real`, `RemoveByKey`, `RemoveEnding`, `RemoveFromList`, `RemoveListItem`, `ReplaceNumberByKey`, `ReplaceString`, `ReplaceStringByKey`, `rightx`, `round`, `r2polar`, `sawtooth`, `scaleToIndex`, `ScreenResolution`, `sec`, `sech`, `Secs2Date`, `Secs2Time`, `SelectNumber`, `SelectString`, `SetEnvironmentVariable`, `sign`, `sin`, `sinc`, `sinh`, `sinIntegral`, `SortList`, `SpecialCharacterInfo`, `SpecialCharacterList`, `SpecialDirPath`, `SphericalBessJ`, `SphericalBessJD`, `SphericalBessY`, `SphericalBessYD`, `SphericalHarmonics`, `SQLAllocHandle`, `SQLAllocStmt`, `SQLBinaryWavesToTextWave`, `SQLBindCol`, `SQLBindParameter`, `SQLBrowseConnect`, `SQLBulkOperations`, `SQLCancel`, `SQLCloseCursor`, `SQLColAttributeNum`, `SQLColAttributeStr`, `SQLColumnPrivileges`, `SQLColumns`, `SQLConnect`, `SQLDataSources`, `SQLDescribeCol`, `SQLDescribeParam`, `SQLDisconnect`, `SQLDriverConnect`, `SQLDrivers`, `SQLEndTran`, `SQLError`, `SQLExecDirect`, `SQLExecute`, `SQLFetch`, `SQLFetchScroll`, `SQLForeignKeys`, `SQLFreeConnect`, `SQLFreeEnv`, `SQLFreeHandle`, `SQLFreeStmt`, `SQLGetConnectAttrNum`, `SQLGetConnectAttrStr`, `SQLGetCursorName`, `SQLGetDataNum`, `SQLGetDataStr`, `SQLGetDescFieldNum`, `SQLGetDescFieldStr`, `SQLGetDescRec`, `SQLGetDiagFieldNum`, `SQLGetDiagFieldStr`, `SQLGetDiagRec`, `SQLGetEnvAttrNum`, `SQLGetEnvAttrStr`, `SQLGetFunctions`, `SQLGetInfoNum`, `SQLGetInfoStr`, `SQLGetStmtAttrNum`, `SQLGetStmtAttrStr`, `SQLGetTypeInfo`, `SQLMoreResults`, `SQLNativeSql`, `SQLNumParams`, `SQLNumResultCols`, `SQLNumResultRowsIfKnown`, `SQLNumRowsFetched`, `SQLParamData`, `SQLPrepare`, `SQLPrimaryKeys`, `SQLProcedureColumns`, `SQLProcedures`, `SQLPutData`, `SQLReinitialize`, `SQLRowCount`, `SQLSetConnectAttrNum`, `SQLSetConnectAttrStr`, `SQLSetCursorName`, `SQLSetDescFieldNum`, `SQLSetDescFieldStr`, `SQLSetDescRec`, `SQLSetEnvAttrNum`, 
`SQLSetEnvAttrStr`, `SQLSetPos`, `SQLSetStmtAttrNum`, `SQLSetStmtAttrStr`, `SQLSpecialColumns`, `SQLStatistics`, `SQLTablePrivileges`, `SQLTables`, `SQLTextWaveToBinaryWaves`, `SQLTextWaveTo2DBinaryWave`, `SQLUpdateBoundValues`, `SQLXOPCheckState`, `SQL2DBinaryWaveToTextWave`, `sqrt`, `StartMSTimer`, `StatsBetaCDF`, `StatsBetaPDF`, `StatsBinomialCDF`, `StatsBinomialPDF`, `StatsCauchyCDF`, `StatsCauchyPDF`, `StatsChiCDF`, `StatsChiPDF`, `StatsCMSSDCDF`, `StatsCorrelation`, `StatsDExpCDF`, `StatsDExpPDF`, `StatsErlangCDF`, `StatsErlangPDF`, `StatsErrorPDF`, `StatsEValueCDF`, `StatsEValuePDF`, `StatsExpCDF`, `StatsExpPDF`, `StatsFCDF`, `StatsFPDF`, `StatsFriedmanCDF`, `StatsGammaCDF`, `StatsGammaPDF`, `StatsGeometricCDF`, `StatsGeometricPDF`, `StatsGEVCDF`, `StatsGEVPDF`, `StatsHyperGCDF`, `StatsHyperGPDF`, `StatsInvBetaCDF`, `StatsInvBinomialCDF`, `StatsInvCauchyCDF`, `StatsInvChiCDF`, `StatsInvCMSSDCDF`, `StatsInvDExpCDF`, `StatsInvEValueCDF`, `StatsInvExpCDF`, `StatsInvFCDF`, `StatsInvFriedmanCDF`, `StatsInvGammaCDF`, `StatsInvGeometricCDF`, `StatsInvKuiperCDF`, `StatsInvLogisticCDF`, `StatsInvLogNormalCDF`, `StatsInvMaxwellCDF`, `StatsInvMooreCDF`, `StatsInvNBinomialCDF`, `StatsInvNCChiCDF`, `StatsInvNCFCDF`, `StatsInvNormalCDF`, `StatsInvParetoCDF`, `StatsInvPoissonCDF`, `StatsInvPowerCDF`, `StatsInvQCDF`, `StatsInvQpCDF`, `StatsInvRayleighCDF`, `StatsInvRectangularCDF`, `StatsInvSpearmanCDF`, `StatsInvStudentCDF`, `StatsInvTopDownCDF`, `StatsInvTriangularCDF`, `StatsInvUsquaredCDF`, `StatsInvVonMisesCDF`, `StatsInvWeibullCDF`, `StatsKuiperCDF`, `StatsLogisticCDF`, `StatsLogisticPDF`, `StatsLogNormalCDF`, `StatsLogNormalPDF`, `StatsMaxwellCDF`, `StatsMaxwellPDF`, `StatsMedian`, `StatsMooreCDF`, `StatsNBinomialCDF`, `StatsNBinomialPDF`, `StatsNCChiCDF`, `StatsNCChiPDF`, `StatsNCFCDF`, `StatsNCFPDF`, `StatsNCTCDF`, `StatsNCTPDF`, `StatsNormalCDF`, `StatsNormalPDF`, `StatsParetoCDF`, `StatsParetoPDF`, `StatsPermute`, `StatsPoissonCDF`, `StatsPoissonPDF`, 
`StatsPowerCDF`, `StatsPowerNoise`, `StatsPowerPDF`, `StatsQCDF`, `StatsQpCDF`, `StatsRayleighCDF`, `StatsRayleighPDF`, `StatsRectangularCDF`, `StatsRectangularPDF`, `StatsRunsCDF`, `StatsSpearmanRhoCDF`, `StatsStudentCDF`, `StatsStudentPDF`, `StatsTopDownCDF`, `StatsTriangularCDF`, `StatsTriangularPDF`, `StatsTrimmedMean`, `StatsUSquaredCDF`, `StatsVonMisesCDF`, `StatsVonMisesNoise`, `StatsVonMisesPDF`, `StatsWaldCDF`, `StatsWaldPDF`, `StatsWeibullCDF`, `StatsWeibullPDF`, `StopMSTimer`, `StringByKey`, `stringCRC`, `StringFromList`, `StringList`, `stringmatch`, `strlen`, `strsearch`, `StrVarOrDefault`, `str2num`, `StudentA`, `StudentT`, `sum`, `SVAR_Exists`, `TableInfo`, `TagVal`, `TagWaveRef`, `tan`, `tango_close_device`, `tango_command_inout`, `tango_compute_image_proj`, `tango_get_dev_attr_list`, `tango_get_dev_black_box`, `tango_get_dev_cmd_list`, `tango_get_dev_status`, `tango_get_dev_timeout`, `tango_get_error_stack`, `tango_open_device`, `tango_ping_device`, `tango_read_attribute`, `tango_read_attributes`, `tango_reload_dev_interface`, `tango_resume_attr_monitor`, `tango_set_attr_monitor_period`, `tango_set_dev_timeout`, `tango_start_attr_monitor`, `tango_stop_attr_monitor`, `tango_suspend_attr_monitor`, `tango_write_attribute`, `tango_write_attributes`, `tanh`, `TDMAddChannel`, `TDMAddGroup`, `TDMAppendDataValues`, `TDMAppendDataValuesTime`, `TDMChannelPropertyExists`, `TDMCloseChannel`, `TDMCloseFile`, `TDMCloseGroup`, `TDMCreateChannelProperty`, `TDMCreateFile`, `TDMCreateFileProperty`, `TDMCreateGroupProperty`, `TDMFilePropertyExists`, `TDMGetChannelPropertyNames`, `TDMGetChannelPropertyNum`, `TDMGetChannelPropertyStr`, `TDMGetChannelPropertyTime`, `TDMGetChannelPropertyType`, `TDMGetChannels`, `TDMGetChannelStringPropertyLen`, `TDMGetDataType`, `TDMGetDataValues`, `TDMGetDataValuesTime`, `TDMGetFilePropertyNames`, `TDMGetFilePropertyNum`, `TDMGetFilePropertyStr`, `TDMGetFilePropertyTime`, `TDMGetFilePropertyType`, `TDMGetFileStringPropertyLen`, 
`TDMGetGroupPropertyNames`, `TDMGetGroupPropertyNum`, `TDMGetGroupPropertyStr`, `TDMGetGroupPropertyTime`, `TDMGetGroupPropertyType`, `TDMGetGroups`, `TDMGetGroupStringPropertyLen`, `TDMGetLibraryErrorDescription`, `TDMGetNumChannelProperties`, `TDMGetNumChannels`, `TDMGetNumDataValues`, `TDMGetNumFileProperties`, `TDMGetNumGroupProperties`, `TDMGetNumGroups`, `TDMGroupPropertyExists`, `TDMOpenFile`, `TDMOpenFileEx`, `TDMRemoveChannel`, `TDMRemoveGroup`, `TDMReplaceDataValues`, `TDMReplaceDataValuesTime`, `TDMSaveFile`, `TDMSetChannelPropertyNum`, `TDMSetChannelPropertyStr`, `TDMSetChannelPropertyTime`, `TDMSetDataValues`, `TDMSetDataValuesTime`, `TDMSetFilePropertyNum`, `TDMSetFilePropertyStr`, `TDMSetFilePropertyTime`, `TDMSetGroupPropertyNum`, `TDMSetGroupPropertyStr`, `TDMSetGroupPropertyTime`, `TextEncodingCode`, `TextEncodingName`, `TextFile`, `ThreadGroupCreate`, `ThreadGroupGetDF`, `ThreadGroupGetDFR`, `ThreadGroupRelease`, `ThreadGroupWait`, `ThreadProcessorCount`, `ThreadReturnValue`, `ticks`, `time`, `TraceFromPixel`, `TraceInfo`, `TraceNameList`, `TraceNameToWaveRef`, `TrimString`, `trunc`, `UniqueName`, `UnPadString`, `UnsetEnvironmentVariable`, `UpperStr`, `URLDecode`, `URLEncode`, `VariableList`, `Variance`, `vcsr`, `viAssertIntrSignal`, `viAssertTrigger`, `viAssertUtilSignal`, `viClear`, `viClose`, `viDisableEvent`, `viDiscardEvents`, `viEnableEvent`, `viFindNext`, `viFindRsrc`, `viGetAttribute`, `viGetAttributeString`, `viGpibCommand`, `viGpibControlATN`, `viGpibControlREN`, `viGpibPassControl`, `viGpibSendIFC`, `viIn8`, `viIn16`, `viIn32`, `viLock`, `viMapAddress`, `viMapTrigger`, `viMemAlloc`, `viMemFree`, `viMoveIn8`, `viMoveIn16`, `viMoveIn32`, `viMoveOut8`, `viMoveOut16`, `viMoveOut32`, `viOpen`, `viOpenDefaultRM`, `viOut8`, `viOut16`, `viOut32`, `viPeek8`, `viPeek16`, `viPeek32`, `viPoke8`, `viPoke16`, `viPoke32`, `viRead`, `viReadSTB`, `viSetAttribute`, `viSetAttributeString`, `viStatusDesc`, `viTerminate`, `viUnlock`, `viUnmapAddress`, 
`viUnmapTrigger`, `viUsbControlIn`, `viUsbControlOut`, `viVxiCommandQuery`, `viWaitOnEvent`, `viWrite`, `VoigtFunc`, `VoigtPeak`, `WaveCRC`, `WaveDims`, `WaveExists`, `WaveHash`, `WaveInfo`, `WaveList`, `WaveMax`, `WaveMin`, `WaveName`, `WaveRefIndexed`, `WaveRefIndexedDFR`, `WaveRefsEqual`, `WaveRefWaveToList`, `WaveTextEncoding`, `WaveType`, `WaveUnits`, `WhichListItem`, `WinList`, `WinName`, `WinRecreation`, `WinType`, `wnoise`, `xcsr`, `XWaveName`, `XWaveRefFromTrace`, `x2pnt`, `zcsr`, `ZernikeR`, `zeromq_client_connect`, `zeromq_client_connect`, `zeromq_client_recv`, `zeromq_client_recv`, `zeromq_client_send`, `zeromq_client_send`, `zeromq_handler_start`, `zeromq_handler_start`, `zeromq_handler_stop`, `zeromq_handler_stop`, `zeromq_server_bind`, `zeromq_server_bind`, `zeromq_server_recv`, `zeromq_server_recv`, `zeromq_server_send`, `zeromq_server_send`, `zeromq_set`, `zeromq_set`, `zeromq_stop`, `zeromq_stop`, `zeromq_test_callfunction`, `zeromq_test_callfunction`, `zeromq_test_serializeWave`, `zeromq_test_serializeWave`, `zeta`), NameFunction, nil}, - {`^#(include|pragma|define|undef|ifdef|ifndef|if|elif|else|endif)`, NameDecorator, nil}, - {`[^a-z"/]+$`, Text, nil}, - {`.`, Text, nil}, - {`\n|\r`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/ini.go b/vendor/github.com/alecthomas/chroma/lexers/i/ini.go deleted file mode 100644 index 46b2ce238550..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/i/ini.go +++ /dev/null @@ -1,29 +0,0 @@ -package i - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Ini lexer. 
-var Ini = internal.Register(MustNewLazyLexer( - &Config{ - Name: "INI", - Aliases: []string{"ini", "cfg", "dosini"}, - Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"}, - MimeTypes: []string{"text/x-ini", "text/inf"}, - }, - iniRules, -)) - -func iniRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`[;#].*`, CommentSingle, nil}, - {`\[.*?\]$`, Keyword, nil}, - {`(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Text, Operator, Text, LiteralString), nil}, - {`(.+?)$`, NameAttribute, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/io.go b/vendor/github.com/alecthomas/chroma/lexers/i/io.go deleted file mode 100644 index 8b2e53a0742d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/i/io.go +++ /dev/null @@ -1,44 +0,0 @@ -package i - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Io lexer. -var Io = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Io", - Aliases: []string{"io"}, - Filenames: []string{"*.io"}, - MimeTypes: []string{"text/x-iosrc"}, - }, - ioRules, -)) - -func ioRules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`//(.*?)\n`, CommentSingle, nil}, - {`#(.*?)\n`, CommentSingle, nil}, - {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/\+`, CommentMultiline, Push("nestedcomment")}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}`, Operator, nil}, - {`(clone|do|doFile|doString|method|for|if|else|elseif|then)\b`, Keyword, nil}, - {`(nil|false|true)\b`, NameConstant, nil}, - {`(Object|list|List|Map|args|Sequence|Coroutine|File)\b`, NameBuiltin, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - {`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - }, - "nestedcomment": { - {`[^+/]+`, CommentMultiline, nil}, - {`/\+`, 
CommentMultiline, Push()}, - {`\+/`, CommentMultiline, Pop(1)}, - {`[+/]`, CommentMultiline, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/j.go b/vendor/github.com/alecthomas/chroma/lexers/j/j.go deleted file mode 100644 index 9a2a4e3c044d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/j/j.go +++ /dev/null @@ -1,77 +0,0 @@ -package j - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// J lexer. -var J = internal.Register(MustNewLazyLexer( - &Config{ - Name: "J", - Aliases: []string{"j"}, - Filenames: []string{"*.ijs"}, - MimeTypes: []string{"text/x-j"}, - }, - jRules, -)) - -func jRules() Rules { - return Rules{ - "root": { - {`#!.*$`, CommentPreproc, nil}, - {`NB\..*`, CommentSingle, nil}, - {`\n+\s*Note`, CommentMultiline, Push("comment")}, - {`\s*Note.*`, CommentSingle, nil}, - {`\s+`, Text, nil}, - {`'`, LiteralString, Push("singlequote")}, - {`0\s+:\s*0|noun\s+define\s*$`, NameEntity, Push("nounDefinition")}, - {`(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b`, NameFunction, Push("explicitDefinition")}, - {Words(``, `\b[a-zA-Z]\w*\.`, `for_`, `goto_`, `label_`), NameLabel, nil}, - {Words(``, `\.`, `assert`, `break`, `case`, `catch`, `catchd`, `catcht`, `continue`, `do`, `else`, `elseif`, `end`, `fcase`, `for`, `if`, `return`, `select`, `throw`, `try`, `while`, `whilst`), NameLabel, nil}, - {`\b[a-zA-Z]\w*`, NameVariable, nil}, - {Words(``, ``, `ARGV`, `CR`, `CRLF`, `DEL`, `Debug`, `EAV`, `EMPTY`, `FF`, `JVERSION`, `LF`, `LF2`, `Note`, `TAB`, `alpha17`, `alpha27`, `apply`, `bind`, `boxopen`, `boxxopen`, `bx`, `clear`, `cutLF`, `cutopen`, `datatype`, `def`, `dfh`, `drop`, `each`, `echo`, `empty`, `erase`, `every`, `evtloop`, `exit`, `expand`, `fetch`, `file2url`, `fixdotdot`, `fliprgb`, `getargs`, `getenv`, `hfd`, `inv`, `inverse`, `iospath`, `isatty`, `isutf8`, `items`, `leaf`, `list`, `nameclass`, `namelist`, `names`, `nc`, `nl`, 
`on`, `pick`, `rows`, `script`, `scriptd`, `sign`, `sminfo`, `smoutput`, `sort`, `split`, `stderr`, `stdin`, `stdout`, `table`, `take`, `timespacex`, `timex`, `tmoutput`, `toCRLF`, `toHOST`, `toJ`, `tolower`, `toupper`, `type`, `ucp`, `ucpcount`, `usleep`, `utf8`, `uucp`), NameFunction, nil}, - {`=[.:]`, Operator, nil}, - {"[-=+*#$%@!~`^&\";:.,<>{}\\[\\]\\\\|/]", Operator, nil}, - {`[abCdDeEfHiIjLMoprtT]\.`, KeywordReserved, nil}, - {`[aDiLpqsStux]\:`, KeywordReserved, nil}, - {`(_[0-9])\:`, KeywordConstant, nil}, - {`\(`, Punctuation, Push("parentheses")}, - Include("numbers"), - }, - "comment": { - {`[^)]`, CommentMultiline, nil}, - {`^\)`, CommentMultiline, Pop(1)}, - {`[)]`, CommentMultiline, nil}, - }, - "explicitDefinition": { - {`\b[nmuvxy]\b`, NameDecorator, nil}, - Include("root"), - {`[^)]`, Name, nil}, - {`^\)`, NameLabel, Pop(1)}, - {`[)]`, Name, nil}, - }, - "numbers": { - {`\b_{1,2}\b`, LiteralNumber, nil}, - {`_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?`, LiteralNumber, nil}, - {`_?\d+\.(?=\d+)`, LiteralNumberFloat, nil}, - {`_?\d+x`, LiteralNumberIntegerLong, nil}, - {`_?\d+`, LiteralNumberInteger, nil}, - }, - "nounDefinition": { - {`[^)]`, LiteralString, nil}, - {`^\)`, NameLabel, Pop(1)}, - {`[)]`, LiteralString, nil}, - }, - "parentheses": { - {`\)`, Punctuation, Pop(1)}, - Include("explicitDefinition"), - Include("root"), - }, - "singlequote": { - {`[^']`, LiteralString, nil}, - {`''`, LiteralString, nil}, - {`'`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/java.go b/vendor/github.com/alecthomas/chroma/lexers/j/java.go deleted file mode 100644 index 48a9d9fee841..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/j/java.go +++ /dev/null @@ -1,56 +0,0 @@ -package j - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Java lexer. 
-var Java = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Java", - Aliases: []string{"java"}, - Filenames: []string{"*.java"}, - MimeTypes: []string{"text/x-java"}, - DotAll: true, - EnsureNL: true, - }, - javaRules, -)) - -func javaRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`(assert|break|case|catch|continue|default|do|else|finally|for|if|goto|instanceof|new|return|switch|this|throw|try|while)\b`, Keyword, nil}, - {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, - {`@[^\W\d][\w.]*`, NameDecorator, nil}, - {`(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b`, KeywordDeclaration, nil}, - {`(boolean|byte|char|double|float|int|long|short|void)\b`, KeywordType, nil}, - {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`(true|false|null)\b`, KeywordConstant, nil}, - {`(class|interface)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, - {`(import(?:\s+static)?)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, - {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, - {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, - {`([^\W\d]|\$)[\w$]*`, Name, nil}, - {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil}, - {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, - {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, - {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, - 
{`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, - {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, - {`\n`, Text, nil}, - }, - "class": { - {`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)}, - }, - "import": { - {`[\w.]+\*?`, NameNamespace, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go b/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go deleted file mode 100644 index 5c6b9379418b..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go +++ /dev/null @@ -1,74 +0,0 @@ -package j - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Javascript lexer. -var JavascriptRules = Rules{ - "commentsandwhitespace": { - {`\s+`, Text, nil}, - {``, `<--`, `<-->`, `>`, `<`, `>=`, `≥`, `<=`, `≤`, `==`, `===`, `≡`, `!=`, `≠`, `!==`, `≢`, `∈`, `∉`, `∋`, `∌`, `⊆`, `⊈`, `⊂`, `⊄`, `⊊`, `∝`, `∊`, `∍`, `∥`, `∦`, `∷`, `∺`, `∻`, `∽`, `∾`, `≁`, `≃`, `≂`, `≄`, `≅`, `≆`, `≇`, `≈`, `≉`, `≊`, `≋`, `≌`, `≍`, `≎`, `≐`, `≑`, `≒`, `≓`, `≖`, `≗`, `≘`, `≙`, `≚`, `≛`, `≜`, `≝`, `≞`, `≟`, `≣`, `≦`, `≧`, `≨`, `≩`, `≪`, `≫`, `≬`, `≭`, `≮`, `≯`, `≰`, `≱`, `≲`, `≳`, `≴`, `≵`, `≶`, `≷`, `≸`, `≹`, `≺`, `≻`, `≼`, `≽`, `≾`, `≿`, `⊀`, `⊁`, `⊃`, `⊅`, `⊇`, `⊉`, `⊋`, `⊏`, `⊐`, `⊑`, `⊒`, `⊜`, `⊩`, `⊬`, `⊮`, `⊰`, `⊱`, `⊲`, `⊳`, `⊴`, `⊵`, `⊶`, `⊷`, `⋍`, `⋐`, `⋑`, `⋕`, `⋖`, `⋗`, `⋘`, `⋙`, `⋚`, `⋛`, `⋜`, `⋝`, `⋞`, `⋟`, `⋠`, `⋡`, `⋢`, `⋣`, `⋤`, `⋥`, `⋦`, `⋧`, `⋨`, `⋩`, `⋪`, `⋫`, `⋬`, `⋭`, `⋲`, `⋳`, `⋴`, `⋵`, `⋶`, `⋷`, `⋸`, `⋹`, `⋺`, `⋻`, `⋼`, `⋽`, `⋾`, `⋿`, `⟈`, `⟉`, `⟒`, `⦷`, `⧀`, `⧁`, `⧡`, `⧣`, `⧤`, `⧥`, `⩦`, `⩧`, `⩪`, `⩫`, `⩬`, `⩭`, `⩮`, `⩯`, `⩰`, `⩱`, `⩲`, `⩳`, `⩵`, `⩶`, `⩷`, `⩸`, `⩹`, `⩺`, `⩻`, `⩼`, `⩽`, `⩾`, `⩿`, `⪀`, `⪁`, `⪂`, `⪃`, `⪄`, `⪅`, `⪆`, `⪇`, `⪈`, `⪉`, `⪊`, `⪋`, `⪌`, `⪍`, `⪎`, `⪏`, `⪐`, `⪑`, `⪒`, `⪓`, `⪔`, `⪕`, `⪖`, `⪗`, `⪘`, `⪙`, `⪚`, `⪛`, `⪜`, `⪝`, `⪞`, `⪟`, `⪠`, `⪡`, `⪢`, `⪣`, `⪤`, `⪥`, `⪦`, `⪧`, `⪨`, `⪩`, `⪪`, `⪫`, `⪬`, `⪭`, `⪮`, `⪯`, `⪰`, `⪱`, `⪲`, `⪳`, `⪴`, `⪵`, `⪶`, 
`⪷`, `⪸`, `⪹`, `⪺`, `⪻`, `⪼`, `⪽`, `⪾`, `⪿`, `⫀`, `⫁`, `⫂`, `⫃`, `⫄`, `⫅`, `⫆`, `⫇`, `⫈`, `⫉`, `⫊`, `⫋`, `⫌`, `⫍`, `⫎`, `⫏`, `⫐`, `⫑`, `⫒`, `⫓`, `⫔`, `⫕`, `⫖`, `⫗`, `⫘`, `⫙`, `⫷`, `⫸`, `⫹`, `⫺`, `⊢`, `⊣`, `⟂`, `<:`, `>:`, `<|`, `|>`, `…`, `⁝`, `⋮`, `⋱`, `⋰`, `⋯`, `+`, `-`, `¦`, `|`, `⊕`, `⊖`, `⊞`, `⊟`, `++`, `∪`, `∨`, `⊔`, `±`, `∓`, `∔`, `∸`, `≏`, `⊎`, `⊻`, `⊽`, `⋎`, `⋓`, `⧺`, `⧻`, `⨈`, `⨢`, `⨣`, `⨤`, `⨥`, `⨦`, `⨧`, `⨨`, `⨩`, `⨪`, `⨫`, `⨬`, `⨭`, `⨮`, `⨹`, `⨺`, `⩁`, `⩂`, `⩅`, `⩊`, `⩌`, `⩏`, `⩐`, `⩒`, `⩔`, `⩖`, `⩗`, `⩛`, `⩝`, `⩡`, `⩢`, `⩣`, `*`, `/`, `⌿`, `÷`, `%`, `&`, `⋅`, `∘`, `×`, `\`, `∩`, `∧`, `⊗`, `⊘`, `⊙`, `⊚`, `⊛`, `⊠`, `⊡`, `⊓`, `∗`, `∙`, `∤`, `⅋`, `≀`, `⊼`, `⋄`, `⋆`, `⋇`, `⋉`, `⋊`, `⋋`, `⋌`, `⋏`, `⋒`, `⟑`, `⦸`, `⦼`, `⦾`, `⦿`, `⧶`, `⧷`, `⨇`, `⨰`, `⨱`, `⨲`, `⨳`, `⨴`, `⨵`, `⨶`, `⨷`, `⨸`, `⨻`, `⨼`, `⨽`, `⩀`, `⩃`, `⩄`, `⩋`, `⩍`, `⩎`, `⩑`, `⩓`, `⩕`, `⩘`, `⩚`, `⩜`, `⩞`, `⩟`, `⩠`, `⫛`, `⊍`, `▷`, `⨝`, `⟕`, `⟖`, `⟗`, `⨟`, `//`, `>>`, `<<`, `>>>`, `^`, `↑`, `↓`, `⇵`, `⟰`, `⟱`, `⤈`, `⤉`, `⤊`, `⤋`, `⤒`, `⤓`, `⥉`, `⥌`, `⥍`, `⥏`, `⥑`, `⥔`, `⥕`, `⥘`, `⥙`, `⥜`, `⥝`, `⥠`, `⥡`, `⥣`, `⥥`, `⥮`, `⥯`, `↑`, `↓`, `!`, `¬`, `√`, `∛`, `∜`), Operator, nil}, - {Words(``, `[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*`, `.=`, `.+=`, `.-=`, `.*=`, `./=`, `.//=`, `.\=`, `.^=`, `.÷=`, `.%=`, `.<<=`, `.>>=`, `.>>>=`, `.|=`, `.&=`, `.⊻=`, `.≔`, `.⩴`, `.≕'`, `.~`, `.=>`, `.→`, `.↔`, `.↚`, `.↛`, `.↞`, `.↠`, `.↢`, `.↣`, `.↦`, `.↤`, `.↮`, `.⇎`, `.⇍`, `.⇏`, `.⇐`, `.⇒`, `.⇔`, `.⇴`, `.⇶`, `.⇷`, `.⇸`, `.⇹`, `.⇺`, `.⇻`, `.⇼`, `.⇽`, `.⇾`, `.⇿`, `.⟵`, `.⟶`, `.⟷`, `.⟹`, `.⟺`, `.⟻`, `.⟼`, `.⟽`, `.⟾`, `.⟿`, `.⤀`, `.⤁`, `.⤂`, `.⤃`, `.⤄`, `.⤅`, `.⤆`, `.⤇`, `.⤌`, `.⤍`, `.⤎`, `.⤏`, `.⤐`, `.⤑`, `.⤔`, `.⤕`, `.⤖`, `.⤗`, `.⤘`, `.⤝`, `.⤞`, `.⤟`, `.⤠`, `.⥄`, `.⥅`, `.⥆`, `.⥇`, `.⥈`, `.⥊`, `.⥋`, `.⥎`, `.⥐`, `.⥒`, `.⥓`, `.⥖`, `.⥗`, `.⥚`, `.⥛`, `.⥞`, `.⥟`, `.⥢`, `.⥤`, `.⥦`, `.⥧`, `.⥨`, `.⥩`, `.⥪`, `.⥫`, `.⥬`, `.⥭`, `.⥰`, `.⧴`, `.⬱`, `.⬰`, `.⬲`, 
`.⬳`, `.⬴`, `.⬵`, `.⬶`, `.⬷`, `.⬸`, `.⬹`, `.⬺`, `.⬻`, `.⬼`, `.⬽`, `.⬾`, `.⬿`, `.⭀`, `.⭁`, `.⭂`, `.⭃`, `.⭄`, `.⭇`, `.⭈`, `.⭉`, `.⭊`, `.⭋`, `.⭌`, `.←`, `.→`, `.⇜`, `.⇝`, `.↜`, `.↝`, `.↩`, `.↪`, `.↫`, `.↬`, `.↼`, `.↽`, `.⇀`, `.⇁`, `.⇄`, `.⇆`, `.⇇`, `.⇉`, `.⇋`, `.⇌`, `.⇚`, `.⇛`, `.⇠`, `.⇢`, `.↷`, `.↶`, `.↺`, `.↻`, `.-->`, `.<--`, `.<-->`, `.>`, `.<`, `.>=`, `.≥`, `.<=`, `.≤`, `.==`, `.===`, `.≡`, `.!=`, `.≠`, `.!==`, `.≢`, `.∈`, `.∉`, `.∋`, `.∌`, `.⊆`, `.⊈`, `.⊂`, `.⊄`, `.⊊`, `.∝`, `.∊`, `.∍`, `.∥`, `.∦`, `.∷`, `.∺`, `.∻`, `.∽`, `.∾`, `.≁`, `.≃`, `.≂`, `.≄`, `.≅`, `.≆`, `.≇`, `.≈`, `.≉`, `.≊`, `.≋`, `.≌`, `.≍`, `.≎`, `.≐`, `.≑`, `.≒`, `.≓`, `.≖`, `.≗`, `.≘`, `.≙`, `.≚`, `.≛`, `.≜`, `.≝`, `.≞`, `.≟`, `.≣`, `.≦`, `.≧`, `.≨`, `.≩`, `.≪`, `.≫`, `.≬`, `.≭`, `.≮`, `.≯`, `.≰`, `.≱`, `.≲`, `.≳`, `.≴`, `.≵`, `.≶`, `.≷`, `.≸`, `.≹`, `.≺`, `.≻`, `.≼`, `.≽`, `.≾`, `.≿`, `.⊀`, `.⊁`, `.⊃`, `.⊅`, `.⊇`, `.⊉`, `.⊋`, `.⊏`, `.⊐`, `.⊑`, `.⊒`, `.⊜`, `.⊩`, `.⊬`, `.⊮`, `.⊰`, `.⊱`, `.⊲`, `.⊳`, `.⊴`, `.⊵`, `.⊶`, `.⊷`, `.⋍`, `.⋐`, `.⋑`, `.⋕`, `.⋖`, `.⋗`, `.⋘`, `.⋙`, `.⋚`, `.⋛`, `.⋜`, `.⋝`, `.⋞`, `.⋟`, `.⋠`, `.⋡`, `.⋢`, `.⋣`, `.⋤`, `.⋥`, `.⋦`, `.⋧`, `.⋨`, `.⋩`, `.⋪`, `.⋫`, `.⋬`, `.⋭`, `.⋲`, `.⋳`, `.⋴`, `.⋵`, `.⋶`, `.⋷`, `.⋸`, `.⋹`, `.⋺`, `.⋻`, `.⋼`, `.⋽`, `.⋾`, `.⋿`, `.⟈`, `.⟉`, `.⟒`, `.⦷`, `.⧀`, `.⧁`, `.⧡`, `.⧣`, `.⧤`, `.⧥`, `.⩦`, `.⩧`, `.⩪`, `.⩫`, `.⩬`, `.⩭`, `.⩮`, `.⩯`, `.⩰`, `.⩱`, `.⩲`, `.⩳`, `.⩵`, `.⩶`, `.⩷`, `.⩸`, `.⩹`, `.⩺`, `.⩻`, `.⩼`, `.⩽`, `.⩾`, `.⩿`, `.⪀`, `.⪁`, `.⪂`, `.⪃`, `.⪄`, `.⪅`, `.⪆`, `.⪇`, `.⪈`, `.⪉`, `.⪊`, `.⪋`, `.⪌`, `.⪍`, `.⪎`, `.⪏`, `.⪐`, `.⪑`, `.⪒`, `.⪓`, `.⪔`, `.⪕`, `.⪖`, `.⪗`, `.⪘`, `.⪙`, `.⪚`, `.⪛`, `.⪜`, `.⪝`, `.⪞`, `.⪟`, `.⪠`, `.⪡`, `.⪢`, `.⪣`, `.⪤`, `.⪥`, `.⪦`, `.⪧`, `.⪨`, `.⪩`, `.⪪`, `.⪫`, `.⪬`, `.⪭`, `.⪮`, `.⪯`, `.⪰`, `.⪱`, `.⪲`, `.⪳`, `.⪴`, `.⪵`, `.⪶`, `.⪷`, `.⪸`, `.⪹`, `.⪺`, `.⪻`, `.⪼`, `.⪽`, `.⪾`, `.⪿`, `.⫀`, `.⫁`, `.⫂`, `.⫃`, `.⫄`, `.⫅`, `.⫆`, `.⫇`, `.⫈`, `.⫉`, `.⫊`, `.⫋`, `.⫌`, `.⫍`, `.⫎`, `.⫏`, `.⫐`, `.⫑`, `.⫒`, `.⫓`, `.⫔`, `.⫕`, `.⫖`, `.⫗`, `.⫘`, `.⫙`, 
`.⫷`, `.⫸`, `.⫹`, `.⫺`, `.⊢`, `.⊣`, `.⟂`, `.<:`, `.>:`, `.<|`, `.|>`, `.…`, `.⁝`, `.⋮`, `.⋱`, `.⋰`, `.⋯`, `.+`, `.-`, `.¦`, `.|`, `.⊕`, `.⊖`, `.⊞`, `.⊟`, `.++`, `.∪`, `.∨`, `.⊔`, `.±`, `.∓`, `.∔`, `.∸`, `.≏`, `.⊎`, `.⊻`, `.⊽`, `.⋎`, `.⋓`, `.⧺`, `.⧻`, `.⨈`, `.⨢`, `.⨣`, `.⨤`, `.⨥`, `.⨦`, `.⨧`, `.⨨`, `.⨩`, `.⨪`, `.⨫`, `.⨬`, `.⨭`, `.⨮`, `.⨹`, `.⨺`, `.⩁`, `.⩂`, `.⩅`, `.⩊`, `.⩌`, `.⩏`, `.⩐`, `.⩒`, `.⩔`, `.⩖`, `.⩗`, `.⩛`, `.⩝`, `.⩡`, `.⩢`, `.⩣`, `.*`, `./`, `.⌿`, `.÷`, `.%`, `.&`, `.⋅`, `.∘`, `.×`, `.\`, `.∩`, `.∧`, `.⊗`, `.⊘`, `.⊙`, `.⊚`, `.⊛`, `.⊠`, `.⊡`, `.⊓`, `.∗`, `.∙`, `.∤`, `.⅋`, `.≀`, `.⊼`, `.⋄`, `.⋆`, `.⋇`, `.⋉`, `.⋊`, `.⋋`, `.⋌`, `.⋏`, `.⋒`, `.⟑`, `.⦸`, `.⦼`, `.⦾`, `.⦿`, `.⧶`, `.⧷`, `.⨇`, `.⨰`, `.⨱`, `.⨲`, `.⨳`, `.⨴`, `.⨵`, `.⨶`, `.⨷`, `.⨸`, `.⨻`, `.⨼`, `.⨽`, `.⩀`, `.⩃`, `.⩄`, `.⩋`, `.⩍`, `.⩎`, `.⩑`, `.⩓`, `.⩕`, `.⩘`, `.⩚`, `.⩜`, `.⩞`, `.⩟`, `.⩠`, `.⫛`, `.⊍`, `.▷`, `.⨝`, `.⟕`, `.⟖`, `.⟗`, `.⨟`, `.//`, `.>>`, `.<<`, `.>>>`, `.^`, `.↑`, `.↓`, `.⇵`, `.⟰`, `.⟱`, `.⤈`, `.⤉`, `.⤊`, `.⤋`, `.⤒`, `.⤓`, `.⥉`, `.⥌`, `.⥍`, `.⥏`, `.⥑`, `.⥔`, `.⥕`, `.⥘`, `.⥙`, `.⥜`, `.⥝`, `.⥠`, `.⥡`, `.⥣`, `.⥥`, `.⥮`, `.⥯`, `.↑`, `.↓`, `.!`, `.¬`, `.√`, `.∛`, `.∜`), Operator, nil}, - {Words(``, ``, `...`, `..`), Operator, nil}, - {`'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'`, LiteralStringChar, nil}, - {`(?<=[.\w)\]])(\'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*)+`, Operator, nil}, - {`(raw)(""")`, ByGroups(LiteralStringAffix, LiteralString), Push("tqrawstring")}, - {`(raw)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("rawstring")}, - {`(r)(""")`, ByGroups(LiteralStringAffix, LiteralStringRegex), Push("tqregex")}, - {`(r)(")`, ByGroups(LiteralStringAffix, LiteralStringRegex), Push("regex")}, - {`((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))?(""")`, ByGroups(LiteralStringAffix, LiteralString), Push("tqstring")}, - {`((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))?(")`, 
ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - {"((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*))?(```)", ByGroups(LiteralStringAffix, LiteralStringBacktick), Push("tqcommand")}, - {"((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*))?(`)", ByGroups(LiteralStringAffix, LiteralStringBacktick), Push("command")}, - {`((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))(\{)`, ByGroups(KeywordType, Punctuation), Push("curly")}, - {`(where)(\s+)((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))`, ByGroups(Keyword, Text, KeywordType), nil}, - {`(\{)`, Punctuation, Push("curly")}, - {`(abstract[ \t]+type|primitive[ \t]+type|mutable[ \t]+struct|struct)([\s()]+)((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))`, ByGroups(Keyword, Text, KeywordType), nil}, - {`@(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, NameDecorator, nil}, - {Words(`@`, `[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*`, `->`, `:=`, `$=`, `?`, `||`, `&&`, `:`, `$`, `::`, `..`, `.`, `=`, `+=`, `-=`, `*=`, `/=`, `//=`, `\=`, `^=`, `÷=`, `%=`, `<<=`, `>>=`, `>>>=`, `|=`, `&=`, `⊻=`, `≔`, `⩴`, `≕'`, `~`, `=>`, `→`, `↔`, `↚`, `↛`, `↞`, `↠`, `↢`, `↣`, `↦`, `↤`, `↮`, `⇎`, `⇍`, `⇏`, `⇐`, `⇒`, `⇔`, `⇴`, `⇶`, `⇷`, `⇸`, `⇹`, `⇺`, `⇻`, `⇼`, `⇽`, `⇾`, `⇿`, `⟵`, `⟶`, `⟷`, `⟹`, `⟺`, `⟻`, `⟼`, `⟽`, `⟾`, `⟿`, `⤀`, `⤁`, `⤂`, `⤃`, `⤄`, `⤅`, `⤆`, `⤇`, `⤌`, `⤍`, `⤎`, `⤏`, `⤐`, `⤑`, `⤔`, `⤕`, `⤖`, `⤗`, `⤘`, `⤝`, `⤞`, `⤟`, `⤠`, `⥄`, `⥅`, `⥆`, `⥇`, `⥈`, `⥊`, `⥋`, `⥎`, `⥐`, `⥒`, `⥓`, `⥖`, `⥗`, `⥚`, `⥛`, `⥞`, `⥟`, `⥢`, `⥤`, `⥦`, `⥧`, `⥨`, `⥩`, `⥪`, `⥫`, `⥬`, `⥭`, `⥰`, `⧴`, `⬱`, `⬰`, `⬲`, `⬳`, `⬴`, `⬵`, `⬶`, `⬷`, `⬸`, `⬹`, `⬺`, `⬻`, `⬼`, `⬽`, `⬾`, `⬿`, `⭀`, `⭁`, `⭂`, `⭃`, `⭄`, `⭇`, `⭈`, `⭉`, `⭊`, `⭋`, `⭌`, `←`, `→`, `⇜`, `⇝`, `↜`, `↝`, `↩`, `↪`, `↫`, `↬`, `↼`, `↽`, `⇀`, `⇁`, `⇄`, `⇆`, `⇇`, `⇉`, `⇋`, `⇌`, `⇚`, `⇛`, `⇠`, `⇢`, `↷`, `↶`, `↺`, `↻`, `-->`, `<--`, `<-->`, `>`, `<`, `>=`, `≥`, `<=`, `≤`, `==`, `===`, `≡`, `!=`, `≠`, `!==`, `≢`, `∈`, `∉`, `∋`, `∌`, `⊆`, `⊈`, `⊂`, `⊄`, 
`⊊`, `∝`, `∊`, `∍`, `∥`, `∦`, `∷`, `∺`, `∻`, `∽`, `∾`, `≁`, `≃`, `≂`, `≄`, `≅`, `≆`, `≇`, `≈`, `≉`, `≊`, `≋`, `≌`, `≍`, `≎`, `≐`, `≑`, `≒`, `≓`, `≖`, `≗`, `≘`, `≙`, `≚`, `≛`, `≜`, `≝`, `≞`, `≟`, `≣`, `≦`, `≧`, `≨`, `≩`, `≪`, `≫`, `≬`, `≭`, `≮`, `≯`, `≰`, `≱`, `≲`, `≳`, `≴`, `≵`, `≶`, `≷`, `≸`, `≹`, `≺`, `≻`, `≼`, `≽`, `≾`, `≿`, `⊀`, `⊁`, `⊃`, `⊅`, `⊇`, `⊉`, `⊋`, `⊏`, `⊐`, `⊑`, `⊒`, `⊜`, `⊩`, `⊬`, `⊮`, `⊰`, `⊱`, `⊲`, `⊳`, `⊴`, `⊵`, `⊶`, `⊷`, `⋍`, `⋐`, `⋑`, `⋕`, `⋖`, `⋗`, `⋘`, `⋙`, `⋚`, `⋛`, `⋜`, `⋝`, `⋞`, `⋟`, `⋠`, `⋡`, `⋢`, `⋣`, `⋤`, `⋥`, `⋦`, `⋧`, `⋨`, `⋩`, `⋪`, `⋫`, `⋬`, `⋭`, `⋲`, `⋳`, `⋴`, `⋵`, `⋶`, `⋷`, `⋸`, `⋹`, `⋺`, `⋻`, `⋼`, `⋽`, `⋾`, `⋿`, `⟈`, `⟉`, `⟒`, `⦷`, `⧀`, `⧁`, `⧡`, `⧣`, `⧤`, `⧥`, `⩦`, `⩧`, `⩪`, `⩫`, `⩬`, `⩭`, `⩮`, `⩯`, `⩰`, `⩱`, `⩲`, `⩳`, `⩵`, `⩶`, `⩷`, `⩸`, `⩹`, `⩺`, `⩻`, `⩼`, `⩽`, `⩾`, `⩿`, `⪀`, `⪁`, `⪂`, `⪃`, `⪄`, `⪅`, `⪆`, `⪇`, `⪈`, `⪉`, `⪊`, `⪋`, `⪌`, `⪍`, `⪎`, `⪏`, `⪐`, `⪑`, `⪒`, `⪓`, `⪔`, `⪕`, `⪖`, `⪗`, `⪘`, `⪙`, `⪚`, `⪛`, `⪜`, `⪝`, `⪞`, `⪟`, `⪠`, `⪡`, `⪢`, `⪣`, `⪤`, `⪥`, `⪦`, `⪧`, `⪨`, `⪩`, `⪪`, `⪫`, `⪬`, `⪭`, `⪮`, `⪯`, `⪰`, `⪱`, `⪲`, `⪳`, `⪴`, `⪵`, `⪶`, `⪷`, `⪸`, `⪹`, `⪺`, `⪻`, `⪼`, `⪽`, `⪾`, `⪿`, `⫀`, `⫁`, `⫂`, `⫃`, `⫄`, `⫅`, `⫆`, `⫇`, `⫈`, `⫉`, `⫊`, `⫋`, `⫌`, `⫍`, `⫎`, `⫏`, `⫐`, `⫑`, `⫒`, `⫓`, `⫔`, `⫕`, `⫖`, `⫗`, `⫘`, `⫙`, `⫷`, `⫸`, `⫹`, `⫺`, `⊢`, `⊣`, `⟂`, `<:`, `>:`, `<|`, `|>`, `…`, `⁝`, `⋮`, `⋱`, `⋰`, `⋯`, `+`, `-`, `¦`, `|`, `⊕`, `⊖`, `⊞`, `⊟`, `++`, `∪`, `∨`, `⊔`, `±`, `∓`, `∔`, `∸`, `≏`, `⊎`, `⊻`, `⊽`, `⋎`, `⋓`, `⧺`, `⧻`, `⨈`, `⨢`, `⨣`, `⨤`, `⨥`, `⨦`, `⨧`, `⨨`, `⨩`, `⨪`, `⨫`, `⨬`, `⨭`, `⨮`, `⨹`, `⨺`, `⩁`, `⩂`, `⩅`, `⩊`, `⩌`, `⩏`, `⩐`, `⩒`, `⩔`, `⩖`, `⩗`, `⩛`, `⩝`, `⩡`, `⩢`, `⩣`, `*`, `/`, `⌿`, `÷`, `%`, `&`, `⋅`, `∘`, `×`, `\`, `∩`, `∧`, `⊗`, `⊘`, `⊙`, `⊚`, `⊛`, `⊠`, `⊡`, `⊓`, `∗`, `∙`, `∤`, `⅋`, `≀`, `⊼`, `⋄`, `⋆`, `⋇`, `⋉`, `⋊`, `⋋`, `⋌`, `⋏`, `⋒`, `⟑`, `⦸`, `⦼`, `⦾`, `⦿`, `⧶`, `⧷`, `⨇`, `⨰`, `⨱`, `⨲`, `⨳`, `⨴`, `⨵`, `⨶`, `⨷`, `⨸`, `⨻`, `⨼`, `⨽`, `⩀`, `⩃`, `⩄`, `⩋`, `⩍`, `⩎`, `⩑`, `⩓`, `⩕`, `⩘`, `⩚`, `⩜`, `⩞`, `⩟`, `⩠`, `⫛`, `⊍`, 
`▷`, `⨝`, `⟕`, `⟖`, `⟗`, `⨟`, `//`, `>>`, `<<`, `>>>`, `^`, `↑`, `↓`, `⇵`, `⟰`, `⟱`, `⤈`, `⤉`, `⤊`, `⤋`, `⤒`, `⤓`, `⥉`, `⥌`, `⥍`, `⥏`, `⥑`, `⥔`, `⥕`, `⥘`, `⥙`, `⥜`, `⥝`, `⥠`, `⥡`, `⥣`, `⥥`, `⥮`, `⥯`, `↑`, `↓`, `!`, `¬`, `√`, `∛`, `∜`), NameDecorator, nil}, - {Words(``, `\b`, `baremodule`, `begin`, `break`, `catch`, `ccall`, `const`, `continue`, `do`, `else`, `elseif`, `end`, `export`, `finally`, `for`, `function`, `global`, `if`, `import`, `in`, `isa`, `let`, `local`, `macro`, `module`, `quote`, `return`, `try`, `using`, `where`, `while`), Keyword, nil}, - {Words(``, `\b`, `AbstractArray`, `AbstractChannel`, `AbstractChar`, `AbstractDict`, `AbstractDisplay`, `AbstractFloat`, `AbstractIrrational`, `AbstractMatch`, `AbstractMatrix`, `AbstractPattern`, `AbstractRange`, `AbstractSet`, `AbstractString`, `AbstractUnitRange`, `AbstractVecOrMat`, `AbstractVector`, `Any`, `ArgumentError`, `Array`, `AssertionError`, `BigFloat`, `BigInt`, `BitArray`, `BitMatrix`, `BitSet`, `BitVector`, `Bool`, `BoundsError`, `CapturedException`, `CartesianIndex`, `CartesianIndices`, `Cchar`, `Cdouble`, `Cfloat`, `Channel`, `Char`, `Cint`, `Cintmax_t`, `Clong`, `Clonglong`, `Cmd`, `Colon`, `Complex`, `ComplexF16`, `ComplexF32`, `ComplexF64`, `ComposedFunction`, `CompositeException`, `Condition`, `Cptrdiff_t`, `Cshort`, `Csize_t`, `Cssize_t`, `Cstring`, `Cuchar`, `Cuint`, `Cuintmax_t`, `Culong`, `Culonglong`, `Cushort`, `Cvoid`, `Cwchar_t`, `Cwstring`, `DataType`, `DenseArray`, `DenseMatrix`, `DenseVecOrMat`, `DenseVector`, `Dict`, `DimensionMismatch`, `Dims`, `DivideError`, `DomainError`, `EOFError`, `Enum`, `ErrorException`, `Exception`, `ExponentialBackOff`, `Expr`, `Float16`, `Float32`, `Float64`, `Function`, `GlobalRef`, `HTML`, `IO`, `IOBuffer`, `IOContext`, `IOStream`, `IdDict`, `IndexCartesian`, `IndexLinear`, `IndexStyle`, `InexactError`, `InitError`, `Int`, `Int128`, `Int16`, `Int32`, `Int64`, `Int8`, `Integer`, `InterruptException`, `InvalidStateException`, `Irrational`, `KeyError`, 
`LinRange`, `LineNumberNode`, `LinearIndices`, `LoadError`, `MIME`, `Matrix`, `Method`, `MethodError`, `Missing`, `MissingException`, `Module`, `NTuple`, `NamedTuple`, `Nothing`, `Number`, `OrdinalRange`, `OutOfMemoryError`, `OverflowError`, `Pair`, `PartialQuickSort`, `PermutedDimsArray`, `Pipe`, `ProcessFailedException`, `Ptr`, `QuoteNode`, `Rational`, `RawFD`, `ReadOnlyMemoryError`, `Real`, `ReentrantLock`, `Ref`, `Regex`, `RegexMatch`, `RoundingMode`, `SegmentationFault`, `Set`, `Signed`, `Some`, `StackOverflowError`, `StepRange`, `StepRangeLen`, `StridedArray`, `StridedMatrix`, `StridedVecOrMat`, `StridedVector`, `String`, `StringIndexError`, `SubArray`, `SubString`, `SubstitutionString`, `Symbol`, `SystemError`, `Task`, `TaskFailedException`, `Text`, `TextDisplay`, `Timer`, `Tuple`, `Type`, `TypeError`, `TypeVar`, `UInt`, `UInt128`, `UInt16`, `UInt32`, `UInt64`, `UInt8`, `UndefInitializer`, `UndefKeywordError`, `UndefRefError`, `UndefVarError`, `Union`, `UnionAll`, `UnitRange`, `Unsigned`, `Val`, `Vararg`, `VecElement`, `VecOrMat`, `Vector`, `VersionNumber`, `WeakKeyDict`, `WeakRef`), KeywordType, nil}, - {Words(``, `\b`, `ARGS`, `C_NULL`, `DEPOT_PATH`, `ENDIAN_BOM`, `ENV`, `Inf`, `Inf16`, `Inf32`, `Inf64`, `InsertionSort`, `LOAD_PATH`, `MergeSort`, `NaN`, `NaN16`, `NaN32`, `NaN64`, `PROGRAM_FILE`, `QuickSort`, `RoundDown`, `RoundFromZero`, `RoundNearest`, `RoundNearestTiesAway`, `RoundNearestTiesUp`, `RoundToZero`, `RoundUp`, `VERSION`, `devnull`, `false`, `im`, `missing`, `nothing`, `pi`, `stderr`, `stdin`, `stdout`, `true`, `undef`, `π`, `ℯ`), NameBuiltin, nil}, - {`(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, Name, nil}, - {`(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`\d+((_\d+)+)?[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, - {`0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+`, LiteralNumberFloat, nil}, - {`0b[01]+((_[01]+)+)?`, LiteralNumberBin, nil}, - 
{`0o[0-7]+((_[0-7]+)+)?`, LiteralNumberOct, nil}, - {`0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?`, LiteralNumberHex, nil}, - {`\d+((_\d+)+)?`, LiteralNumberInteger, nil}, - {Words(``, ``, `.`), Operator, nil}, - }, - "blockcomment": { - {`[^=#]`, CommentMultiline, nil}, - {`#=`, CommentMultiline, Push()}, - {`=#`, CommentMultiline, Pop(1)}, - {`[=#]`, CommentMultiline, nil}, - }, - "curly": { - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - {`(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, KeywordType, nil}, - Include("root"), - }, - "tqrawstring": { - {`"""`, LiteralString, Pop(1)}, - {`([^"]|"[^"][^"])+`, LiteralString, nil}, - }, - "rawstring": { - {`"`, LiteralString, Pop(1)}, - {`\\"`, LiteralStringEscape, nil}, - {`([^"\\]|\\[^"])+`, LiteralString, nil}, - }, - "interp": { - {`\$(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, LiteralStringInterpol, nil}, - {`(\$)(\()`, ByGroups(LiteralStringInterpol, Punctuation), Push("in-intp")}, - }, - "in-intp": { - {`\(`, Punctuation, Push()}, - {`\)`, Punctuation, Pop(1)}, - Include("root"), - }, - "string": { - {`(")((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)|\d+)?`, ByGroups(LiteralString, LiteralStringAffix), Pop(1)}, - {`\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)`, LiteralStringEscape, nil}, - Include("interp"), - {`%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil}, - {`[^"$%\\]+`, LiteralString, nil}, - {`.`, LiteralString, nil}, - }, - "tqstring": { - {`(""")((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)|\d+)?`, ByGroups(LiteralString, LiteralStringAffix), Pop(1)}, - {`\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)`, LiteralStringEscape, nil}, - Include("interp"), - {`[^"$%\\]+`, LiteralString, nil}, - {`.`, LiteralString, nil}, - }, - "regex": { - {`(")([imsxa]*)?`, ByGroups(LiteralStringRegex, LiteralStringAffix), Pop(1)}, - {`\\"`, LiteralStringRegex, nil}, - {`[^\\"]+`, LiteralStringRegex, nil}, - }, - "tqregex": { - {`(""")([imsxa]*)?`, ByGroups(LiteralStringRegex, LiteralStringAffix), Pop(1)}, - {`[^"]+`, 
LiteralStringRegex, nil}, - }, - "command": { - {"(`)((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*)|\\d+)?", ByGroups(LiteralStringBacktick, LiteralStringAffix), Pop(1)}, - {"\\\\[`$]", LiteralStringEscape, nil}, - Include("interp"), - {"[^\\\\`$]+", LiteralStringBacktick, nil}, - {`.`, LiteralStringBacktick, nil}, - }, - "tqcommand": { - {"(```)((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*)|\\d+)?", ByGroups(LiteralStringBacktick, LiteralStringAffix), Pop(1)}, - {`\\\$`, LiteralStringEscape, nil}, - Include("interp"), - {"[^\\\\`$]+", LiteralStringBacktick, nil}, - {`.`, LiteralStringBacktick, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go b/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go deleted file mode 100644 index 5dbda9fbad69..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go +++ /dev/null @@ -1,54 +0,0 @@ -package j - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -var Jungle = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Jungle", - Aliases: []string{"jungle"}, - Filenames: []string{"*.jungle"}, - MimeTypes: []string{"text/x-jungle"}, - }, - jungleRules, -)) - -func jungleRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - {`\n`, Text, nil}, - {`#(\n|[\w\W]*?[^#]\n)`, CommentSingle, nil}, - {`^(?=\S)`, None, Push("instruction")}, - {`[\.;\[\]\(\)\$]`, Punctuation, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "instruction": { - {`[^\S\n]+`, Text, nil}, - {`=`, Operator, Push("value")}, - {`(?=\S)`, None, Push("var")}, - Default(Pop(1)), - }, - "value": { - {`[^\S\n]+`, Text, nil}, - {`\$\(`, Punctuation, Push("var")}, - {`[;\[\]\(\)\$]`, Punctuation, nil}, - {`#(\n|[\w\W]*?[^#]\n)`, CommentSingle, nil}, - {`[\w_\-\.\/\\]+`, Text, nil}, - Default(Pop(1)), - }, - "var": { - {`[^\S\n]+`, Text, nil}, - 
{`\b(((re)?source|barrel)Path|excludeAnnotations|annotations|lang)\b`, NameBuiltin, nil}, - {`\bbase\b`, NameConstant, nil}, - {`\b(ind|zsm|hrv|ces|dan|dut|eng|fin|fre|deu|gre|hun|ita|nob|po[lr]|rus|sl[ov]|spa|swe|ara|heb|zh[st]|jpn|kor|tha|vie|bul|tur)`, NameConstant, nil}, - {`\b((semi)?round|rectangle)(-\d+x\d+)?\b`, NameConstant, nil}, - {`[\.;\[\]\(\$]`, Punctuation, nil}, - {`\)`, Punctuation, Pop(1)}, - {`[a-zA-Z_]\w*`, Name, nil}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go b/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go deleted file mode 100644 index deb182e9b702..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go +++ /dev/null @@ -1,98 +0,0 @@ -package k - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Kotlin lexer. -var Kotlin = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Kotlin", - Aliases: []string{"kotlin"}, - Filenames: []string{"*.kt"}, - MimeTypes: []string{"text/x-kotlin"}, - DotAll: true, - }, - kotlinRules, -)) - -func kotlinRules() Rules { - const kotlinIdentifier = "(?:[_\\p{L}][\\p{L}\\p{N}]*|`@?[_\\p{L}][\\p{L}\\p{N}]+`)" - - return Rules{ - "root": { - {`^\s*\[.*?\]`, NameAttribute, nil}, - {`[^\S\n]+`, Text, nil}, - {`\\\n`, Text, nil}, - {`//[^\n]*\n?`, CommentSingle, nil}, - {`/[*].*?[*]/`, CommentMultiline, nil}, - {`\n`, Text, nil}, - {`!==|!in|!is|===`, Operator, nil}, - {`%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]`, Operator, nil}, - {`[~!%^&*()+=|\[\]:;,.<>\/?-]`, Punctuation, nil}, - {`[{}]`, Punctuation, nil}, - {`"""`, LiteralString, Push("rawstring")}, - {`"`, LiteralStringDouble, Push("string")}, - {`(')(\\u[0-9a-fA-F]{4})(')`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralStringChar), nil}, - {`'\\.'|'[^\\]'`, LiteralStringChar, nil}, - {`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, 
nil}, - {`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil}, - {`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, - {`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")}, - {`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")}, - {`(fun)(\s+)`, ByGroups(Keyword, Text), Push("function")}, - {`(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|var|vararg|when|where|while)\b`, Keyword, nil}, - {`@` + kotlinIdentifier, NameDecorator, nil}, - {kotlinIdentifier, Name, nil}, - }, - "package": { - {`\S+`, NameNamespace, Pop(1)}, - }, - "class": { - // \x60 is the back tick character (`) - {`\x60[^\x60]+?\x60`, NameClass, Pop(1)}, - {kotlinIdentifier, NameClass, Pop(1)}, - }, - "property": { - {`\x60[^\x60]+?\x60`, NameProperty, Pop(1)}, - {kotlinIdentifier, NameProperty, Pop(1)}, - }, - "generics-specification": { - {`<`, Punctuation, Push("generics-specification")}, // required for generics inside generics e.g. 
> - {`>`, Punctuation, Pop(1)}, - {`[,:*?]`, Punctuation, nil}, - {`(in|out|reified)`, Keyword, nil}, - {`\x60[^\x60]+?\x60`, NameClass, nil}, - {kotlinIdentifier, NameClass, nil}, - {`\s+`, Text, nil}, - }, - "function": { - {`<`, Punctuation, Push("generics-specification")}, - {`\x60[^\x60]+?\x60`, NameFunction, Pop(1)}, - {kotlinIdentifier, NameFunction, Pop(1)}, - {`\s+`, Text, nil}, - }, - "rawstring": { - // raw strings don't allow character escaping - {`"""`, LiteralString, Pop(1)}, - {`(?:[^$"]+|\"{1,2}[^"])+`, LiteralString, nil}, - Include("string-interpol"), - // remaining dollar signs are just a string - {`\$`, LiteralString, nil}, - }, - "string": { - {`\\[tbnr'"\\\$]`, LiteralStringEscape, nil}, - {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - Include("string-interpol"), - {`[^\n\\"$]+`, LiteralStringDouble, nil}, - // remaining dollar signs are just a string - {`\$`, LiteralStringDouble, nil}, - }, - "string-interpol": { - {`\$` + kotlinIdentifier, LiteralStringInterpol, nil}, - {`\${[^}\n]*}`, LiteralStringInterpol, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go b/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go deleted file mode 100644 index 6d83298b663b..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go +++ /dev/null @@ -1,34 +0,0 @@ -package l - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Lighttpd Configuration File lexer. 
-var Lighttpd = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Lighttpd configuration file", - Aliases: []string{"lighty", "lighttpd"}, - Filenames: []string{}, - MimeTypes: []string{"text/x-lighttpd-conf"}, - }, - lighttpdRules, -)) - -func lighttpdRules() Rules { - return Rules{ - "root": { - {`#.*\n`, CommentSingle, nil}, - {`/\S*`, Name, nil}, - {`[a-zA-Z._-]+`, Keyword, nil}, - {`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil}, - {`[0-9]+`, LiteralNumber, nil}, - {`=>|=~|\+=|==|=|\+`, Operator, nil}, - {`\$[A-Z]+`, NameBuiltin, nil}, - {`[(){}\[\],]`, Punctuation, nil}, - {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, - {`\s+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go b/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go deleted file mode 100644 index e9ea319e3ca5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go +++ /dev/null @@ -1,47 +0,0 @@ -package l - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Llvm lexer. 
-var Llvm = internal.Register(MustNewLazyLexer( - &Config{ - Name: "LLVM", - Aliases: []string{"llvm"}, - Filenames: []string{"*.ll"}, - MimeTypes: []string{"text/x-llvm"}, - }, - llvmRules, -)) - -func llvmRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - {`([-a-zA-Z$._][\w\-$.]*|"[^"]*?")\s*:`, NameLabel, nil}, - Include("keyword"), - {`%([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariable, nil}, - {`@([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariableGlobal, nil}, - {`%\d+`, NameVariableAnonymous, nil}, - {`@\d+`, NameVariableGlobal, nil}, - {`#\d+`, NameVariableGlobal, nil}, - {`!([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariable, nil}, - {`!\d+`, NameVariableAnonymous, nil}, - {`c?"[^"]*?"`, LiteralString, nil}, - {`0[xX][a-fA-F0-9]+`, LiteralNumber, nil}, - {`-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?`, LiteralNumber, nil}, - {`[=<>{}\[\]()*.,!]|x\b`, Punctuation, nil}, - }, - "whitespace": { - {`(\n|\s)+`, Text, nil}, - {`;.*?\n`, Comment, nil}, - }, - "keyword": { - {Words(``, `\b`, `begin`, `end`, `true`, `false`, `declare`, `define`, `global`, `constant`, `private`, `linker_private`, `internal`, `available_externally`, `linkonce`, `linkonce_odr`, `weak`, `weak_odr`, `appending`, `dllimport`, `dllexport`, `common`, `default`, `hidden`, `protected`, `extern_weak`, `external`, `thread_local`, `zeroinitializer`, `undef`, `null`, `to`, `tail`, `target`, `triple`, `datalayout`, `volatile`, `nuw`, `nsw`, `nnan`, `ninf`, `nsz`, `arcp`, `fast`, `exact`, `inbounds`, `align`, `addrspace`, `section`, `alias`, `module`, `asm`, `sideeffect`, `gc`, `dbg`, `linker_private_weak`, `attributes`, `blockaddress`, `initialexec`, `localdynamic`, `localexec`, `prefix`, `unnamed_addr`, `ccc`, `fastcc`, `coldcc`, `x86_stdcallcc`, `x86_fastcallcc`, `arm_apcscc`, `arm_aapcscc`, `arm_aapcs_vfpcc`, `ptx_device`, `ptx_kernel`, `intel_ocl_bicc`, `msp430_intrcc`, `spir_func`, `spir_kernel`, `x86_64_sysvcc`, `x86_64_win64cc`, `x86_thiscallcc`, `cc`, `c`, `signext`, `zeroext`, 
`inreg`, `sret`, `nounwind`, `noreturn`, `noalias`, `nocapture`, `byval`, `nest`, `readnone`, `readonly`, `inlinehint`, `noinline`, `alwaysinline`, `optsize`, `ssp`, `sspreq`, `noredzone`, `noimplicitfloat`, `naked`, `builtin`, `cold`, `nobuiltin`, `noduplicate`, `nonlazybind`, `optnone`, `returns_twice`, `sanitize_address`, `sanitize_memory`, `sanitize_thread`, `sspstrong`, `uwtable`, `returned`, `type`, `opaque`, `eq`, `ne`, `slt`, `sgt`, `sle`, `sge`, `ult`, `ugt`, `ule`, `uge`, `oeq`, `one`, `olt`, `ogt`, `ole`, `oge`, `ord`, `uno`, `ueq`, `une`, `x`, `acq_rel`, `acquire`, `alignstack`, `atomic`, `catch`, `cleanup`, `filter`, `inteldialect`, `max`, `min`, `monotonic`, `nand`, `personality`, `release`, `seq_cst`, `singlethread`, `umax`, `umin`, `unordered`, `xchg`, `add`, `fadd`, `sub`, `fsub`, `mul`, `fmul`, `udiv`, `sdiv`, `fdiv`, `urem`, `srem`, `frem`, `shl`, `lshr`, `ashr`, `and`, `or`, `xor`, `icmp`, `fcmp`, `phi`, `call`, `trunc`, `zext`, `sext`, `fptrunc`, `fpext`, `uitofp`, `sitofp`, `fptoui`, `fptosi`, `inttoptr`, `ptrtoint`, `bitcast`, `addrspacecast`, `select`, `va_arg`, `ret`, `br`, `switch`, `invoke`, `unwind`, `unreachable`, `indirectbr`, `landingpad`, `resume`, `malloc`, `alloca`, `free`, `load`, `store`, `getelementptr`, `extractelement`, `insertelement`, `shufflevector`, `getresult`, `extractvalue`, `insertvalue`, `atomicrmw`, `cmpxchg`, `fence`, `allocsize`, `amdgpu_cs`, `amdgpu_gs`, `amdgpu_kernel`, `amdgpu_ps`, `amdgpu_vs`, `any`, `anyregcc`, `argmemonly`, `avr_intrcc`, `avr_signalcc`, `caller`, `catchpad`, `catchret`, `catchswitch`, `cleanuppad`, `cleanupret`, `comdat`, `convergent`, `cxx_fast_tlscc`, `deplibs`, `dereferenceable`, `dereferenceable_or_null`, `distinct`, `exactmatch`, `externally_initialized`, `from`, `ghccc`, `hhvm_ccc`, `hhvmcc`, `ifunc`, `inaccessiblemem_or_argmemonly`, `inaccessiblememonly`, `inalloca`, `jumptable`, `largest`, `local_unnamed_addr`, `minsize`, `musttail`, `noduplicates`, `none`, `nonnull`, `norecurse`, 
`notail`, `preserve_allcc`, `preserve_mostcc`, `prologue`, `safestack`, `samesize`, `source_filename`, `swiftcc`, `swifterror`, `swiftself`, `webkit_jscc`, `within`, `writeonly`, `x86_intrcc`, `x86_vectorcallcc`), Keyword, nil}, - {Words(``, ``, `void`, `half`, `float`, `double`, `x86_fp80`, `fp128`, `ppc_fp128`, `label`, `metadata`, `token`), KeywordType, nil}, - {`i[1-9]\d*`, Keyword, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/lua.go b/vendor/github.com/alecthomas/chroma/lexers/l/lua.go deleted file mode 100644 index db574a1b9a88..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/l/lua.go +++ /dev/null @@ -1,79 +0,0 @@ -package l - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Lua lexer. -var Lua = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Lua", - Aliases: []string{"lua"}, - Filenames: []string{"*.lua", "*.wlua"}, - MimeTypes: []string{"text/x-lua", "application/x-lua"}, - }, - luaRules, -)) - -func luaRules() Rules { - return Rules{ - "root": { - {`#!.*`, CommentPreproc, nil}, - Default(Push("base")), - }, - "ws": { - {`(?:--\[(=*)\[[\w\W]*?\](\1)\])`, CommentMultiline, nil}, - {`(?:--.*$)`, CommentSingle, nil}, - {`(?:\s+)`, Text, nil}, - }, - "base": { - Include("ws"), - {`(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?`, LiteralNumberHex, nil}, - {`(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?`, LiteralNumberFloat, nil}, - {`(?i)\d+e[+-]?\d+`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`(?s)\[(=*)\[.*?\]\1\]`, LiteralString, nil}, - {`::`, Punctuation, Push("label")}, - {`\.{3}`, Punctuation, nil}, - {`[=<>|~&+\-*/%#^]+|\.\.`, Operator, nil}, - {`[\[\]{}().,:;]`, Punctuation, nil}, - {`(and|or|not)\b`, OperatorWord, nil}, - {`(break|do|else|elseif|end|for|if|in|repeat|return|then|until|while)\b`, KeywordReserved, nil}, - {`goto\b`, KeywordReserved, Push("goto")}, - {`(local)\b`, KeywordDeclaration, nil}, - {`(true|false|nil)\b`, 
KeywordConstant, nil}, - {`(function)\b`, KeywordReserved, Push("funcname")}, - {`[A-Za-z_]\w*(\.[A-Za-z_]\w*)?`, Name, nil}, - {`'`, LiteralStringSingle, Combined("stringescape", "sqs")}, - {`"`, LiteralStringDouble, Combined("stringescape", "dqs")}, - }, - "funcname": { - Include("ws"), - {`[.:]`, Punctuation, nil}, - {`(?:[^\W\d]\w*)(?=(?:(?:--\[(=*)\[[\w\W]*?\](\2)\])|(?:--.*$)|(?:\s+))*[.:])`, NameClass, nil}, - {`(?:[^\W\d]\w*)`, NameFunction, Pop(1)}, - {`\(`, Punctuation, Pop(1)}, - }, - "goto": { - Include("ws"), - {`(?:[^\W\d]\w*)`, NameLabel, Pop(1)}, - }, - "label": { - Include("ws"), - {`::`, Punctuation, Pop(1)}, - {`(?:[^\W\d]\w*)`, NameLabel, nil}, - }, - "stringescape": { - {`\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|u\{[0-9a-fA-F]+\})`, LiteralStringEscape, nil}, - }, - "sqs": { - {`'`, LiteralStringSingle, Pop(1)}, - {`[^\\']+`, LiteralStringSingle, nil}, - }, - "dqs": { - {`"`, LiteralStringDouble, Pop(1)}, - {`[^\\"]+`, LiteralStringDouble, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/lexers.go b/vendor/github.com/alecthomas/chroma/lexers/lexers.go deleted file mode 100644 index 2b429212e5f0..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/lexers.go +++ /dev/null @@ -1,60 +0,0 @@ -// Package lexers contains the registry of all lexers. -// -// Sub-packages contain lexer implementations. 
-package lexers - -// nolint -import ( - "github.com/alecthomas/chroma" - _ "github.com/alecthomas/chroma/lexers/a" - _ "github.com/alecthomas/chroma/lexers/b" - _ "github.com/alecthomas/chroma/lexers/c" - _ "github.com/alecthomas/chroma/lexers/circular" - _ "github.com/alecthomas/chroma/lexers/d" - _ "github.com/alecthomas/chroma/lexers/e" - _ "github.com/alecthomas/chroma/lexers/f" - _ "github.com/alecthomas/chroma/lexers/g" - _ "github.com/alecthomas/chroma/lexers/h" - _ "github.com/alecthomas/chroma/lexers/i" - "github.com/alecthomas/chroma/lexers/internal" - _ "github.com/alecthomas/chroma/lexers/j" - _ "github.com/alecthomas/chroma/lexers/k" - _ "github.com/alecthomas/chroma/lexers/l" - _ "github.com/alecthomas/chroma/lexers/m" - _ "github.com/alecthomas/chroma/lexers/n" - _ "github.com/alecthomas/chroma/lexers/o" - _ "github.com/alecthomas/chroma/lexers/p" - _ "github.com/alecthomas/chroma/lexers/q" - _ "github.com/alecthomas/chroma/lexers/r" - _ "github.com/alecthomas/chroma/lexers/s" - _ "github.com/alecthomas/chroma/lexers/t" - _ "github.com/alecthomas/chroma/lexers/v" - _ "github.com/alecthomas/chroma/lexers/w" - _ "github.com/alecthomas/chroma/lexers/x" - _ "github.com/alecthomas/chroma/lexers/y" - _ "github.com/alecthomas/chroma/lexers/z" -) - -// Registry of Lexers. -var Registry = internal.Registry - -// Names of all lexers, optionally including aliases. -func Names(withAliases bool) []string { return internal.Names(withAliases) } - -// Get a Lexer by name, alias or file extension. -func Get(name string) chroma.Lexer { return internal.Get(name) } - -// MatchMimeType attempts to find a lexer for the given MIME type. -func MatchMimeType(mimeType string) chroma.Lexer { return internal.MatchMimeType(mimeType) } - -// Match returns the first lexer matching filename. -func Match(filename string) chroma.Lexer { return internal.Match(filename) } - -// Analyse text content and return the "best" lexer.. 
-func Analyse(text string) chroma.Lexer { return internal.Analyse(text) } - -// Register a Lexer with the global registry. -func Register(lexer chroma.Lexer) chroma.Lexer { return internal.Register(lexer) } - -// Fallback lexer if no other is found. -var Fallback = internal.Fallback diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/make.go b/vendor/github.com/alecthomas/chroma/lexers/m/make.go deleted file mode 100644 index 905491a642b2..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/make.go +++ /dev/null @@ -1,58 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - . "github.com/alecthomas/chroma/lexers/b" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Makefile lexer. -var Makefile = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Base Makefile", - Aliases: []string{"make", "makefile", "mf", "bsdmake"}, - Filenames: []string{"*.mak", "*.mk", "Makefile", "makefile", "Makefile.*", "GNUmakefile"}, - MimeTypes: []string{"text/x-makefile"}, - EnsureNL: true, - }, - makefileRules, -)) - -func makefileRules() Rules { - return Rules{ - "root": { - {`^(?:[\t ]+.*\n|\n)+`, Using(Bash), nil}, - {`\$[<@$+%?|*]`, Keyword, nil}, - {`\s+`, Text, nil}, - {`#.*?\n`, Comment, nil}, - {`(export)(\s+)(?=[\w${}\t -]+\n)`, ByGroups(Keyword, Text), Push("export")}, - {`export\s+`, Keyword, nil}, - {`([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)`, ByGroups(NameVariable, Text, Operator, Text, Using(Bash)), nil}, - {`(?s)"(\\\\|\\.|[^"\\])*"`, LiteralStringDouble, nil}, - {`(?s)'(\\\\|\\.|[^'\\])*'`, LiteralStringSingle, nil}, - {`([^\n:]+)(:+)([ \t]*)`, ByGroups(NameFunction, Operator, Text), Push("block-header")}, - {`\$\(`, Keyword, Push("expansion")}, - }, - "expansion": { - {`[^$a-zA-Z_()]+`, Text, nil}, - {`[a-zA-Z_]+`, NameVariable, nil}, - {`\$`, Keyword, nil}, - {`\(`, Keyword, Push()}, - {`\)`, Keyword, Pop(1)}, - }, - "export": { - {`[\w${}-]+`, NameVariable, nil}, - {`\n`, Text, Pop(1)}, - 
{`\s+`, Text, nil}, - }, - "block-header": { - {`[,|]`, Punctuation, nil}, - {`#.*?\n`, Comment, Pop(1)}, - {`\\\n`, Text, nil}, - {`\$\(`, Keyword, Push("expansion")}, - {`[a-zA-Z_]+`, Name, nil}, - {`\n`, Text, Pop(1)}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mako.go b/vendor/github.com/alecthomas/chroma/lexers/m/mako.go deleted file mode 100644 index 6f777dd58d2e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/mako.go +++ /dev/null @@ -1,64 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" - . "github.com/alecthomas/chroma/lexers/p" // nolint -) - -// Mako lexer. -var Mako = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Mako", - Aliases: []string{"mako"}, - Filenames: []string{"*.mao"}, - MimeTypes: []string{"application/x-mako"}, - }, - makoRules, -)) - -func makoRules() Rules { - return Rules{ - "root": { - {`(\s*)(%)(\s*end(?:\w+))(\n|\Z)`, ByGroups(Text, CommentPreproc, Keyword, Other), nil}, - {`(\s*)(%)([^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Using(Python), Other), nil}, - {`(\s*)(##[^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Other), nil}, - {`(?s)<%doc>.*?`, CommentPreproc, nil}, - {`(<%)([\w.:]+)`, ByGroups(CommentPreproc, NameBuiltin), Push("tag")}, - {`()`, ByGroups(CommentPreproc, NameBuiltin, CommentPreproc), nil}, - {`<%(?=([\w.:]+))`, CommentPreproc, Push("ondeftags")}, - {`(<%(?:!?))(.*?)(%>)(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, - {`(\$\{)(.*?)(\})`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, - {`(?sx) - (.+?) 
# anything, followed by: - (?: - (?<=\n)(?=%|\#\#) | # an eval or comment line - (?=\#\*) | # multiline comment - (?=`, CommentPreproc, Pop(1)}, - {`\s+`, Text, nil}, - }, - "attr": { - {`".*?"`, LiteralString, Pop(1)}, - {`'.*?'`, LiteralString, Pop(1)}, - {`[^\s>]+`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mason.go b/vendor/github.com/alecthomas/chroma/lexers/m/mason.go deleted file mode 100644 index bc48f5caaa25..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/mason.go +++ /dev/null @@ -1,47 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - . "github.com/alecthomas/chroma/lexers/h" // nolint - "github.com/alecthomas/chroma/lexers/internal" - . "github.com/alecthomas/chroma/lexers/p" // nolint -) - -// Mason lexer. -var Mason = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Mason", - Aliases: []string{"mason"}, - Filenames: []string{"*.m", "*.mhtml", "*.mc", "*.mi", "autohandler", "dhandler"}, - MimeTypes: []string{"application/x-mason"}, - Priority: 0.1, - }, - masonRules, -)) - -func masonRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`(<%doc>)(.*?)()(?s)`, ByGroups(NameTag, CommentMultiline, NameTag), nil}, - {`(<%(?:def|method))(\s*)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil}, - {`(<%\w+)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Perl), NameTag), nil}, - {`(<&[^|])(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil}, - {`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil}, - {``, NameTag, nil}, - {`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Perl), NameTag), nil}, - {`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil}, - {`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Perl), Other), nil}, - {`(?sx) - (.+?) 
# anything, followed by: - (?: - (?<=\n)(?=[%#]) | # an eval or comment line - (?=`, `:>`, `/.`, `+`, `-`, `*`, `/`, `^`, `&&`, `||`, `!`, `<>`, `|`, `/;`, `?`, `@`, `//`, `/@`, `@@`, `@@@`, `~~`, `===`, `&`, `<`, `>`, `<=`, `>=`), Operator, nil}, - {Words(``, ``, `,`, `;`, `(`, `)`, `[`, `]`, `{`, `}`), Punctuation, nil}, - {`".*?"`, LiteralString, nil}, - {`\s+`, TextWhitespace, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/matlab.go b/vendor/github.com/alecthomas/chroma/lexers/m/matlab.go deleted file mode 100644 index 4e98d69776f8..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/matlab.go +++ /dev/null @@ -1,55 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Matlab lexer. -var Matlab = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Matlab", - Aliases: []string{"matlab"}, - Filenames: []string{"*.m"}, - MimeTypes: []string{"text/matlab"}, - }, - matlabRules, -)) - -func matlabRules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - {`^!.*`, LiteralStringOther, nil}, - {`%\{\s*\n`, CommentMultiline, Push("blockcomment")}, - {`%.*$`, Comment, nil}, - {`^\s*function`, Keyword, Push("deffunc")}, - {Words(``, `\b`, `break`, `case`, `catch`, `classdef`, `continue`, `else`, `elseif`, `end`, `enumerated`, `events`, `for`, `function`, `global`, `if`, `methods`, `otherwise`, `parfor`, `persistent`, `properties`, `return`, `spmd`, `switch`, `try`, `while`), Keyword, nil}, - 
{`(sin|sind|sinh|asin|asind|asinh|cos|cosd|cosh|acos|acosd|acosh|tan|tand|tanh|atan|atand|atan2|atanh|sec|secd|sech|asec|asecd|asech|csc|cscd|csch|acsc|acscd|acsch|cot|cotd|coth|acot|acotd|acoth|hypot|exp|expm1|log|log1p|log10|log2|pow2|realpow|reallog|realsqrt|sqrt|nthroot|nextpow2|abs|angle|complex|conj|imag|real|unwrap|isreal|cplxpair|fix|floor|ceil|round|mod|rem|sign|airy|besselj|bessely|besselh|besseli|besselk|beta|betainc|betaln|ellipj|ellipke|erf|erfc|erfcx|erfinv|expint|gamma|gammainc|gammaln|psi|legendre|cross|dot|factor|isprime|primes|gcd|lcm|rat|rats|perms|nchoosek|factorial|cart2sph|cart2pol|pol2cart|sph2cart|hsv2rgb|rgb2hsv|zeros|ones|eye|repmat|rand|randn|linspace|logspace|freqspace|meshgrid|accumarray|size|length|ndims|numel|disp|isempty|isequal|isequalwithequalnans|cat|reshape|diag|blkdiag|tril|triu|fliplr|flipud|flipdim|rot90|find|end|sub2ind|ind2sub|bsxfun|ndgrid|permute|ipermute|shiftdim|circshift|squeeze|isscalar|isvector|ans|eps|realmax|realmin|pi|i|inf|nan|isnan|isinf|isfinite|j|why|compan|gallery|hadamard|hankel|hilb|invhilb|magic|pascal|rosser|toeplitz|vander|wilkinson)\b`, NameBuiltin, nil}, - {`\.\.\..*$`, Comment, nil}, - {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, - {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, - {`\[|\]|\(|\)|\{|\}|:|@|\.|,`, Punctuation, nil}, - {`=|:|;`, Punctuation, nil}, - {`(?<=[\w)\].])\'+`, Operator, nil}, - {`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`\d+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`(?=]|<=|>=)`, Punctuation, nil}, - Include("simplevalue"), - {`\s+`, TextWhitespace, nil}, - }, - } - }, -)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/meson.go b/vendor/github.com/alecthomas/chroma/lexers/m/meson.go deleted file mode 100644 index 2a6a22f898ed..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/meson.go +++ /dev/null @@ -1,51 +0,0 @@ -package m - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Meson lexer. -var Meson = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Meson", - Aliases: []string{"meson", "meson.build"}, - Filenames: []string{"meson.build", "meson_options.txt"}, - MimeTypes: []string{"text/x-meson"}, - }, - func() Rules { - return Rules{ - "root": { - {`#.*?$`, Comment, nil}, - {`'''.*'''`, LiteralStringSingle, nil}, - {`[1-9][0-9]*`, LiteralNumberInteger, nil}, - {`0o[0-7]+`, LiteralNumberOct, nil}, - {`0x[a-fA-F0-9]+`, LiteralNumberHex, nil}, - Include("string"), - Include("keywords"), - Include("expr"), - {`[a-zA-Z_][a-zA-Z_0-9]*`, Name, nil}, - {`\s+`, TextWhitespace, nil}, - }, - "string": { - {`[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}`, LiteralString, nil}, - {`'.*?(?/-]`, Operator, nil}, - {`[()\[\],.]`, Punctuation, nil}, - {Words(``, `\b`, `break`, `case`, `const`, `continue`, `do`, `else`, `enum`, `extern`, `for`, `if`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `while`), Keyword, nil}, - {`(bool|float|half|long|ptrdiff_t|size_t|unsigned|u?char|u?int((8|16|32|64)_t)?|u?short)\b`, KeywordType, nil}, - {`(bool|float|half|u?(char|int|long|short))(2|3|4)\b`, KeywordType, nil}, - {`packed_(float|half|long|u?(char|int|short))(2|3|4)\b`, KeywordType, nil}, - {`(float|half)(2|3|4)x(2|3|4)\b`, KeywordType, nil}, - {`atomic_u?int\b`, KeywordType, nil}, - {`(rg?(8|16)(u|s)norm|rgba(8|16)(u|s)norm|srgba8unorm|rgb10a2|rg11b10f|rgb9e5)\b`, KeywordType, nil}, - {`(array|depth(2d|cube)(_array)?|depth2d_ms(_array)?|sampler|texture_buffer|texture(1|2)d(_array)?|texture2d_ms(_array)?|texture3d|texturecube(_array)?|uniform|visible_function_table)\b`, KeywordType, nil}, - {`(true|false|NULL)\b`, NameBuiltin, nil}, - {Words(``, `\b`, `device`, `constant`, `ray_data`, `thread`, `threadgroup`, `threadgroup_imageblock`), Keyword, nil}, - {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, - 
{`[a-zA-Z_]\w*`, Name, nil}, - }, - "root": { - Include("whitespace"), - {`(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(Keyword, UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, - {`(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(Keyword, UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, - Default(Push("statement")), - }, - "classname": { - {`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil}, - {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, - {`\s*(?=[>{])`, Text, Pop(1)}, - }, - "whitespace": { - {`^#if\s+0`, CommentPreproc, Push("if0")}, - {`^#`, CommentPreproc, Push("macro")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`\\\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - }, - "statement": { - Include("whitespace"), - Include("statements"), - {`[{]`, Punctuation, Push("root")}, - {`[;}]`, Punctuation, Pop(1)}, - }, - "function": { - Include("whitespace"), - Include("statements"), - {`;`, Punctuation, nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "macro": { - {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, - {`[^/\n]+`, CommentPreproc, nil}, - {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, - {`//.*?\n`, CommentSingle, Pop(1)}, - {`/`, CommentPreproc, nil}, - {`(?<=\\)\n`, CommentPreproc, nil}, - {`\n`, CommentPreproc, Pop(1)}, - }, - "if0": { - {`^\s*#if.*?(?|->|<-|\\/|xor|/\\)`, Operator, nil}, - {`(<|>|<=|>=|==|=|!=)`, Operator, nil}, - {`(\+|-|\*|/|div|mod)`, 
Operator, nil}, - {Words(`\b`, `\b`, `in`, `subset`, `superset`, `union`, `diff`, `symdiff`, `intersect`), Operator, nil}, - {`(\\|\.\.|\+\+)`, Operator, nil}, - {`[|()\[\]{},:;]`, Punctuation, nil}, - {`(true|false)\b`, KeywordConstant, nil}, - {`([+-]?)\d+(\.(?!\.)\d*)?([eE][-+]?\d+)?`, LiteralNumber, nil}, - {`::\s*([^\W\d]\w*)(\s*\([^\)]*\))?`, NameDecorator, nil}, - {`\b([^\W\d]\w*)\b(\()`, ByGroups(NameFunction, Punctuation), nil}, - {`[^\W\d]\w*`, NameOther, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mlir.go b/vendor/github.com/alecthomas/chroma/lexers/m/mlir.go deleted file mode 100644 index 112a1c3a61bd..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/mlir.go +++ /dev/null @@ -1,47 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// MLIR lexer. -var Mlir = internal.Register(MustNewLazyLexer( - &Config{ - Name: "MLIR", - Aliases: []string{"mlir"}, - Filenames: []string{"*.mlir"}, - MimeTypes: []string{"text/x-mlir"}, - }, - mlirRules, -)) - -func mlirRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - {`c?"[^"]*?"`, LiteralString, nil}, - {`\^([-a-zA-Z$._][\w\-$.0-9]*)\s*`, NameLabel, nil}, - {`([\w\d_$.]+)\s*=`, NameLabel, nil}, - Include("keyword"), - {`->`, Punctuation, nil}, - {`@([\w_][\w\d_$.]*)`, NameFunction, nil}, - {`[%#][\w\d_$.]+`, NameVariable, nil}, - {`([1-9?][\d?]*\s*x)+`, LiteralNumber, nil}, - {`0[xX][a-fA-F0-9]+`, LiteralNumber, nil}, - {`-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?`, LiteralNumber, nil}, - {`[=<>{}\[\]()*.,!:]|x\b`, Punctuation, nil}, - {`[\w\d]+`, Text, nil}, - }, - "whitespace": { - {`(\n|\s)+`, Text, nil}, - {`//.*?\n`, Comment, nil}, - }, - "keyword": { - {Words(``, ``, `constant`, `return`), KeywordType, nil}, - {Words(``, ``, `func`, `loc`, `memref`, `tensor`, `vector`), KeywordType, nil}, - {`bf16|f16|f32|f64|index`, Keyword, nil}, - {`i[1-9]\d*`, Keyword, nil}, - }, 
- } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go b/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go deleted file mode 100644 index 6ab05c9ed3d2..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go +++ /dev/null @@ -1,119 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Modula-2 lexer. -var Modula2 = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Modula-2", - Aliases: []string{"modula2", "m2"}, - Filenames: []string{"*.def", "*.mod"}, - MimeTypes: []string{"text/x-modula2"}, - DotAll: true, - }, - modula2Rules, -)) - -func modula2Rules() Rules { - return Rules{ - "whitespace": { - {`\n+`, Text, nil}, - {`\s+`, Text, nil}, - }, - "dialecttags": { - {`\(\*!m2pim\*\)`, CommentSpecial, nil}, - {`\(\*!m2iso\*\)`, CommentSpecial, nil}, - {`\(\*!m2r10\*\)`, CommentSpecial, nil}, - {`\(\*!objm2\*\)`, CommentSpecial, nil}, - {`\(\*!m2iso\+aglet\*\)`, CommentSpecial, nil}, - {`\(\*!m2pim\+gm2\*\)`, CommentSpecial, nil}, - {`\(\*!m2iso\+p1\*\)`, CommentSpecial, nil}, - {`\(\*!m2iso\+xds\*\)`, CommentSpecial, nil}, - }, - "identifiers": { - {`([a-zA-Z_$][\w$]*)`, Name, nil}, - }, - "prefixed_number_literals": { - {`0b[01]+(\'[01]+)*`, LiteralNumberBin, nil}, - {`0[ux][0-9A-F]+(\'[0-9A-F]+)*`, LiteralNumberHex, nil}, - }, - "plain_number_literals": { - {`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*[eE][+-]?[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil}, - {`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil}, - {`[0-9]+(\'[0-9]+)*`, LiteralNumberInteger, nil}, - }, - "suffixed_number_literals": { - {`[0-7]+B`, LiteralNumberOct, nil}, - {`[0-7]+C`, LiteralNumberOct, nil}, - {`[0-9A-F]+H`, LiteralNumberHex, nil}, - }, - "string_literals": { - {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - }, - "digraph_operators": { - {`\*\.`, Operator, nil}, - {`\+>`, Operator, nil}, - {`<>`, Operator, 
nil}, - {`<=`, Operator, nil}, - {`>=`, Operator, nil}, - {`==`, Operator, nil}, - {`::`, Operator, nil}, - {`:=`, Operator, nil}, - {`\+\+`, Operator, nil}, - {`--`, Operator, nil}, - }, - "unigraph_operators": { - {`[+-]`, Operator, nil}, - {`[*/]`, Operator, nil}, - {`\\`, Operator, nil}, - {`[=#<>]`, Operator, nil}, - {`\^`, Operator, nil}, - {`@`, Operator, nil}, - {`&`, Operator, nil}, - {`~`, Operator, nil}, - {"`", Operator, nil}, - }, - "digraph_punctuation": { - {`\.\.`, Punctuation, nil}, - {`<<`, Punctuation, nil}, - {`>>`, Punctuation, nil}, - {`->`, Punctuation, nil}, - {`\|#`, Punctuation, nil}, - {`##`, Punctuation, nil}, - {`\|\*`, Punctuation, nil}, - }, - "unigraph_punctuation": { - {`[()\[\]{},.:;|]`, Punctuation, nil}, - {`!`, Punctuation, nil}, - {`\?`, Punctuation, nil}, - }, - "comments": { - {`^//.*?\n`, CommentSingle, nil}, - {`\(\*([^$].*?)\*\)`, CommentMultiline, nil}, - {`/\*(.*?)\*/`, CommentMultiline, nil}, - }, - "pragmas": { - {`<\*.*?\*>`, CommentPreproc, nil}, - {`\(\*\$.*?\*\)`, CommentPreproc, nil}, - }, - "root": { - Include("whitespace"), - Include("dialecttags"), - Include("pragmas"), - Include("comments"), - Include("identifiers"), - Include("suffixed_number_literals"), - Include("prefixed_number_literals"), - Include("plain_number_literals"), - Include("string_literals"), - Include("digraph_punctuation"), - Include("digraph_operators"), - Include("unigraph_punctuation"), - Include("unigraph_operators"), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go b/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go deleted file mode 100644 index 8ad81cc08661..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go +++ /dev/null @@ -1,66 +0,0 @@ -package m - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -var MonkeyC = internal.Register(MustNewLazyLexer( - &Config{ - Name: "MonkeyC", - Aliases: []string{"monkeyc"}, - Filenames: []string{"*.mc"}, - MimeTypes: []string{"text/x-monkeyc"}, - }, - monkeyCRules, -)) - -func monkeyCRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - {`\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - {`:[a-zA-Z_][\w_\.]*`, StringSymbol, nil}, - {`[{}\[\]\(\),;:\.]`, Punctuation, nil}, - {`[&~\|\^!+\-*\/%=?]`, Operator, nil}, - {`=>|[+-]=|&&|\|\||>>|<<|[<>]=?|[!=]=`, Operator, nil}, - {`\b(and|or|instanceof|has|extends|new)`, OperatorWord, nil}, - {Words(``, `\b`, `NaN`, `null`, `true`, `false`), KeywordConstant, nil}, - {`(using)((?:\s|\\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - {`(class)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, - {`(function)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("function")}, - {`(module)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("module")}, - {`\b(if|else|for|switch|case|while|break|continue|default|do|try|catch|finally|return|throw|extends|function)\b`, Keyword, nil}, - {`\b(const|enum|hidden|public|protected|private|static)\b`, KeywordType, nil}, - {`\bvar\b`, KeywordDeclaration, nil}, - {`\b(Activity(Monitor|Recording)?|Ant(Plus)?|Application|Attention|Background|Communications|Cryptography|FitContributor|Graphics|Gregorian|Lang|Math|Media|Persisted(Content|Locations)|Position|Properties|Sensor(History|Logging)?|Storage|StringUtil|System|Test|Time(r)?|Toybox|UserProfile|WatchUi|Rez|Drawables|Strings|Fonts|method)\b`, NameBuiltin, nil}, - {`\b(me|self|\$)\b`, NameBuiltinPseudo, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\'|[^''])*'`, LiteralStringSingle, nil}, - 
{`-?(0x[0-9a-fA-F]+l?)`, NumberHex, nil}, - {`-?([0-9]+(\.[0-9]+[df]?|[df]))\b`, NumberFloat, nil}, - {`-?([0-9]+l?)`, NumberInteger, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "import": { - {`([a-zA-Z_][\w_\.]*)(?:(\s+)(as)(\s+)([a-zA-Z_][\w_]*))?`, ByGroups(NameNamespace, Text, KeywordNamespace, Text, NameNamespace), nil}, - Default(Pop(1)), - }, - "class": { - {`([a-zA-Z_][\w_\.]*)(?:(\s+)(extends)(\s+)([a-zA-Z_][\w_\.]*))?`, ByGroups(NameClass, Text, KeywordDeclaration, Text, NameClass), nil}, - Default(Pop(1)), - }, - "function": { - {`initialize`, NameFunctionMagic, nil}, - {`[a-zA-Z_][\w_\.]*`, NameFunction, nil}, - Default(Pop(1)), - }, - "module": { - {`[a-zA-Z_][\w_\.]*`, NameNamespace, nil}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go b/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go deleted file mode 100644 index 0ba0ba97c315..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go +++ /dev/null @@ -1,57 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// MorrowindScript lexer. 
-var MorrowindScript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "MorrowindScript", - Aliases: []string{"morrowind", "mwscript"}, - Filenames: []string{}, - MimeTypes: []string{}, - }, - morrowindScriptRules, -)) - -func morrowindScriptRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`;.*$`, Comment, nil}, - {`(["'])(?:(?=(\\?))\2.)*?\1`, LiteralString, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil}, - Include("keywords"), - Include("types"), - Include("builtins"), - Include("punct"), - Include("operators"), - {`\n`, Text, nil}, - {`\S+\s+`, Text, nil}, - {`[a-zA-Z0-9_]\w*`, Name, nil}, - }, - "keywords": { - {`(?i)(begin|if|else|elseif|endif|while|endwhile|return|to)\b`, Keyword, nil}, - {`(?i)(end)\b`, Keyword, nil}, - {`(?i)(end)\w+.*$`, Text, nil}, - {`[\w+]->[\w+]`, Operator, nil}, - }, - "builtins": { - {`(?i)(Activate|AddItem|AddSoulGem|AddSpell|AddToLevCreature|AddToLevItem|AddTopic|AIActivate|AIEscort|AIEscortCell|AIFollow|AiFollowCell|AITravel|AIWander|BecomeWerewolf|Cast|ChangeWeather|Choice|ClearForceJump|ClearForceMoveJump|ClearForceRun|ClearForceSneak|ClearInfoActor|Disable|DisableLevitation|DisablePlayerControls|DisablePlayerFighting|DisablePlayerJumping|DisablePlayerLooking|DisablePlayerMagic|DisablePlayerViewSwitch|DisableTeleporting|DisableVanityMode|DontSaveObject|Drop|Enable|EnableBirthMenu|EnableClassMenu|EnableInventoryMenu|EnableLevelUpMenu|EnableLevitation|EnableMagicMenu|EnableMapMenu|EnableNameMenu|EnablePlayerControls|EnablePlayerFighting|EnablePlayerJumping|EnablePlayerLooking|EnablePlayerMagic|EnablePlayerViewSwitch|EnableRaceMenu|EnableRest|EnableStatsMenu|EnableTeleporting|EnableVanityMode|Equip|ExplodeSpell|Face|FadeIn|FadeOut|FadeTo|Fall|ForceGreeting|ForceJump|ForceRun|ForceSneak|Flee|GotoJail|HurtCollidingActor|HurtStandingActor|Journal|Lock|LoopGroup|LowerRank|MenuTest|MessageBox|ModAcrobatics|ModAgility|ModAlarm|ModAlchemy|ModAlteration|ModArmorB
onus|ModArmorer|ModAthletics|ModAttackBonus|ModAxe|ModBlock|ModBluntWeapon|ModCastPenalty|ModChameleon|ModConjuration|ModCurrentFatigue|ModCurrentHealth|ModCurrentMagicka|ModDefendBonus|ModDestruction|ModDisposition|ModEnchant|ModEndurance|ModFactionReaction|ModFatigue|ModFight|ModFlee|ModFlying|ModHandToHand|ModHealth|ModHeavyArmor|ModIllusion|ModIntelligence|ModInvisible|ModLightArmor|ModLongBlade|ModLuck|ModMagicka|ModMarksman|ModMediumArmor|ModMercantile|ModMysticism|ModParalysis|ModPCCrimeLevel|ModPCFacRep|ModPersonality|ModRegion|ModReputation|ModResistBlight|ModResistCorprus|ModResistDisease|ModResistFire|ModResistFrost|ModResistMagicka|ModResistNormalWeapons|ModResistParalysis|ModResistPoison|ModResistShock|ModRestoration|ModScale|ModSecurity|ModShortBlade|ModSilence|ModSneak|ModSpear|ModSpeechcraft|ModSpeed|ModStrength|ModSuperJump|ModSwimSpeed|ModUnarmored|ModWaterBreathing|ModWaterLevel|ModWaterWalking|ModWillpower|Move|MoveWorld|PayFine|PayFineThief|PCClearExpelled|PCExpell|PCForce1stPerson|PCForce3rdPerson|PCJoinFaction|PCLowerRank|PCRaiseRank|PlaceAtMe|PlaceAtPC|PlaceItem|PlaceItemCell|PlayBink|PlayGroup|PlayLoopSound3D|PlayLoopSound3DVP|PlaySound|PlaySound3D|PlaySound3DVP|PlaySoundVP|Position|PositionCell|RaiseRank|RemoveEffects|RemoveFromLevCreature|RemoveFromLevItem|RemoveItem|RemoveSoulgem|RemoveSpell|RemoveSpellEffects|ResetActors|Resurrect|Rotate|RotateWorld|Say|StartScript|[S|s]et|SetAcrobatics|SetAgility|SetAlarm|SetAlchemy|SetAlteration|SetAngle|SetArmorBonus|SetArmorer|SetAthletics|SetAtStart|SetAttackBonus|SetAxe|SetBlock|SetBluntWeapon|SetCastPenalty|SetChameleon|SetConjuration|SetDelete|SetDefendBonus|SetDestruction|SetDisposition|SetEnchant|SetEndurance|SetFactionReaction|SetFatigue|SetFight|SetFlee|SetFlying|SetHandToHand|SetHealth|SetHeavyArmor|SetIllusion|SetIntelligence|SetInvisible|SetJournalIndex|SetLightArmor|SetLevel|SetLongBlade|SetLuck|SetMagicka|SetMarksman|SetMediumArmor|SetMercantile|SetMysticism|SetParalysis|SetPCCCrimeLevel
|SetPCFacRep|SetPersonality|SetPos|SetReputation|SetResistBlight|SetResistCorprus|SetResistDisease|SetResistFire|SetResistFrost|SetResistMagicka|SetResistNormalWeapons|SetResistParalysis|SetResistPoison|SetResistShock|SetRestoration|SetScale|SetSecurity|SetShortBlade|SetSilence|SetSneak|SetSpear|SetSpeechcraft|SetSpeed|SetStrength|SetSuperJump|SetSwimSpeed|SetUnarmored|SetWaterBreathing|SetWaterlevel|SetWaterWalking|SetWerewolfAcrobatics|SetWillpower|ShowMap|ShowRestMenu|SkipAnim|StartCombat|StopCombat|StopScript|StopSound|StreamMusic|TurnMoonRed|TurnMoonWhite|UndoWerewolf|Unlock|WakeUpPC|CenterOnCell|CenterOnExterior|FillMap|FixMe|ToggleAI|ToggleCollision|ToggleFogOfWar|ToggleGodMode|ToggleMenus|ToggleSky|ToggleWorld|ToggleVanityMode|CellChanged|GetAcrobatics|GetAgility|GetAIPackageDone|GetAlarm|GetAlchemy|GetAlteration|GetAngle|GetArmorBonus|GetArmorer|GetAthletics|GetAttackBonus|GetAttacked|GetArmorType,|GetAxe|GetBlightDisease|GetBlock|GetBluntWeapon|GetButtonPressed|GetCastPenalty|GetChameleon|GetCollidingActor|GetCollidingPC|GetCommonDisease|GetConjuration|GetCurrentAIPackage|GetCurrentTime|GetCurrentWeather|GetDeadCount|GetDefendBonus|GetDestruction|GetDetected|GetDisabled|GetDisposition|GetDistance|GetEffect|GetEnchant|GetEndurance|GetFatigue|GetFight|GetFlee|GetFlying|GetForceJump|GetForceRun|GetForceSneak|GetHandToHand|GetHealth|GetHealthGetRatio|GetHeavyArmor|GetIllusion|GetIntelligence|GetInterior|GetInvisible|GetItemCount|GetJournalIndex|GetLightArmor|GetLineOfSight|GetLOS|GetLevel|GetLocked|GetLongBlade|GetLuck|GetMagicka|GetMarksman|GetMasserPhase|GetSecundaPhase|GetMediumArmor|GetMercantile|GetMysticism|GetParalysis|GetPCCell|GetPCCrimeLevel|GetPCinJail|GetPCJumping|GetPCRank|GetPCRunning|GetPCSleep|GetPCSneaking|GetPCTraveling|GetPersonality|GetPlayerControlsDisabled|GetPlayerFightingDisabled|GetPlayerJumpingDisabled|GetPlayerLookingDisabled|GetPlayerMagicDisabled|GetPos|GetRace|GetReputation|GetResistBlight|GetResistCorprus|GetResistDisease|GetResi
stFire|GetResistFrost|GetResistMagicka|GetResistNormalWeapons|GetResistParalysis|GetResistPoison|GetResistShock|GetRestoration|GetScale|GetSecondsPassed|GetSecurity|GetShortBlade|GetSilence|GetSneak|GetSoundPlaying|GetSpear|GetSpeechcraft|GetSpeed|GetSpell|GetSpellEffects|GetSpellReadied|GetSquareRoot|GetStandingActor|GetStandingPC|GetStrength|GetSuperJump|GetSwimSpeed|GetTarget|GetUnarmored|GetVanityModeDisabled|GetWaterBreathing|GetWaterLevel|GetWaterWalking|GetWeaponDrawn|GetWeaponType|GetWerewolfKills|GetWillpower|GetWindSpeed|HasItemEquipped|HasSoulgem|HitAttemptOnMe|HitOnMe|IsWerewolf|MenuMode|OnActivate|OnDeath|OnKnockout|OnMurder|PCExpelled|PCGet3rdPerson|PCKnownWerewolf|Random|RepairedOnMe|SameFaction|SayDone|ScriptRunning|AllowWereWolfForceGreeting|Companion|MinimumProfit|NoFlee|NoHello|NoIdle|NoLore|OnPCAdd|OnPCDrop|OnPCEquip|OnPCHitMe|OnPCRepair|PCSkipEquip|OnPCSoulGemUse|StayOutside|CrimeGoldDiscount|CrimeGoldTurnIn|Day|DaysPassed|GameHour|Month|NPCVoiceDistance|PCRace|PCWerewolf|PCVampire|TimeScale|VampClan|Year)\b`, NameBuiltin, nil}, - 
{`(?i)(sEffectWaterBreathing|sEffectSwiftSwim|sEffectWaterWalking|sEffectShield|sEffectFireShield|sEffectLightningShield|sEffectFrostShield|sEffectBurden|sEffectFeather|sEffectJump|sEffectLevitate|sEffectSlowFall|sEffectLock|sEffectOpen|sEffectFireDamage|sEffectShockDamage|sEffectFrostDamage|sEffectDrainAttribute|sEffectDrainHealth|sEffectDrainSpellpoints|sEffectDrainFatigue|sEffectDrainSkill|sEffectDamageAttribute|sEffectDamageHealth|sEffectDamageMagicka|sEffectDamageFatigue|sEffectDamageSkill|sEffectPoison|sEffectWeaknessToFire|sEffectWeaknessToFrost|sEffectWeaknessToShock|sEffectWeaknessToMagicka|sEffectWeaknessToCommonDisease|sEffectWeaknessToBlightDisease|sEffectWeaknessToCorprusDisease|sEffectWeaknessToPoison|sEffectWeaknessToNormalWeapons|sEffectDisintegrateWeapon|sEffectDisintegrateArmor|sEffectInvisibility|sEffectChameleon|sEffectLight|sEffectSanctuary|sEffectNightEye|sEffectCharm|sEffectParalyze|sEffectSilence|sEffectBlind|sEffectSound|sEffectCalmHumanoid|sEffectCalmCreature|sEffectFrenzyHumanoid|sEffectFrenzyCreature|sEffectDemoralizeHumanoid|sEffectDemoralizeCreature|sEffectRallyHumanoid|sEffectRallyCreature|sEffectDispel|sEffectSoultrap|sEffectTelekinesis|sEffectMark|sEffectRecall|sEffectDivineIntervention|sEffectAlmsiviIntervention|sEffectDetectAnimal|sEffectDetectEnchantment|sEffectDetectKey|sEffectSpellAbsorption|sEffectReflect|sEffectCureCommonDisease|sEffectCureBlightDisease|sEffectCureCorprusDisease|sEffectCurePoison|sEffectCureParalyzation|sEffectRestoreAttribute|sEffectRestoreHealth|sEffectRestoreSpellPoints|sEffectRestoreFatigue|sEffectRestoreSkill|sEffectFortifyAttribute|sEffectFortifyHealth|sEffectFortifySpellpoints|sEffectFortifyFatigue|sEffectFortifySkill|sEffectFortifyMagickaMultiplier|sEffectAbsorbAttribute|sEffectAbsorbHealth|sEffectAbsorbSpellPoints|sEffectAbsorbFatigue|sEffectAbsorbSkill|sEffectResistFire|sEffectResistFrost|sEffectResistShock|sEffectResistMagicka|sEffectResistCommonDisease|sEffectResistBlightDisease|sEffectResistCorpru
sDisease|sEffectResistPoison|sEffectResistNormalWeapons|sEffectResistParalysis|sEffectRemoveCurse|sEffectTurnUndead|sEffectSummonScamp|sEffectSummonClannfear|sEffectSummonDaedroth|sEffectSummonDremora|sEffectSummonAncestralGhost|sEffectSummonSkeletalMinion|sEffectSummonLeastBonewalker|sEffectSummonGreaterBonewalker|sEffectSummonBonelord|sEffectSummonWingedTwilight|sEffectSummonHunger|sEffectSummonGoldensaint|sEffectSummonFlameAtronach|sEffectSummonFrostAtronach|sEffectSummonStormAtronach|sEffectFortifyAttackBonus|sEffectCommandCreatures|sEffectCommandHumanoids|sEffectBoundDagger|sEffectBoundLongsword|sEffectBoundMace|sEffectBoundBattleAxe|sEffectBoundSpear|sEffectBoundLongbow|sEffectExtraSpell|sEffectBoundCuirass|sEffectBoundHelm|sEffectBoundBoots|sEffectBoundShield|sEffectBoundGloves|sEffectCorpus|sEffectVampirism|sEffectSummonCenturionSphere|sEffectSunDamage|sEffectStuntedMagicka)`, NameBuiltin, nil}, - }, - "types": { - {`(?i)(short|long|float)\b`, KeywordType, nil}, - }, - "punct": { - {`[()]`, Punctuation, nil}, - }, - "operators": { - {`[#=,./%+\-?]`, Operator, nil}, - {`(==|<=|<|>=|>|!=)`, Operator, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go b/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go deleted file mode 100644 index eac082cff6ba..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go +++ /dev/null @@ -1,44 +0,0 @@ -package m - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" - . "github.com/alecthomas/chroma/lexers/p" // nolint -) - -// Myghty lexer. 
-var Myghty = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Myghty", - Aliases: []string{"myghty"}, - Filenames: []string{"*.myt", "autodelegate"}, - MimeTypes: []string{"application/x-myghty"}, - }, - myghtyRules, -)) - -func myghtyRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`(<%(?:def|method))(\s*)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil}, - {`(<%\w+)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Python2), NameTag), nil}, - {`(<&[^|])(.*?)(,.*?)?(&>)`, ByGroups(NameTag, NameFunction, Using(Python2), NameTag), nil}, - {`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Python2), NameTag), nil}, - {``, NameTag, nil}, - {`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Python2), NameTag), nil}, - {`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil}, - {`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Python2), Other), nil}, - {`(?sx) - (.+?) # anything, followed by: - (?: - (?<=\n)(?=[%#]) | # an eval or comment line - (?== 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) { - // Found at least twice as many `name` as [name]. 
- result += 0.5 - } else if nameBetweenBacktickCount > nameBetweenBracketCount { - result += 0.2 - } else if nameBetweenBacktickCount > 0 { - result += 0.1 - } - - return result -})) - -func mySQLRules() Rules { - return Rules{ - "root": { - {`\s+`, TextWhitespace, nil}, - {`(#|--\s+).*\n?`, CommentSingle, nil}, - {`/\*`, CommentMultiline, Push("multiline-comments")}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`[0-9]*\.[0-9]+(e[+-][0-9]+)`, LiteralNumberFloat, nil}, - {`((?:_[a-z0-9]+)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, - {`((?:_[a-z0-9]+)?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("double-string")}, - {"[+*/<>=~!@#%^&|`?-]", Operator, nil}, - {`\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?`, ByGroups(KeywordType, TextWhitespace, Punctuation), nil}, - 
{`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|identified|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|privileges|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|user|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil}, - {`\b(auto_increment|engine|charset|tables)\b`, KeywordPseudo, nil}, - {`(true|false|null)`, NameConstant, nil}, - {`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, TextWhitespace, Punctuation), nil}, - {`[a-z_]\w*`, Name, nil}, - {`@[a-z0-9]*[._]*[a-z0-9]*`, NameVariable, nil}, - {`[;:()\[\],.]`, Punctuation, nil}, - }, - "multiline-comments": { - {`/\*`, 
CommentMultiline, Push("multiline-comments")}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[^/*]+`, CommentMultiline, nil}, - {`[/*]`, CommentMultiline, nil}, - }, - "string": { - {`[^']+`, LiteralStringSingle, nil}, - {`''`, LiteralStringSingle, nil}, - {`'`, LiteralStringSingle, Pop(1)}, - }, - "double-string": { - {`[^"]+`, LiteralStringDouble, nil}, - {`""`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go b/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go deleted file mode 100644 index de6734e8659e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go +++ /dev/null @@ -1,63 +0,0 @@ -package n - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Nasm lexer. -var Nasm = internal.Register(MustNewLazyLexer( - &Config{ - Name: "NASM", - Aliases: []string{"nasm"}, - Filenames: []string{"*.asm", "*.ASM"}, - MimeTypes: []string{"text/x-nasm"}, - CaseInsensitive: true, - }, - nasmRules, -)) - -func nasmRules() Rules { - return Rules{ - "root": { - {`^\s*%`, CommentPreproc, Push("preproc")}, - Include("whitespace"), - {`[a-z$._?][\w$.?#@~]*:`, NameLabel, nil}, - {`([a-z$._?][\w$.?#@~]*)(\s+)(equ)`, ByGroups(NameConstant, KeywordDeclaration, KeywordDeclaration), Push("instruction-args")}, - {`BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE`, Keyword, Push("instruction-args")}, - {`(?:res|d)[bwdqt]|times`, KeywordDeclaration, Push("instruction-args")}, - {`[a-z$._?][\w$.?#@~]*`, NameFunction, Push("instruction-args")}, - {`[\r\n]+`, Text, nil}, - }, - "instruction-args": { - {"\"(\\\\\"|[^\"\\n])*\"|'(\\\\'|[^'\\n])*'|`(\\\\`|[^`\\n])*`", LiteralString, nil}, - {`(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)`, LiteralNumberHex, nil}, - {`[0-7]+q`, LiteralNumberOct, nil}, - {`[01]+b`, LiteralNumberBin, nil}, - 
{`[0-9]+\.e?[0-9]+`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - Include("punctuation"), - {`r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]`, NameBuiltin, nil}, - {`[a-z$._?][\w$.?#@~]*`, NameVariable, nil}, - {`[\r\n]+`, Text, Pop(1)}, - Include("whitespace"), - }, - "preproc": { - {`[^;\n]+`, CommentPreproc, nil}, - {`;.*?\n`, CommentSingle, Pop(1)}, - {`\n`, CommentPreproc, Pop(1)}, - }, - "whitespace": { - {`\n`, Text, nil}, - {`[ \t]+`, Text, nil}, - {`;.*`, CommentSingle, nil}, - }, - "punctuation": { - {`[,():\[\]]+`, Punctuation, nil}, - {`[&|^<>+*/%~-]+`, Operator, nil}, - {`[$]+`, KeywordConstant, nil}, - {`seg|wrt|strict`, OperatorWord, nil}, - {`byte|[dq]?word`, KeywordType, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go b/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go deleted file mode 100644 index c9d3ae032b3d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go +++ /dev/null @@ -1,59 +0,0 @@ -package n - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Newspeak lexer. 
-var Newspeak = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Newspeak", - Aliases: []string{"newspeak"}, - Filenames: []string{"*.ns2"}, - MimeTypes: []string{"text/x-newspeak"}, - }, - newspeakRules, -)) - -func newspeakRules() Rules { - return Rules{ - "root": { - {`\b(Newsqueak2)\b`, KeywordDeclaration, nil}, - {`'[^']*'`, LiteralString, nil}, - {`\b(class)(\s+)(\w+)(\s*)`, ByGroups(KeywordDeclaration, Text, NameClass, Text), nil}, - {`\b(mixin|self|super|private|public|protected|nil|true|false)\b`, Keyword, nil}, - {`(\w+\:)(\s*)([a-zA-Z_]\w+)`, ByGroups(NameFunction, Text, NameVariable), nil}, - {`(\w+)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, - {`<\w+>`, CommentSpecial, nil}, - Include("expressionstat"), - Include("whitespace"), - }, - "expressionstat": { - {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`:\w+`, NameVariable, nil}, - {`(\w+)(::)`, ByGroups(NameVariable, Operator), nil}, - {`\w+:`, NameFunction, nil}, - {`\w+`, NameVariable, nil}, - {`\(|\)`, Punctuation, nil}, - {`\[|\]`, Punctuation, nil}, - {`\{|\}`, Punctuation, nil}, - {`(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)`, Operator, nil}, - {`\.|;`, Punctuation, nil}, - Include("whitespace"), - Include("literals"), - }, - "literals": { - {`\$.`, LiteralString, nil}, - {`'[^']*'`, LiteralString, nil}, - {`#'[^']*'`, LiteralStringSymbol, nil}, - {`#\w+:?`, LiteralStringSymbol, nil}, - {`#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+`, LiteralStringSymbol, nil}, - }, - "whitespace": { - {`\s+`, Text, nil}, - {`"[^"]*"`, Comment, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go b/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go deleted file mode 100644 index 6d80523ec781..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go +++ /dev/null @@ -1,51 +0,0 @@ -package n - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Nginx Configuration File lexer. -var Nginx = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Nginx configuration file", - Aliases: []string{"nginx"}, - Filenames: []string{"nginx.conf"}, - MimeTypes: []string{"text/x-nginx-conf"}, - }, - nginxRules, -)) - -func nginxRules() Rules { - return Rules{ - "root": { - {`(include)(\s+)([^\s;]+)`, ByGroups(Keyword, Text, Name), nil}, - {`[^\s;#]+`, Keyword, Push("stmt")}, - Include("base"), - }, - "block": { - {`\}`, Punctuation, Pop(2)}, - {`[^\s;#]+`, KeywordNamespace, Push("stmt")}, - Include("base"), - }, - "stmt": { - {`\{`, Punctuation, Push("block")}, - {`;`, Punctuation, Pop(1)}, - Include("base"), - }, - "base": { - {`#.*\n`, CommentSingle, nil}, - {`on|off`, NameConstant, nil}, - {`\$[^\s;#()]+`, NameVariable, nil}, - {`([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Punctuation, LiteralNumberInteger), nil}, - {`[a-z-]+/[a-z-+]+`, LiteralString, nil}, - {`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, - {`(~)(\s*)([^\s{]+)`, ByGroups(Punctuation, Text, LiteralStringRegex), nil}, - {`[:=~]`, Punctuation, nil}, - {`[^\s;#{}$]+`, LiteralString, nil}, - {`/[^\s;#]*`, Name, nil}, - {`\s+`, Text, nil}, - {`[$;]`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nim.go b/vendor/github.com/alecthomas/chroma/lexers/n/nim.go deleted file mode 100644 index 3f98086b45f0..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/n/nim.go +++ /dev/null @@ -1,97 +0,0 @@ -package n - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Nim lexer. 
-var Nim = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Nim", - Aliases: []string{"nim", "nimrod"}, - Filenames: []string{"*.nim", "*.nimrod"}, - MimeTypes: []string{"text/x-nim"}, - CaseInsensitive: true, - }, - nimRules, -)) - -func nimRules() Rules { - return Rules{ - "root": { - {`#\[[\s\S]*?\]#`, CommentMultiline, nil}, - {`##.*$`, LiteralStringDoc, nil}, - {`#.*$`, Comment, nil}, - {`[*=><+\-/@$~&%!?|\\\[\]]`, Operator, nil}, - {"\\.\\.|\\.|,|\\[\\.|\\.\\]|\\{\\.|\\.\\}|\\(\\.|\\.\\)|\\{|\\}|\\(|\\)|:|\\^|`|;", Punctuation, nil}, - {`(?:[\w]+)"`, LiteralString, Push("rdqs")}, - {`"""`, LiteralString, Push("tdqs")}, - {`"`, LiteralString, Push("dqs")}, - {`'`, LiteralStringChar, Push("chars")}, - {`(a_?n_?d_?|o_?r_?|n_?o_?t_?|x_?o_?r_?|s_?h_?l_?|s_?h_?r_?|d_?i_?v_?|m_?o_?d_?|i_?n_?|n_?o_?t_?i_?n_?|i_?s_?|i_?s_?n_?o_?t_?)\b`, OperatorWord, nil}, - {`(p_?r_?o_?c_?\s)(?![(\[\]])`, Keyword, Push("funcname")}, - {`(a_?d_?d_?r_?|a_?n_?d_?|a_?s_?|a_?s_?m_?|a_?t_?o_?m_?i_?c_?|b_?i_?n_?d_?|b_?l_?o_?c_?k_?|b_?r_?e_?a_?k_?|c_?a_?s_?e_?|c_?a_?s_?t_?|c_?o_?n_?c_?e_?p_?t_?|c_?o_?n_?s_?t_?|c_?o_?n_?t_?i_?n_?u_?e_?|c_?o_?n_?v_?e_?r_?t_?e_?r_?|d_?e_?f_?e_?r_?|d_?i_?s_?c_?a_?r_?d_?|d_?i_?s_?t_?i_?n_?c_?t_?|d_?i_?v_?|d_?o_?|e_?l_?i_?f_?|e_?l_?s_?e_?|e_?n_?d_?|e_?n_?u_?m_?|e_?x_?c_?e_?p_?t_?|e_?x_?p_?o_?r_?t_?|f_?i_?n_?a_?l_?l_?y_?|f_?o_?r_?|f_?u_?n_?c_?|i_?f_?|i_?n_?|y_?i_?e_?l_?d_?|i_?n_?t_?e_?r_?f_?a_?c_?e_?|i_?s_?|i_?s_?n_?o_?t_?|i_?t_?e_?r_?a_?t_?o_?r_?|l_?e_?t_?|m_?a_?c_?r_?o_?|m_?e_?t_?h_?o_?d_?|m_?i_?x_?i_?n_?|m_?o_?d_?|n_?o_?t_?|n_?o_?t_?i_?n_?|o_?b_?j_?e_?c_?t_?|o_?f_?|o_?r_?|o_?u_?t_?|p_?r_?o_?c_?|p_?t_?r_?|r_?a_?i_?s_?e_?|r_?e_?f_?|r_?e_?t_?u_?r_?n_?|s_?h_?a_?r_?e_?d_?|s_?h_?l_?|s_?h_?r_?|s_?t_?a_?t_?i_?c_?|t_?e_?m_?p_?l_?a_?t_?e_?|t_?r_?y_?|t_?u_?p_?l_?e_?|t_?y_?p_?e_?|w_?h_?e_?n_?|w_?h_?i_?l_?e_?|w_?i_?t_?h_?|w_?i_?t_?h_?o_?u_?t_?|x_?o_?r_?)\b`, Keyword, nil}, - {`(f_?r_?o_?m_?|i_?m_?p_?o_?r_?t_?|i_?n_?c_?l_?u_?d_?e_?)\b`, KeywordNamespace, nil}, - 
{`(v_?a_?r)\b`, KeywordDeclaration, nil}, - {`(i_?n_?t_?|i_?n_?t_?8_?|i_?n_?t_?1_?6_?|i_?n_?t_?3_?2_?|i_?n_?t_?6_?4_?|f_?l_?o_?a_?t_?|f_?l_?o_?a_?t_?3_?2_?|f_?l_?o_?a_?t_?6_?4_?|b_?o_?o_?l_?|c_?h_?a_?r_?|r_?a_?n_?g_?e_?|a_?r_?r_?a_?y_?|s_?e_?q_?|s_?e_?t_?|s_?t_?r_?i_?n_?g_?)\b`, KeywordType, nil}, - {`(n_?i_?l_?|t_?r_?u_?e_?|f_?a_?l_?s_?e_?)\b`, KeywordPseudo, nil}, - {`\b_\b`, Name, nil}, // Standalone _ used as discardable variable identifier - {`\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*`, Name, nil}, - {`[0-9][0-9_]*(?=([e.]|\'f(32|64)))`, LiteralNumberFloat, Push("float-suffix", "float-number")}, - {`0x[a-f0-9][a-f0-9_]*`, LiteralNumberHex, Push("int-suffix")}, - {`0b[01][01_]*`, LiteralNumberBin, Push("int-suffix")}, - {`0o[0-7][0-7_]*`, LiteralNumberOct, Push("int-suffix")}, - {`[0-9][0-9_]*`, LiteralNumberInteger, Push("int-suffix")}, - {`\s+`, Text, nil}, - {`.+$`, Error, nil}, - }, - "chars": { - {`\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})`, LiteralStringEscape, nil}, - {`'`, LiteralStringChar, Pop(1)}, - {`.`, LiteralStringChar, nil}, - }, - "strings": { - {`(? <= < >= > *")...), Operator, nil}, - {`[;:]`, Punctuation, nil}, - }, - "comment": { - {`\*/`, CommentMultiline, Pop(1)}, - {`.|\n`, CommentMultiline, nil}, - }, - "paren": { - {`\)`, Punctuation, Pop(1)}, - Include("root"), - }, - "list": { - {`\]`, Punctuation, Pop(1)}, - Include("root"), - }, - "qstring": { - {`"`, StringDouble, Pop(1)}, - {`\${`, StringInterpol, Push("interpol")}, - {`\\.`, StringEscape, nil}, - {`.|\n`, StringDouble, nil}, - }, - "istring": { - {`''\$`, StringEscape, nil}, // "$" - {`'''`, StringEscape, nil}, // "''" - {`''\\.`, StringEscape, nil}, // "\." - {`''`, StringSingle, Pop(1)}, - {`\${`, StringInterpol, Push("interpol")}, - // The next rule is important: "$" escapes any symbol except "{"! - {`\$.`, StringSingle, nil}, // "$." 
- {`.|\n`, StringSingle, nil}, - }, - "scope": { - {`}:`, Punctuation, Pop(1)}, - {`}`, Punctuation, Pop(1)}, - {`in` + nixb, Keyword, Pop(1)}, - {`\${`, StringInterpol, Push("interpol")}, - Include("root"), // "==" has to be above "=" - {Words(``, ``, strings.Fields("= ? ,")...), Operator, nil}, - }, - "interpol": { - {`}`, StringInterpol, Pop(1)}, - Include("root"), - }, - "id": { - {`[a-zA-Z_][a-zA-Z0-9_'-]*`, Name, nil}, - }, - "uri": { - {`[a-zA-Z][a-zA-Z0-9+.-]*:[a-zA-Z0-9%/?:@&=+$,_.!~*'-]+`, StringDoc, nil}, - }, - "path": { - {`[a-zA-Z0-9._+-]*(/[a-zA-Z0-9._+-]+)+`, StringRegex, nil}, - {`~(/[a-zA-Z0-9._+-]+)+/?`, StringRegex, nil}, - {`<[a-zA-Z0-9._+-]+(/[a-zA-Z0-9._+-]+)*>`, StringRegex, nil}, - }, - "int": { - {`-?[0-9]+` + nixb, NumberInteger, nil}, - }, - "float": { - {`-?(([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?` + nixb, NumberFloat, nil}, - }, - "space": { - {`[ \t\r\n]+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go b/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go deleted file mode 100644 index 0ae302960943..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go +++ /dev/null @@ -1,169 +0,0 @@ -package o - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Objective-C lexer. 
-var ObjectiveC = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Objective-C", - Aliases: []string{"objective-c", "objectivec", "obj-c", "objc"}, - Filenames: []string{"*.m", "*.h"}, - MimeTypes: []string{"text/x-objective-c"}, - }, - objectiveCRules, -)) - -func objectiveCRules() Rules { - return Rules{ - "statements": { - {`@"`, LiteralString, Push("string")}, - {`@(YES|NO)`, LiteralNumber, nil}, - {`@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, - {`@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?`, LiteralNumberFloat, nil}, - {`@(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`@0x[0-9a-fA-F]+[Ll]?`, LiteralNumberHex, nil}, - {`@0[0-7]+[Ll]?`, LiteralNumberOct, nil}, - {`@\d+[Ll]?`, LiteralNumberInteger, nil}, - {`@\(`, Literal, Push("literal_number")}, - {`@\[`, Literal, Push("literal_array")}, - {`@\{`, Literal, Push("literal_dictionary")}, - {Words(``, `\b`, `@selector`, `@private`, `@protected`, `@public`, `@encode`, `@synchronized`, `@try`, `@throw`, `@catch`, `@finally`, `@end`, `@property`, `@synthesize`, `__bridge`, `__bridge_transfer`, `__autoreleasing`, `__block`, `__weak`, `__strong`, `weak`, `strong`, `copy`, `retain`, `assign`, `unsafe_unretained`, `atomic`, `nonatomic`, `readonly`, `readwrite`, `setter`, `getter`, `typeof`, `in`, `out`, `inout`, `release`, `class`, `@dynamic`, `@optional`, `@required`, `@autoreleasepool`), Keyword, nil}, - {Words(``, `\b`, `id`, `instancetype`, `Class`, `IMP`, `SEL`, `BOOL`, `IBOutlet`, `IBAction`, `unichar`), KeywordType, nil}, - {`@(true|false|YES|NO)\n`, NameBuiltin, nil}, - {`(YES|NO|nil|self|super)\b`, NameBuiltin, nil}, - {`(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b`, KeywordType, nil}, - {`(TRUE|FALSE)\b`, NameBuiltin, nil}, - {`(@interface|@implementation)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_classname")}, - {`(@class|@protocol)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_forward_classname")}, - {`@`, Punctuation, nil}, - {`(L?)(")`, 
ByGroups(LiteralStringAffix, LiteralString), Push("string")}, - {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, - {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, - {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, - {`\d+[LlUu]*`, LiteralNumberInteger, nil}, - {`\*/`, Error, nil}, - {`[~!%^&*+=|?:<>/-]`, Operator, nil}, - {`[()\[\],.]`, Punctuation, nil}, - {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, - {`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil}, - {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, - {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, - {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, - {`(true|false|NULL)\b`, NameBuiltin, nil}, - {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "oc_classname": { - {`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)`, ByGroups(NameClass, Text, NameClass, Text, Punctuation), Push("#pop", "oc_ivars")}, - {`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?`, ByGroups(NameClass, Text, NameClass), Pop(1)}, - {`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)`, ByGroups(NameClass, Text, NameLabel, Text, Punctuation), Push("#pop", "oc_ivars")}, - 
{`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))`, ByGroups(NameClass, Text, NameLabel), Pop(1)}, - {`([a-zA-Z$_][\w$]*)(\s*)(\{)`, ByGroups(NameClass, Text, Punctuation), Push("#pop", "oc_ivars")}, - {`([a-zA-Z$_][\w$]*)`, NameClass, Pop(1)}, - }, - "oc_forward_classname": { - {`([a-zA-Z$_][\w$]*)(\s*,\s*)`, ByGroups(NameClass, Text), Push("oc_forward_classname")}, - {`([a-zA-Z$_][\w$]*)(\s*;?)`, ByGroups(NameClass, Text), Pop(1)}, - }, - "oc_ivars": { - Include("whitespace"), - Include("statements"), - {`;`, Punctuation, nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "root": { - {`^([-+])(\s*)(\(.*?\))?(\s*)([a-zA-Z$_][\w$]*:?)`, ByGroups(Punctuation, Text, UsingSelf("root"), Text, NameFunction), Push("method")}, - Include("whitespace"), - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, - {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, - Default(Push("statement")), - }, - "method": { - Include("whitespace"), - {`,`, Punctuation, nil}, - {`\.\.\.`, Punctuation, nil}, - {`(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)`, ByGroups(UsingSelf("root"), Text, NameVariable), nil}, - {`[a-zA-Z$_][\w$]*:`, NameFunction, nil}, - {`;`, Punctuation, Pop(1)}, - {`\{`, Punctuation, Push("function")}, - Default(Pop(1)), - }, - "literal_number": { - {`\(`, Punctuation, Push("literal_number_inner")}, - {`\)`, Literal, Pop(1)}, - Include("statement"), - }, - "literal_number_inner": { - {`\(`, Punctuation, Push()}, - {`\)`, Punctuation, Pop(1)}, - Include("statement"), - }, - "literal_array": { - {`\[`, Punctuation, Push("literal_array_inner")}, - {`\]`, Literal, Pop(1)}, - Include("statement"), - }, - "literal_array_inner": { - {`\[`, Punctuation, Push()}, - {`\]`, Punctuation, Pop(1)}, - Include("statement"), - }, - 
"literal_dictionary": { - {`\}`, Literal, Pop(1)}, - Include("statement"), - }, - "whitespace": { - {`^#if\s+0`, CommentPreproc, Push("if0")}, - {`^#`, CommentPreproc, Push("macro")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, - {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, - {`\n`, Text, nil}, - {`\s+`, Text, nil}, - {`\\\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - }, - "statement": { - Include("whitespace"), - Include("statements"), - {`[{}]`, Punctuation, nil}, - {`;`, Punctuation, Pop(1)}, - }, - "function": { - Include("whitespace"), - Include("statements"), - {`;`, Punctuation, nil}, - {`\{`, Punctuation, Push()}, - {`\}`, Punctuation, Pop(1)}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`\\\n`, LiteralString, nil}, - {`\\`, LiteralString, nil}, - }, - "macro": { - {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, - {`[^/\n]+`, CommentPreproc, nil}, - {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, - {`//.*?\n`, CommentSingle, Pop(1)}, - {`/`, CommentPreproc, nil}, - {`(?<=\\)\n`, CommentPreproc, nil}, - {`\n`, CommentPreproc, Pop(1)}, - }, - "if0": { - {`^\s*#if.*?(?|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, - {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, - {`\b(and|asr|land|lor|lsl|lxor|mod|or)\b`, OperatorWord, nil}, - {`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil}, - {`[^\W\d][\w']*`, Name, nil}, - {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, - 
{`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, - {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, - {`0[bB][01][01_]*`, LiteralNumberBin, nil}, - {`\d[\d_]*`, LiteralNumberInteger, nil}, - {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, - {`'.'`, LiteralStringChar, nil}, - {`'`, Keyword, nil}, - {`"`, LiteralStringDouble, Push("string")}, - {`[~?][a-z][\w\']*:`, NameVariable, nil}, - }, - "comment": { - {`[^(*)]+`, Comment, nil}, - {`\(\*`, Comment, Push()}, - {`\*\)`, Comment, Pop(1)}, - {`[(*)]`, Comment, nil}, - }, - "string": { - {`[^\\"]+`, LiteralStringDouble, nil}, - Include("escape-sequence"), - {`\\\n`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - "dotted": { - {`\s+`, Text, nil}, - {`\.`, Punctuation, nil}, - {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, - {`[A-Z][\w\']*`, NameClass, Pop(1)}, - {`[a-z_][\w\']*`, Name, Pop(1)}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/octave.go b/vendor/github.com/alecthomas/chroma/lexers/o/octave.go deleted file mode 100644 index 99fe298c7aea..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/o/octave.go +++ /dev/null @@ -1,50 +0,0 @@ -package o - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Octave lexer. 
-var Octave = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Octave", - Aliases: []string{"octave"}, - Filenames: []string{"*.m"}, - MimeTypes: []string{"text/octave"}, - }, - octaveRules, -)) - -func octaveRules() Rules { - return Rules{ - "root": { - {`[%#].*$`, Comment, nil}, - {`^\s*function`, Keyword, Push("deffunc")}, - {Words(``, `\b`, `__FILE__`, `__LINE__`, `break`, `case`, `catch`, `classdef`, `continue`, `do`, `else`, `elseif`, `end`, `end_try_catch`, `end_unwind_protect`, `endclassdef`, `endevents`, `endfor`, `endfunction`, `endif`, `endmethods`, `endproperties`, `endswitch`, `endwhile`, `events`, `for`, `function`, `get`, `global`, `if`, `methods`, `otherwise`, `persistent`, `properties`, `return`, `set`, `static`, `switch`, `try`, `until`, `unwind_protect`, `unwind_protect_cleanup`, `while`), Keyword, nil}, - {Words(``, `\b`, `addlistener`, `addpath`, `addproperty`, `all`, `and`, `any`, `argnames`, `argv`, `assignin`, `atexit`, `autoload`, `available_graphics_toolkits`, `beep_on_error`, `bitand`, `bitmax`, `bitor`, `bitshift`, `bitxor`, `cat`, `cell`, `cellstr`, `char`, `class`, `clc`, `columns`, `command_line_path`, `completion_append_char`, `completion_matches`, `complex`, `confirm_recursive_rmdir`, `cputime`, `crash_dumps_octave_core`, `ctranspose`, `cumprod`, `cumsum`, `debug_on_error`, `debug_on_interrupt`, `debug_on_warning`, `default_save_options`, `dellistener`, `diag`, `diff`, `disp`, `doc_cache_file`, `do_string_escapes`, `double`, `drawnow`, `e`, `echo_executing_commands`, `eps`, `eq`, `errno`, `errno_list`, `error`, `eval`, `evalin`, `exec`, `exist`, `exit`, `eye`, `false`, `fclear`, `fclose`, `fcntl`, `fdisp`, `feof`, `ferror`, `feval`, `fflush`, `fgetl`, `fgets`, `fieldnames`, `file_in_loadpath`, `file_in_path`, `filemarker`, `filesep`, `find_dir_in_path`, `fixed_point_format`, `fnmatch`, `fopen`, `fork`, `formula`, `fprintf`, `fputs`, `fread`, `freport`, `frewind`, `fscanf`, `fseek`, `fskipl`, `ftell`, `functions`, `fwrite`, 
`ge`, `genpath`, `get`, `getegid`, `getenv`, `geteuid`, `getgid`, `getpgrp`, `getpid`, `getppid`, `getuid`, `glob`, `gt`, `gui_mode`, `history_control`, `history_file`, `history_size`, `history_timestamp_format_string`, `home`, `horzcat`, `hypot`, `ifelse`, `ignore_function_time_stamp`, `inferiorto`, `info_file`, `info_program`, `inline`, `input`, `intmax`, `intmin`, `ipermute`, `is_absolute_filename`, `isargout`, `isbool`, `iscell`, `iscellstr`, `ischar`, `iscomplex`, `isempty`, `isfield`, `isfloat`, `isglobal`, `ishandle`, `isieee`, `isindex`, `isinteger`, `islogical`, `ismatrix`, `ismethod`, `isnull`, `isnumeric`, `isobject`, `isreal`, `is_rooted_relative_filename`, `issorted`, `isstruct`, `isvarname`, `kbhit`, `keyboard`, `kill`, `lasterr`, `lasterror`, `lastwarn`, `ldivide`, `le`, `length`, `link`, `linspace`, `logical`, `lstat`, `lt`, `make_absolute_filename`, `makeinfo_program`, `max_recursion_depth`, `merge`, `methods`, `mfilename`, `minus`, `mislocked`, `mkdir`, `mkfifo`, `mkstemp`, `mldivide`, `mlock`, `mouse_wheel_zoom`, `mpower`, `mrdivide`, `mtimes`, `munlock`, `nargin`, `nargout`, `native_float_format`, `ndims`, `ne`, `nfields`, `nnz`, `norm`, `not`, `numel`, `nzmax`, `octave_config_info`, `octave_core_file_limit`, `octave_core_file_name`, `octave_core_file_options`, `ones`, `or`, `output_max_field_width`, `output_precision`, `page_output_immediately`, `page_screen_output`, `path`, `pathsep`, `pause`, `pclose`, `permute`, `pi`, `pipe`, `plus`, `popen`, `power`, `print_empty_dimensions`, `printf`, `print_struct_array_contents`, `prod`, `program_invocation_name`, `program_name`, `putenv`, `puts`, `pwd`, `quit`, `rats`, `rdivide`, `readdir`, `readlink`, `read_readline_init_file`, `realmax`, `realmin`, `rehash`, `rename`, `repelems`, `re_read_readline_init_file`, `reset`, `reshape`, `resize`, `restoredefaultpath`, `rethrow`, `rmdir`, `rmfield`, `rmpath`, `rows`, `save_header_format_string`, `save_precision`, `saving_history`, `scanf`, `set`, `setenv`, 
`shell_cmd`, `sighup_dumps_octave_core`, `sigterm_dumps_octave_core`, `silent_functions`, `single`, `size`, `size_equal`, `sizemax`, `sizeof`, `sleep`, `source`, `sparse_auto_mutate`, `split_long_rows`, `sprintf`, `squeeze`, `sscanf`, `stat`, `stderr`, `stdin`, `stdout`, `strcmp`, `strcmpi`, `string_fill_char`, `strncmp`, `strncmpi`, `struct`, `struct_levels_to_print`, `strvcat`, `subsasgn`, `subsref`, `sum`, `sumsq`, `superiorto`, `suppress_verbose_help_message`, `symlink`, `system`, `tic`, `tilde_expand`, `times`, `tmpfile`, `tmpnam`, `toc`, `toupper`, `transpose`, `true`, `typeinfo`, `umask`, `uminus`, `uname`, `undo_string_escapes`, `unlink`, `uplus`, `upper`, `usage`, `usleep`, `vec`, `vectorize`, `vertcat`, `waitpid`, `warning`, `warranty`, `whos_line_format`, `yes_or_no`, `zeros`, `inf`, `Inf`, `nan`, `NaN`, `close`, `load`, `who`, `whos`, `accumarray`, `accumdim`, `acosd`, `acotd`, `acscd`, `addtodate`, `allchild`, `ancestor`, `anova`, `arch_fit`, `arch_rnd`, `arch_test`, `area`, `arma_rnd`, `arrayfun`, `ascii`, `asctime`, `asecd`, `asind`, `assert`, `atand`, `autoreg_matrix`, `autumn`, `axes`, `axis`, `bar`, `barh`, `bartlett`, `bartlett_test`, `beep`, `betacdf`, `betainv`, `betapdf`, `betarnd`, `bicgstab`, `bicubic`, `binary`, `binocdf`, `binoinv`, `binopdf`, `binornd`, `bitcmp`, `bitget`, `bitset`, `blackman`, `blanks`, `blkdiag`, `bone`, `box`, `brighten`, `calendar`, `cast`, `cauchy_cdf`, `cauchy_inv`, `cauchy_pdf`, `cauchy_rnd`, `caxis`, `celldisp`, `center`, `cgs`, `chisquare_test_homogeneity`, `chisquare_test_independence`, `circshift`, `cla`, `clabel`, `clf`, `clock`, `cloglog`, `closereq`, `colon`, `colorbar`, `colormap`, `colperm`, `comet`, `common_size`, `commutation_matrix`, `compan`, `compare_versions`, `compass`, `computer`, `cond`, `condest`, `contour`, `contourc`, `contourf`, `contrast`, `conv`, `convhull`, `cool`, `copper`, `copyfile`, `cor`, `corrcoef`, `cor_test`, `cosd`, `cotd`, `cov`, `cplxpair`, `cross`, `cscd`, `cstrcat`, `csvread`, 
`csvwrite`, `ctime`, `cumtrapz`, `curl`, `cut`, `cylinder`, `date`, `datenum`, `datestr`, `datetick`, `datevec`, `dblquad`, `deal`, `deblank`, `deconv`, `delaunay`, `delaunayn`, `delete`, `demo`, `detrend`, `diffpara`, `diffuse`, `dir`, `discrete_cdf`, `discrete_inv`, `discrete_pdf`, `discrete_rnd`, `display`, `divergence`, `dlmwrite`, `dos`, `dsearch`, `dsearchn`, `duplication_matrix`, `durbinlevinson`, `ellipsoid`, `empirical_cdf`, `empirical_inv`, `empirical_pdf`, `empirical_rnd`, `eomday`, `errorbar`, `etime`, `etreeplot`, `example`, `expcdf`, `expinv`, `expm`, `exppdf`, `exprnd`, `ezcontour`, `ezcontourf`, `ezmesh`, `ezmeshc`, `ezplot`, `ezpolar`, `ezsurf`, `ezsurfc`, `factor`, `factorial`, `fail`, `fcdf`, `feather`, `fftconv`, `fftfilt`, `fftshift`, `figure`, `fileattrib`, `fileparts`, `fill`, `findall`, `findobj`, `findstr`, `finv`, `flag`, `flipdim`, `fliplr`, `flipud`, `fpdf`, `fplot`, `fractdiff`, `freqz`, `freqz_plot`, `frnd`, `fsolve`, `f_test_regression`, `ftp`, `fullfile`, `fzero`, `gamcdf`, `gaminv`, `gampdf`, `gamrnd`, `gca`, `gcbf`, `gcbo`, `gcf`, `genvarname`, `geocdf`, `geoinv`, `geopdf`, `geornd`, `getfield`, `ginput`, `glpk`, `gls`, `gplot`, `gradient`, `graphics_toolkit`, `gray`, `grid`, `griddata`, `griddatan`, `gtext`, `gunzip`, `gzip`, `hadamard`, `hamming`, `hankel`, `hanning`, `hggroup`, `hidden`, `hilb`, `hist`, `histc`, `hold`, `hot`, `hotelling_test`, `housh`, `hsv`, `hurst`, `hygecdf`, `hygeinv`, `hygepdf`, `hygernd`, `idivide`, `ifftshift`, `image`, `imagesc`, `imfinfo`, `imread`, `imshow`, `imwrite`, `index`, `info`, `inpolygon`, `inputname`, `interpft`, `interpn`, `intersect`, `invhilb`, `iqr`, `isa`, `isdefinite`, `isdir`, `is_duplicate_entry`, `isequal`, `isequalwithequalnans`, `isfigure`, `ishermitian`, `ishghandle`, `is_leap_year`, `isletter`, `ismac`, `ismember`, `ispc`, `isprime`, `isprop`, `isscalar`, `issquare`, `isstrprop`, `issymmetric`, `isunix`, `is_valid_file_id`, `isvector`, `jet`, `kendall`, `kolmogorov_smirnov_cdf`, 
`kolmogorov_smirnov_test`, `kruskal_wallis_test`, `krylov`, `kurtosis`, `laplace_cdf`, `laplace_inv`, `laplace_pdf`, `laplace_rnd`, `legend`, `legendre`, `license`, `line`, `linkprop`, `list_primes`, `loadaudio`, `loadobj`, `logistic_cdf`, `logistic_inv`, `logistic_pdf`, `logistic_rnd`, `logit`, `loglog`, `loglogerr`, `logm`, `logncdf`, `logninv`, `lognpdf`, `lognrnd`, `logspace`, `lookfor`, `ls_command`, `lsqnonneg`, `magic`, `mahalanobis`, `manova`, `matlabroot`, `mcnemar_test`, `mean`, `meansq`, `median`, `menu`, `mesh`, `meshc`, `meshgrid`, `meshz`, `mexext`, `mget`, `mkpp`, `mode`, `moment`, `movefile`, `mpoles`, `mput`, `namelengthmax`, `nargchk`, `nargoutchk`, `nbincdf`, `nbininv`, `nbinpdf`, `nbinrnd`, `nchoosek`, `ndgrid`, `newplot`, `news`, `nonzeros`, `normcdf`, `normest`, `norminv`, `normpdf`, `normrnd`, `now`, `nthroot`, `null`, `ocean`, `ols`, `onenormest`, `optimget`, `optimset`, `orderfields`, `orient`, `orth`, `pack`, `pareto`, `parseparams`, `pascal`, `patch`, `pathdef`, `pcg`, `pchip`, `pcolor`, `pcr`, `peaks`, `periodogram`, `perl`, `perms`, `pie`, `pink`, `planerot`, `playaudio`, `plot`, `plotmatrix`, `plotyy`, `poisscdf`, `poissinv`, `poisspdf`, `poissrnd`, `polar`, `poly`, `polyaffine`, `polyarea`, `polyderiv`, `polyfit`, `polygcd`, `polyint`, `polyout`, `polyreduce`, `polyval`, `polyvalm`, `postpad`, `powerset`, `ppder`, `ppint`, `ppjumps`, `ppplot`, `ppval`, `pqpnonneg`, `prepad`, `primes`, `print`, `print_usage`, `prism`, `probit`, `qp`, `qqplot`, `quadcc`, `quadgk`, `quadl`, `quadv`, `quiver`, `qzhess`, `rainbow`, `randi`, `range`, `rank`, `ranks`, `rat`, `reallog`, `realpow`, `realsqrt`, `record`, `rectangle_lw`, `rectangle_sw`, `rectint`, `refresh`, `refreshdata`, `regexptranslate`, `repmat`, `residue`, `ribbon`, `rindex`, `roots`, `rose`, `rosser`, `rotdim`, `rref`, `run`, `run_count`, `rundemos`, `run_test`, `runtests`, `saveas`, `saveaudio`, `saveobj`, `savepath`, `scatter`, `secd`, `semilogx`, `semilogxerr`, `semilogy`, 
`semilogyerr`, `setaudio`, `setdiff`, `setfield`, `setxor`, `shading`, `shift`, `shiftdim`, `sign_test`, `sinc`, `sind`, `sinetone`, `sinewave`, `skewness`, `slice`, `sombrero`, `sortrows`, `spaugment`, `spconvert`, `spdiags`, `spearman`, `spectral_adf`, `spectral_xdf`, `specular`, `speed`, `spencer`, `speye`, `spfun`, `sphere`, `spinmap`, `spline`, `spones`, `sprand`, `sprandn`, `sprandsym`, `spring`, `spstats`, `spy`, `sqp`, `stairs`, `statistics`, `std`, `stdnormal_cdf`, `stdnormal_inv`, `stdnormal_pdf`, `stdnormal_rnd`, `stem`, `stft`, `strcat`, `strchr`, `strjust`, `strmatch`, `strread`, `strsplit`, `strtok`, `strtrim`, `strtrunc`, `structfun`, `studentize`, `subplot`, `subsindex`, `subspace`, `substr`, `substruct`, `summer`, `surf`, `surface`, `surfc`, `surfl`, `surfnorm`, `svds`, `swapbytes`, `sylvester_matrix`, `symvar`, `synthesis`, `table`, `tand`, `tar`, `tcdf`, `tempdir`, `tempname`, `test`, `text`, `textread`, `textscan`, `tinv`, `title`, `toeplitz`, `tpdf`, `trace`, `trapz`, `treelayout`, `treeplot`, `triangle_lw`, `triangle_sw`, `tril`, `trimesh`, `triplequad`, `triplot`, `trisurf`, `triu`, `trnd`, `tsearchn`, `t_test`, `t_test_regression`, `type`, `unidcdf`, `unidinv`, `unidpdf`, `unidrnd`, `unifcdf`, `unifinv`, `unifpdf`, `unifrnd`, `union`, `unique`, `unix`, `unmkpp`, `unpack`, `untabify`, `untar`, `unwrap`, `unzip`, `u_test`, `validatestring`, `vander`, `var`, `var_test`, `vech`, `ver`, `version`, `view`, `voronoi`, `voronoin`, `waitforbuttonpress`, `wavread`, `wavwrite`, `wblcdf`, `wblinv`, `wblpdf`, `wblrnd`, `weekday`, `welch_test`, `what`, `white`, `whitebg`, `wienrnd`, `wilcoxon_test`, `wilkinson`, `winter`, `xlabel`, `xlim`, `ylabel`, `yulewalker`, `zip`, `zlabel`, `z_test`, `airy`, `amd`, `balance`, `besselh`, `besseli`, `besselj`, `besselk`, `bessely`, `bitpack`, `bsxfun`, `builtin`, `ccolamd`, `cellfun`, `cellslices`, `chol`, `choldelete`, `cholinsert`, `cholinv`, `cholshift`, `cholupdate`, `colamd`, `colloc`, `convhulln`, `convn`, 
`csymamd`, `cummax`, `cummin`, `daspk`, `daspk_options`, `dasrt`, `dasrt_options`, `dassl`, `dassl_options`, `dbclear`, `dbdown`, `dbstack`, `dbstatus`, `dbstop`, `dbtype`, `dbup`, `dbwhere`, `det`, `dlmread`, `dmperm`, `dot`, `eig`, `eigs`, `endgrent`, `endpwent`, `etree`, `fft`, `fftn`, `fftw`, `filter`, `find`, `full`, `gcd`, `getgrent`, `getgrgid`, `getgrnam`, `getpwent`, `getpwnam`, `getpwuid`, `getrusage`, `givens`, `gmtime`, `gnuplot_binary`, `hess`, `ifft`, `ifftn`, `inv`, `isdebugmode`, `issparse`, `kron`, `localtime`, `lookup`, `lsode`, `lsode_options`, `lu`, `luinc`, `luupdate`, `matrix_type`, `max`, `min`, `mktime`, `pinv`, `qr`, `qrdelete`, `qrinsert`, `qrshift`, `qrupdate`, `quad`, `quad_options`, `qz`, `rand`, `rande`, `randg`, `randn`, `randp`, `randperm`, `rcond`, `regexp`, `regexpi`, `regexprep`, `schur`, `setgrent`, `setpwent`, `sort`, `spalloc`, `sparse`, `spparms`, `sprank`, `sqrtm`, `strfind`, `strftime`, `strptime`, `strrep`, `svd`, `svd_driver`, `syl`, `symamd`, `symbfact`, `symrcm`, `time`, `tsearch`, `typecast`, `urlread`, `urlwrite`, `abs`, `acos`, `acosh`, `acot`, `acoth`, `acsc`, `acsch`, `angle`, `arg`, `asec`, `asech`, `asin`, `asinh`, `atan`, `atanh`, `beta`, `betainc`, `betaln`, `bincoeff`, `cbrt`, `ceil`, `conj`, `cos`, `cosh`, `cot`, `coth`, `csc`, `csch`, `erf`, `erfc`, `erfcx`, `erfinv`, `exp`, `finite`, `fix`, `floor`, `fmod`, `gamma`, `gammainc`, `gammaln`, `imag`, `isalnum`, `isalpha`, `isascii`, `iscntrl`, `isdigit`, `isfinite`, `isgraph`, `isinf`, `islower`, `isna`, `isnan`, `isprint`, `ispunct`, `isspace`, `isupper`, `isxdigit`, `lcm`, `lgamma`, `log`, `lower`, `mod`, `real`, `rem`, `round`, `roundb`, `sec`, `sech`, `sign`, `sin`, `sinh`, `sqrt`, `tan`, `tanh`, `toascii`, `tolower`, `xor`), NameBuiltin, nil}, - {Words(``, `\b`, `EDITOR`, `EXEC_PATH`, `I`, `IMAGE_PATH`, `NA`, `OCTAVE_HOME`, `OCTAVE_VERSION`, `PAGER`, `PAGER_FLAGS`, `SEEK_CUR`, `SEEK_END`, `SEEK_SET`, `SIG`, `S_ISBLK`, `S_ISCHR`, `S_ISDIR`, `S_ISFIFO`, 
`S_ISLNK`, `S_ISREG`, `S_ISSOCK`, `WCONTINUE`, `WCOREDUMP`, `WEXITSTATUS`, `WIFCONTINUED`, `WIFEXITED`, `WIFSIGNALED`, `WIFSTOPPED`, `WNOHANG`, `WSTOPSIG`, `WTERMSIG`, `WUNTRACED`), NameConstant, nil}, - {`-=|!=|!|/=|--`, Operator, nil}, - {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, - {`\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*`, Operator, nil}, - {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, - {`[\[\](){}:@.,]`, Punctuation, nil}, - {`=|:|;`, Punctuation, nil}, - {`"[^"]*"`, LiteralString, nil}, - {`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`\d+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`(?<=[\w)\].])\'+`, Operator, nil}, - {`(?=|<=|<>|\+|-|=|>|<|\*|/|%)`, Operator, nil}, - {`(;|,|\)|\(|\.)`, Punctuation, nil}, - {Words(``, `\b`, `истина`, `true`, `ложь`, `false`, `и`, `and`, `или`, `or`, `не`, `not`), Operator, nil}, - {Words(``, `\b`, `если`, `if`, `тогда`, `then`, `иначе`, `else`, `иначеесли`, `elsif`, `конецесли`, `endif`), Operator, nil}, - {Words(``, `\b`, `для`, `for`, `каждого`, `each`, `из`, `in`, `цикл`, `do`, `пока`, `while`, `конеццикла`, `enddo`, `по`, `to`), Operator, nil}, - {Words(``, `\b`, `прервать`, `break`, `продолжить`, `continue`, `возврат`, `return`, `перейти`, `goto`), Operator, nil}, - {Words(``, `\b`, `процедура`, `procedure`, `конецпроцедуры`, `endprocedure`, `функция`, `function`, `конецфункции`, `endfunction`), Keyword, nil}, - {Words(``, `\b`, `новый`, `new`, `знач`, `val`, `экспорт`, `export`, `перем`, `var`), Keyword, nil}, - {Words(``, `\b`, `попытка`, `try`, `исключение`, `except`, `вызватьисключение`, `raise`, `конецпопытки`, `endtry`), Keyword, nil}, - {Words(``, `\b`, `выполнить`, `execute`, `вычислить`, `eval`), Keyword, nil}, - {`"`, LiteralString, Push("string")}, - {`[_а-яА-Я0-9][а-яА-Я0-9]*`, Name, nil}, - {`[_\w][\w]*`, Name, nil}, - }, - "string": { - {`""`, LiteralString, nil}, - {`"C?`, LiteralString, Pop(1)}, - {`[^"]+`, LiteralString, 
nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/openedgeabl.go b/vendor/github.com/alecthomas/chroma/lexers/o/openedgeabl.go deleted file mode 100644 index 55619711d4c7..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/o/openedgeabl.go +++ /dev/null @@ -1,50 +0,0 @@ -package o - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// OpenedgeAbl lexer. -var OpenEdgeABL = internal.Register(MustNewLazyLexer( - &Config{ - Name: "OpenEdge ABL", - Aliases: []string{"openedge", "abl", "progress", "openedgeabl"}, - Filenames: []string{"*.p", "*.cls", "*.w", "*.i"}, - MimeTypes: []string{"text/x-openedge", "application/x-openedge"}, - }, - func() Rules { - return Rules{ - "root": { - {`/\*`, CommentMultiline, Push("comment")}, - {`//.*?$`, CommentSingle, nil}, - {`\{`, CommentPreproc, Push("preprocessor")}, - {`\s*&.*`, CommentPreproc, nil}, - {`0[xX][0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, - {`(?i)(DEFINE|DEF|DEFI|DEFIN)\b`, KeywordDeclaration, nil}, - {`(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|COM-HANDLE|DATE|DATETIME|DATETIME-TZ|DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|INT64|INTEGER|INT|INTE|INTEG|INTEGE|LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))`, KeywordType, nil}, - {Words(`(?i)(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ABS`, `ABSO`, `ABSOL`, `ABSOLU`, `ABSOLUT`, `ABSOLUTE`, `ABSTRACT`, `ACCELERATOR`, `ACCUM`, `ACCUMU`, `ACCUMUL`, `ACCUMULA`, `ACCUMULAT`, `ACCUMULATE`, `ACTIVE-FORM`, `ACTIVE-WINDOW`, `ADD`, `ADD-BUFFER`, `ADD-CALC-COLUMN`, `ADD-COLUMNS-FROM`, `ADD-EVENTS-PROCEDURE`, `ADD-FIELDS-FROM`, `ADD-FIRST`, `ADD-INDEX-FIELD`, `ADD-LAST`, `ADD-LIKE-COLUMN`, `ADD-LIKE-FIELD`, `ADD-LIKE-INDEX`, `ADD-NEW-FIELD`, `ADD-NEW-INDEX`, `ADD-SCHEMA-LOCATION`, `ADD-SUPER-PROCEDURE`, `ADM-DATA`, `ADVISE`, `ALERT-BOX`, `ALIAS`, `ALL`, `ALLOW-COLUMN-SEARCHING`, `ALLOW-REPLICATION`, `ALTER`, `ALWAYS-ON-TOP`, `AMBIG`, `AMBIGU`, `AMBIGUO`, 
`AMBIGUOU`, `AMBIGUOUS`, `ANALYZ`, `ANALYZE`, `AND`, `ANSI-ONLY`, `ANY`, `ANYWHERE`, `APPEND`, `APPL-ALERT`, `APPL-ALERT-`, `APPL-ALERT-B`, `APPL-ALERT-BO`, `APPL-ALERT-BOX`, `APPL-ALERT-BOXE`, `APPL-ALERT-BOXES`, `APPL-CONTEXT-ID`, `APPLICATION`, `APPLY`, `APPSERVER-INFO`, `APPSERVER-PASSWORD`, `APPSERVER-USERID`, `ARRAY-MESSAGE`, `AS`, `ASC`, `ASCE`, `ASCEN`, `ASCEND`, `ASCENDI`, `ASCENDIN`, `ASCENDING`, `ASK-OVERWRITE`, `ASSEMBLY`, `ASSIGN`, `ASYNC-REQUEST-COUNT`, `ASYNC-REQUEST-HANDLE`, `ASYNCHRONOUS`, `AT`, `ATTACHED-PAIRLIST`, `ATTR`, `ATTR-SPACE`, `ATTRI`, `ATTRIB`, `ATTRIBU`, `ATTRIBUT`, `AUDIT-CONTROL`, `AUDIT-ENABLED`, `AUDIT-EVENT-CONTEXT`, `AUDIT-POLICY`, `AUTHENTICATION-FAILED`, `AUTHORIZATION`, `AUTO-COMP`, `AUTO-COMPL`, `AUTO-COMPLE`, `AUTO-COMPLET`, `AUTO-COMPLETI`, `AUTO-COMPLETIO`, `AUTO-COMPLETION`, `AUTO-END-KEY`, `AUTO-ENDKEY`, `AUTO-GO`, `AUTO-IND`, `AUTO-INDE`, `AUTO-INDEN`, `AUTO-INDENT`, `AUTO-RESIZE`, `AUTO-RET`, `AUTO-RETU`, `AUTO-RETUR`, `AUTO-RETURN`, `AUTO-SYNCHRONIZE`, `AUTO-Z`, `AUTO-ZA`, `AUTO-ZAP`, `AUTOMATIC`, `AVAIL`, `AVAILA`, `AVAILAB`, `AVAILABL`, `AVAILABLE`, `AVAILABLE-FORMATS`, `AVE`, `AVER`, `AVERA`, `AVERAG`, `AVERAGE`, `AVG`, `BACK`, `BACKG`, `BACKGR`, `BACKGRO`, `BACKGROU`, `BACKGROUN`, `BACKGROUND`, `BACKWARD`, `BACKWARDS`, `BASE64-DECODE`, `BASE64-ENCODE`, `BASE-ADE`, `BASE-KEY`, `BATCH`, `BATCH-`, `BATCH-M`, `BATCH-MO`, `BATCH-MOD`, `BATCH-MODE`, `BATCH-SIZE`, `BEFORE-H`, `BEFORE-HI`, `BEFORE-HID`, `BEFORE-HIDE`, `BEGIN-EVENT-GROUP`, `BEGINS`, `BELL`, `BETWEEN`, `BGC`, `BGCO`, `BGCOL`, `BGCOLO`, `BGCOLOR`, `BIG-ENDIAN`, `BINARY`, `BIND`, `BIND-WHERE`, `BLANK`, `BLOCK-ITERATION-DISPLAY`, `BLOCK-LEVEL`, `BORDER-B`, `BORDER-BO`, `BORDER-BOT`, `BORDER-BOTT`, `BORDER-BOTTO`, `BORDER-BOTTOM-CHARS`, `BORDER-BOTTOM-P`, `BORDER-BOTTOM-PI`, `BORDER-BOTTOM-PIX`, `BORDER-BOTTOM-PIXE`, `BORDER-BOTTOM-PIXEL`, `BORDER-BOTTOM-PIXELS`, `BORDER-L`, `BORDER-LE`, `BORDER-LEF`, `BORDER-LEFT`, `BORDER-LEFT-`, `BORDER-LEFT-C`, 
`BORDER-LEFT-CH`, `BORDER-LEFT-CHA`, `BORDER-LEFT-CHAR`, `BORDER-LEFT-CHARS`, `BORDER-LEFT-P`, `BORDER-LEFT-PI`, `BORDER-LEFT-PIX`, `BORDER-LEFT-PIXE`, `BORDER-LEFT-PIXEL`, `BORDER-LEFT-PIXELS`, `BORDER-R`, `BORDER-RI`, `BORDER-RIG`, `BORDER-RIGH`, `BORDER-RIGHT`, `BORDER-RIGHT-`, `BORDER-RIGHT-C`, `BORDER-RIGHT-CH`, `BORDER-RIGHT-CHA`, `BORDER-RIGHT-CHAR`, `BORDER-RIGHT-CHARS`, `BORDER-RIGHT-P`, `BORDER-RIGHT-PI`, `BORDER-RIGHT-PIX`, `BORDER-RIGHT-PIXE`, `BORDER-RIGHT-PIXEL`, `BORDER-RIGHT-PIXELS`, `BORDER-T`, `BORDER-TO`, `BORDER-TOP`, `BORDER-TOP-`, `BORDER-TOP-C`, `BORDER-TOP-CH`, `BORDER-TOP-CHA`, `BORDER-TOP-CHAR`, `BORDER-TOP-CHARS`, `BORDER-TOP-P`, `BORDER-TOP-PI`, `BORDER-TOP-PIX`, `BORDER-TOP-PIXE`, `BORDER-TOP-PIXEL`, `BORDER-TOP-PIXELS`, `BOX`, `BOX-SELECT`, `BOX-SELECTA`, `BOX-SELECTAB`, `BOX-SELECTABL`, `BOX-SELECTABLE`, `BREAK`, `BROWSE`, `BUFFER`, `BUFFER-CHARS`, `BUFFER-COMPARE`, `BUFFER-COPY`, `BUFFER-CREATE`, `BUFFER-DELETE`, `BUFFER-FIELD`, `BUFFER-HANDLE`, `BUFFER-LINES`, `BUFFER-NAME`, `BUFFER-PARTITION-ID`, `BUFFER-RELEASE`, `BUFFER-VALUE`, `BUTTON`, `BUTTONS`, `BY`, `BY-POINTER`, `BY-VARIANT-POINTER`, `CACHE`, `CACHE-SIZE`, `CALL`, `CALL-NAME`, `CALL-TYPE`, `CAN-CREATE`, `CAN-DELETE`, `CAN-DO`, `CAN-DO-DOMAIN-SUPPORT`, `CAN-FIND`, `CAN-QUERY`, `CAN-READ`, `CAN-SET`, `CAN-WRITE`, `CANCEL-BREAK`, `CANCEL-BUTTON`, `CAPS`, `CAREFUL-PAINT`, `CASE`, `CASE-SEN`, `CASE-SENS`, `CASE-SENSI`, `CASE-SENSIT`, `CASE-SENSITI`, `CASE-SENSITIV`, `CASE-SENSITIVE`, `CAST`, `CATCH`, `CDECL`, `CENTER`, `CENTERE`, `CENTERED`, `CHAINED`, `CHARACTER`, `CHARACTER_LENGTH`, `CHARSET`, `CHECK`, `CHECKED`, `CHOOSE`, `CHR`, `CLASS`, `CLASS-TYPE`, `CLEAR`, `CLEAR-APPL-CONTEXT`, `CLEAR-LOG`, `CLEAR-SELECT`, `CLEAR-SELECTI`, `CLEAR-SELECTIO`, `CLEAR-SELECTION`, `CLEAR-SORT-ARROW`, `CLEAR-SORT-ARROWS`, `CLIENT-CONNECTION-ID`, `CLIENT-PRINCIPAL`, `CLIENT-TTY`, `CLIENT-TYPE`, `CLIENT-WORKSTATION`, `CLIPBOARD`, `CLOSE`, `CLOSE-LOG`, `CODE`, `CODEBASE-LOCATOR`, `CODEPAGE`, 
`CODEPAGE-CONVERT`, `COL`, `COL-OF`, `COLLATE`, `COLON`, `COLON-ALIGN`, `COLON-ALIGNE`, `COLON-ALIGNED`, `COLOR`, `COLOR-TABLE`, `COLU`, `COLUM`, `COLUMN`, `COLUMN-BGCOLOR`, `COLUMN-DCOLOR`, `COLUMN-FGCOLOR`, `COLUMN-FONT`, `COLUMN-LAB`, `COLUMN-LABE`, `COLUMN-LABEL`, `COLUMN-MOVABLE`, `COLUMN-OF`, `COLUMN-PFCOLOR`, `COLUMN-READ-ONLY`, `COLUMN-RESIZABLE`, `COLUMN-SCROLLING`, `COLUMNS`, `COM-HANDLE`, `COM-SELF`, `COMBO-BOX`, `COMMAND`, `COMPARES`, `COMPILE`, `COMPILER`, `COMPLETE`, `CONFIG-NAME`, `CONNECT`, `CONNECTED`, `CONSTRUCTOR`, `CONTAINS`, `CONTENTS`, `CONTEXT`, `CONTEXT-HELP`, `CONTEXT-HELP-FILE`, `CONTEXT-HELP-ID`, `CONTEXT-POPUP`, `CONTROL`, `CONTROL-BOX`, `CONTROL-FRAME`, `CONVERT`, `CONVERT-3D-COLORS`, `CONVERT-TO-OFFS`, `CONVERT-TO-OFFSE`, `CONVERT-TO-OFFSET`, `COPY-DATASET`, `COPY-LOB`, `COPY-SAX-ATTRIBUTES`, `COPY-TEMP-TABLE`, `COUNT`, `COUNT-OF`, `CPCASE`, `CPCOLL`, `CPINTERNAL`, `CPLOG`, `CPPRINT`, `CPRCODEIN`, `CPRCODEOUT`, `CPSTREAM`, `CPTERM`, `CRC-VALUE`, `CREATE`, `CREATE-LIKE`, `CREATE-LIKE-SEQUENTIAL`, `CREATE-NODE-NAMESPACE`, `CREATE-RESULT-LIST-ENTRY`, `CREATE-TEST-FILE`, `CURRENT`, `CURRENT-CHANGED`, `CURRENT-COLUMN`, `CURRENT-ENV`, `CURRENT-ENVI`, `CURRENT-ENVIR`, `CURRENT-ENVIRO`, `CURRENT-ENVIRON`, `CURRENT-ENVIRONM`, `CURRENT-ENVIRONME`, `CURRENT-ENVIRONMEN`, `CURRENT-ENVIRONMENT`, `CURRENT-ITERATION`, `CURRENT-LANG`, `CURRENT-LANGU`, `CURRENT-LANGUA`, `CURRENT-LANGUAG`, `CURRENT-LANGUAGE`, `CURRENT-QUERY`, `CURRENT-REQUEST-INFO`, `CURRENT-RESPONSE-INFO`, `CURRENT-RESULT-ROW`, `CURRENT-ROW-MODIFIED`, `CURRENT-VALUE`, `CURRENT-WINDOW`, `CURRENT_DATE`, `CURS`, `CURSO`, `CURSOR`, `CURSOR-CHAR`, `CURSOR-LINE`, `CURSOR-OFFSET`, `DATA-BIND`, `DATA-ENTRY-RET`, `DATA-ENTRY-RETU`, `DATA-ENTRY-RETUR`, `DATA-ENTRY-RETURN`, `DATA-REL`, `DATA-RELA`, `DATA-RELAT`, `DATA-RELATI`, `DATA-RELATIO`, `DATA-RELATION`, `DATA-SOURCE`, `DATA-SOURCE-COMPLETE-MAP`, `DATA-SOURCE-MODIFIED`, `DATA-SOURCE-ROWID`, `DATA-T`, `DATA-TY`, `DATA-TYP`, `DATA-TYPE`, 
`DATABASE`, `DATASERVERS`, `DATASET`, `DATASET-HANDLE`, `DATE`, `DATE-F`, `DATE-FO`, `DATE-FOR`, `DATE-FORM`, `DATE-FORMA`, `DATE-FORMAT`, `DAY`, `DB-CONTEXT`, `DB-REFERENCES`, `DBCODEPAGE`, `DBCOLLATION`, `DBNAME`, `DBPARAM`, `DBREST`, `DBRESTR`, `DBRESTRI`, `DBRESTRIC`, `DBRESTRICT`, `DBRESTRICTI`, `DBRESTRICTIO`, `DBRESTRICTION`, `DBRESTRICTIONS`, `DBTASKID`, `DBTYPE`, `DBVERS`, `DBVERSI`, `DBVERSIO`, `DBVERSION`, `DCOLOR`, `DDE`, `DDE-ERROR`, `DDE-I`, `DDE-ID`, `DDE-ITEM`, `DDE-NAME`, `DDE-TOPIC`, `DEBLANK`, `DEBU`, `DEBUG`, `DEBUG-ALERT`, `DEBUG-LIST`, `DEBUGGER`, `DECIMAL`, `DECIMALS`, `DECLARE`, `DECLARE-NAMESPACE`, `DECRYPT`, `DEFAULT`, `DEFAULT-B`, `DEFAULT-BU`, `DEFAULT-BUFFER-HANDLE`, `DEFAULT-BUT`, `DEFAULT-BUTT`, `DEFAULT-BUTTO`, `DEFAULT-BUTTON`, `DEFAULT-COMMIT`, `DEFAULT-EX`, `DEFAULT-EXT`, `DEFAULT-EXTE`, `DEFAULT-EXTEN`, `DEFAULT-EXTENS`, `DEFAULT-EXTENSI`, `DEFAULT-EXTENSIO`, `DEFAULT-EXTENSION`, `DEFAULT-NOXL`, `DEFAULT-NOXLA`, `DEFAULT-NOXLAT`, `DEFAULT-NOXLATE`, `DEFAULT-VALUE`, `DEFAULT-WINDOW`, `DEFINE`, `DEFINE-USER-EVENT-MANAGER`, `DEFINED`, `DEL`, `DELE`, `DELEGATE`, `DELET`, `DELETE PROCEDURE`, `DELETE`, `DELETE-CHAR`, `DELETE-CHARA`, `DELETE-CHARAC`, `DELETE-CHARACT`, `DELETE-CHARACTE`, `DELETE-CHARACTER`, `DELETE-CURRENT-ROW`, `DELETE-LINE`, `DELETE-RESULT-LIST-ENTRY`, `DELETE-SELECTED-ROW`, `DELETE-SELECTED-ROWS`, `DELIMITER`, `DESC`, `DESCE`, `DESCEN`, `DESCEND`, `DESCENDI`, `DESCENDIN`, `DESCENDING`, `DESELECT-FOCUSED-ROW`, `DESELECT-ROWS`, `DESELECT-SELECTED-ROW`, `DESELECTION`, `DESTRUCTOR`, `DIALOG-BOX`, `DICT`, `DICTI`, `DICTIO`, `DICTION`, `DICTIONA`, `DICTIONAR`, `DICTIONARY`, `DIR`, `DISABLE`, `DISABLE-AUTO-ZAP`, `DISABLE-DUMP-TRIGGERS`, `DISABLE-LOAD-TRIGGERS`, `DISABLED`, `DISCON`, `DISCONN`, `DISCONNE`, `DISCONNEC`, `DISCONNECT`, `DISP`, `DISPL`, `DISPLA`, `DISPLAY`, `DISPLAY-MESSAGE`, `DISPLAY-T`, `DISPLAY-TY`, `DISPLAY-TYP`, `DISPLAY-TYPE`, `DISTINCT`, `DO`, `DOMAIN-DESCRIPTION`, `DOMAIN-NAME`, `DOMAIN-TYPE`, `DOS`, 
`DOUBLE`, `DOWN`, `DRAG-ENABLED`, `DROP`, `DROP-DOWN`, `DROP-DOWN-LIST`, `DROP-FILE-NOTIFY`, `DROP-TARGET`, `DS-CLOSE-CURSOR`, `DSLOG-MANAGER`, `DUMP`, `DYNAMIC`, `DYNAMIC-ENUM`, `DYNAMIC-FUNCTION`, `DYNAMIC-INVOKE`, `EACH`, `ECHO`, `EDGE`, `EDGE-`, `EDGE-C`, `EDGE-CH`, `EDGE-CHA`, `EDGE-CHAR`, `EDGE-CHARS`, `EDGE-P`, `EDGE-PI`, `EDGE-PIX`, `EDGE-PIXE`, `EDGE-PIXEL`, `EDGE-PIXELS`, `EDIT-CAN-PASTE`, `EDIT-CAN-UNDO`, `EDIT-CLEAR`, `EDIT-COPY`, `EDIT-CUT`, `EDIT-PASTE`, `EDIT-UNDO`, `EDITING`, `EDITOR`, `ELSE`, `EMPTY`, `EMPTY-TEMP-TABLE`, `ENABLE`, `ENABLED-FIELDS`, `ENCODE`, `ENCRYPT`, `ENCRYPT-AUDIT-MAC-KEY`, `ENCRYPTION-SALT`, `END`, `END-DOCUMENT`, `END-ELEMENT`, `END-EVENT-GROUP`, `END-FILE-DROP`, `END-KEY`, `END-MOVE`, `END-RESIZE`, `END-ROW-RESIZE`, `END-USER-PROMPT`, `ENDKEY`, `ENTERED`, `ENTITY-EXPANSION-LIMIT`, `ENTRY`, `ENUM`, `EQ`, `ERROR`, `ERROR-COL`, `ERROR-COLU`, `ERROR-COLUM`, `ERROR-COLUMN`, `ERROR-ROW`, `ERROR-STACK-TRACE`, `ERROR-STAT`, `ERROR-STATU`, `ERROR-STATUS`, `ESCAPE`, `ETIME`, `EVENT`, `EVENT-GROUP-ID`, `EVENT-PROCEDURE`, `EVENT-PROCEDURE-CONTEXT`, `EVENT-T`, `EVENT-TY`, `EVENT-TYP`, `EVENT-TYPE`, `EVENTS`, `EXCEPT`, `EXCLUSIVE`, `EXCLUSIVE-`, `EXCLUSIVE-ID`, `EXCLUSIVE-L`, `EXCLUSIVE-LO`, `EXCLUSIVE-LOC`, `EXCLUSIVE-LOCK`, `EXCLUSIVE-WEB-USER`, `EXECUTE`, `EXISTS`, `EXP`, `EXPAND`, `EXPANDABLE`, `EXPLICIT`, `EXPORT`, `EXPORT-PRINCIPAL`, `EXTENDED`, `EXTENT`, `EXTERNAL`, `FALSE`, `FETCH`, `FETCH-SELECTED-ROW`, `FGC`, `FGCO`, `FGCOL`, `FGCOLO`, `FGCOLOR`, `FIELD`, `FIELDS`, `FILE`, `FILE-CREATE-DATE`, `FILE-CREATE-TIME`, `FILE-INFO`, `FILE-INFOR`, `FILE-INFORM`, `FILE-INFORMA`, `FILE-INFORMAT`, `FILE-INFORMATI`, `FILE-INFORMATIO`, `FILE-INFORMATION`, `FILE-MOD-DATE`, `FILE-MOD-TIME`, `FILE-NAME`, `FILE-OFF`, `FILE-OFFS`, `FILE-OFFSE`, `FILE-OFFSET`, `FILE-SIZE`, `FILE-TYPE`, `FILENAME`, `FILL`, `FILL-IN`, `FILLED`, `FILTERS`, `FINAL`, `FINALLY`, `FIND`, `FIND-BY-ROWID`, `FIND-CASE-SENSITIVE`, `FIND-CURRENT`, `FIND-FIRST`, `FIND-GLOBAL`, 
`FIND-LAST`, `FIND-NEXT-OCCURRENCE`, `FIND-PREV-OCCURRENCE`, `FIND-SELECT`, `FIND-UNIQUE`, `FIND-WRAP-AROUND`, `FINDER`, `FIRST`, `FIRST-ASYNCH-REQUEST`, `FIRST-CHILD`, `FIRST-COLUMN`, `FIRST-FORM`, `FIRST-OBJECT`, `FIRST-OF`, `FIRST-PROC`, `FIRST-PROCE`, `FIRST-PROCED`, `FIRST-PROCEDU`, `FIRST-PROCEDUR`, `FIRST-PROCEDURE`, `FIRST-SERVER`, `FIRST-TAB-I`, `FIRST-TAB-IT`, `FIRST-TAB-ITE`, `FIRST-TAB-ITEM`, `FIT-LAST-COLUMN`, `FIXED-ONLY`, `FLAT-BUTTON`, `FLOAT`, `FOCUS`, `FOCUSED-ROW`, `FOCUSED-ROW-SELECTED`, `FONT`, `FONT-TABLE`, `FOR`, `FORCE-FILE`, `FORE`, `FOREG`, `FOREGR`, `FOREGRO`, `FOREGROU`, `FOREGROUN`, `FOREGROUND`, `FORM INPUT`, `FORM`, `FORM-LONG-INPUT`, `FORMA`, `FORMAT`, `FORMATTE`, `FORMATTED`, `FORWARD`, `FORWARDS`, `FRAGMEN`, `FRAGMENT`, `FRAM`, `FRAME`, `FRAME-COL`, `FRAME-DB`, `FRAME-DOWN`, `FRAME-FIELD`, `FRAME-FILE`, `FRAME-INDE`, `FRAME-INDEX`, `FRAME-LINE`, `FRAME-NAME`, `FRAME-ROW`, `FRAME-SPA`, `FRAME-SPAC`, `FRAME-SPACI`, `FRAME-SPACIN`, `FRAME-SPACING`, `FRAME-VAL`, `FRAME-VALU`, `FRAME-VALUE`, `FRAME-X`, `FRAME-Y`, `FREQUENCY`, `FROM`, `FROM-C`, `FROM-CH`, `FROM-CHA`, `FROM-CHAR`, `FROM-CHARS`, `FROM-CUR`, `FROM-CURR`, `FROM-CURRE`, `FROM-CURREN`, `FROM-CURRENT`, `FROM-P`, `FROM-PI`, `FROM-PIX`, `FROM-PIXE`, `FROM-PIXEL`, `FROM-PIXELS`, `FULL-HEIGHT`, `FULL-HEIGHT-`, `FULL-HEIGHT-C`, `FULL-HEIGHT-CH`, `FULL-HEIGHT-CHA`, `FULL-HEIGHT-CHAR`, `FULL-HEIGHT-CHARS`, `FULL-HEIGHT-P`, `FULL-HEIGHT-PI`, `FULL-HEIGHT-PIX`, `FULL-HEIGHT-PIXE`, `FULL-HEIGHT-PIXEL`, `FULL-HEIGHT-PIXELS`, `FULL-PATHN`, `FULL-PATHNA`, `FULL-PATHNAM`, `FULL-PATHNAME`, `FULL-WIDTH`, `FULL-WIDTH-`, `FULL-WIDTH-C`, `FULL-WIDTH-CH`, `FULL-WIDTH-CHA`, `FULL-WIDTH-CHAR`, `FULL-WIDTH-CHARS`, `FULL-WIDTH-P`, `FULL-WIDTH-PI`, `FULL-WIDTH-PIX`, `FULL-WIDTH-PIXE`, `FULL-WIDTH-PIXEL`, `FULL-WIDTH-PIXELS`, `FUNCTION`, `FUNCTION-CALL-TYPE`, `GATEWAY`, `GATEWAYS`, `GE`, `GENERATE-MD5`, `GENERATE-PBE-KEY`, `GENERATE-PBE-SALT`, `GENERATE-RANDOM-KEY`, `GENERATE-UUID`, `GET`, 
`GET-ATTR-CALL-TYPE`, `GET-ATTRIBUTE-NODE`, `GET-BINARY-DATA`, `GET-BLUE`, `GET-BLUE-`, `GET-BLUE-V`, `GET-BLUE-VA`, `GET-BLUE-VAL`, `GET-BLUE-VALU`, `GET-BLUE-VALUE`, `GET-BROWSE-COLUMN`, `GET-BUFFER-HANDLE`, `GET-BYTE`, `GET-CALLBACK-PROC-CONTEXT`, `GET-CALLBACK-PROC-NAME`, `GET-CGI-LIST`, `GET-CGI-LONG-VALUE`, `GET-CGI-VALUE`, `GET-CLASS`, `GET-CODEPAGES`, `GET-COLLATIONS`, `GET-CONFIG-VALUE`, `GET-CURRENT`, `GET-DOUBLE`, `GET-DROPPED-FILE`, `GET-DYNAMIC`, `GET-ERROR-COLUMN`, `GET-ERROR-ROW`, `GET-FILE`, `GET-FILE-NAME`, `GET-FILE-OFFSE`, `GET-FILE-OFFSET`, `GET-FIRST`, `GET-FLOAT`, `GET-GREEN`, `GET-GREEN-`, `GET-GREEN-V`, `GET-GREEN-VA`, `GET-GREEN-VAL`, `GET-GREEN-VALU`, `GET-GREEN-VALUE`, `GET-INDEX-BY-NAMESPACE-NAME`, `GET-INDEX-BY-QNAME`, `GET-INT64`, `GET-ITERATION`, `GET-KEY-VAL`, `GET-KEY-VALU`, `GET-KEY-VALUE`, `GET-LAST`, `GET-LOCALNAME-BY-INDEX`, `GET-LONG`, `GET-MESSAGE`, `GET-NEXT`, `GET-NUMBER`, `GET-POINTER-VALUE`, `GET-PREV`, `GET-PRINTERS`, `GET-PROPERTY`, `GET-QNAME-BY-INDEX`, `GET-RED`, `GET-RED-`, `GET-RED-V`, `GET-RED-VA`, `GET-RED-VAL`, `GET-RED-VALU`, `GET-RED-VALUE`, `GET-REPOSITIONED-ROW`, `GET-RGB-VALUE`, `GET-SELECTED`, `GET-SELECTED-`, `GET-SELECTED-W`, `GET-SELECTED-WI`, `GET-SELECTED-WID`, `GET-SELECTED-WIDG`, `GET-SELECTED-WIDGE`, `GET-SELECTED-WIDGET`, `GET-SHORT`, `GET-SIGNATURE`, `GET-SIZE`, `GET-STRING`, `GET-TAB-ITEM`, `GET-TEXT-HEIGHT`, `GET-TEXT-HEIGHT-`, `GET-TEXT-HEIGHT-C`, `GET-TEXT-HEIGHT-CH`, `GET-TEXT-HEIGHT-CHA`, `GET-TEXT-HEIGHT-CHAR`, `GET-TEXT-HEIGHT-CHARS`, `GET-TEXT-HEIGHT-P`, `GET-TEXT-HEIGHT-PI`, `GET-TEXT-HEIGHT-PIX`, `GET-TEXT-HEIGHT-PIXE`, `GET-TEXT-HEIGHT-PIXEL`, `GET-TEXT-HEIGHT-PIXELS`, `GET-TEXT-WIDTH`, `GET-TEXT-WIDTH-`, `GET-TEXT-WIDTH-C`, `GET-TEXT-WIDTH-CH`, `GET-TEXT-WIDTH-CHA`, `GET-TEXT-WIDTH-CHAR`, `GET-TEXT-WIDTH-CHARS`, `GET-TEXT-WIDTH-P`, `GET-TEXT-WIDTH-PI`, `GET-TEXT-WIDTH-PIX`, `GET-TEXT-WIDTH-PIXE`, `GET-TEXT-WIDTH-PIXEL`, `GET-TEXT-WIDTH-PIXELS`, `GET-TYPE-BY-INDEX`, 
`GET-TYPE-BY-NAMESPACE-NAME`, `GET-TYPE-BY-QNAME`, `GET-UNSIGNED-LONG`, `GET-UNSIGNED-SHORT`, `GET-URI-BY-INDEX`, `GET-VALUE-BY-INDEX`, `GET-VALUE-BY-NAMESPACE-NAME`, `GET-VALUE-BY-QNAME`, `GET-WAIT-STATE`, `GETBYTE`, `GLOBAL`, `GO-ON`, `GO-PEND`, `GO-PENDI`, `GO-PENDIN`, `GO-PENDING`, `GRANT`, `GRAPHIC-E`, `GRAPHIC-ED`, `GRAPHIC-EDG`, `GRAPHIC-EDGE`, `GRID-FACTOR-H`, `GRID-FACTOR-HO`, `GRID-FACTOR-HOR`, `GRID-FACTOR-HORI`, `GRID-FACTOR-HORIZ`, `GRID-FACTOR-HORIZO`, `GRID-FACTOR-HORIZON`, `GRID-FACTOR-HORIZONT`, `GRID-FACTOR-HORIZONTA`, `GRID-FACTOR-HORIZONTAL`, `GRID-FACTOR-V`, `GRID-FACTOR-VE`, `GRID-FACTOR-VER`, `GRID-FACTOR-VERT`, `GRID-FACTOR-VERTI`, `GRID-FACTOR-VERTIC`, `GRID-FACTOR-VERTICA`, `GRID-FACTOR-VERTICAL`, `GRID-SNAP`, `GRID-UNIT-HEIGHT`, `GRID-UNIT-HEIGHT-`, `GRID-UNIT-HEIGHT-C`, `GRID-UNIT-HEIGHT-CH`, `GRID-UNIT-HEIGHT-CHA`, `GRID-UNIT-HEIGHT-CHARS`, `GRID-UNIT-HEIGHT-P`, `GRID-UNIT-HEIGHT-PI`, `GRID-UNIT-HEIGHT-PIX`, `GRID-UNIT-HEIGHT-PIXE`, `GRID-UNIT-HEIGHT-PIXEL`, `GRID-UNIT-HEIGHT-PIXELS`, `GRID-UNIT-WIDTH`, `GRID-UNIT-WIDTH-`, `GRID-UNIT-WIDTH-C`, `GRID-UNIT-WIDTH-CH`, `GRID-UNIT-WIDTH-CHA`, `GRID-UNIT-WIDTH-CHAR`, `GRID-UNIT-WIDTH-CHARS`, `GRID-UNIT-WIDTH-P`, `GRID-UNIT-WIDTH-PI`, `GRID-UNIT-WIDTH-PIX`, `GRID-UNIT-WIDTH-PIXE`, `GRID-UNIT-WIDTH-PIXEL`, `GRID-UNIT-WIDTH-PIXELS`, `GRID-VISIBLE`, `GROUP`, `GT`, `GUID`, `HANDLE`, `HANDLER`, `HAS-RECORDS`, `HAVING`, `HEADER`, `HEIGHT`, `HEIGHT-`, `HEIGHT-C`, `HEIGHT-CH`, `HEIGHT-CHA`, `HEIGHT-CHAR`, `HEIGHT-CHARS`, `HEIGHT-P`, `HEIGHT-PI`, `HEIGHT-PIX`, `HEIGHT-PIXE`, `HEIGHT-PIXEL`, `HEIGHT-PIXELS`, `HELP`, `HEX-DECODE`, `HEX-ENCODE`, `HIDDEN`, `HIDE`, `HORI`, `HORIZ`, `HORIZO`, `HORIZON`, `HORIZONT`, `HORIZONTA`, `HORIZONTAL`, `HOST-BYTE-ORDER`, `HTML-CHARSET`, `HTML-END-OF-LINE`, `HTML-END-OF-PAGE`, `HTML-FRAME-BEGIN`, `HTML-FRAME-END`, `HTML-HEADER-BEGIN`, `HTML-HEADER-END`, `HTML-TITLE-BEGIN`, `HTML-TITLE-END`, `HWND`, `ICON`, `IF`, `IMAGE`, `IMAGE-DOWN`, `IMAGE-INSENSITIVE`, `IMAGE-SIZE`, 
`IMAGE-SIZE-C`, `IMAGE-SIZE-CH`, `IMAGE-SIZE-CHA`, `IMAGE-SIZE-CHAR`, `IMAGE-SIZE-CHARS`, `IMAGE-SIZE-P`, `IMAGE-SIZE-PI`, `IMAGE-SIZE-PIX`, `IMAGE-SIZE-PIXE`, `IMAGE-SIZE-PIXEL`, `IMAGE-SIZE-PIXELS`, `IMAGE-UP`, `IMMEDIATE-DISPLAY`, `IMPLEMENTS`, `IMPORT`, `IMPORT-PRINCIPAL`, `IN`, `IN-HANDLE`, `INCREMENT-EXCLUSIVE-ID`, `INDEX`, `INDEX-HINT`, `INDEX-INFORMATION`, `INDEXED-REPOSITION`, `INDICATOR`, `INFO`, `INFOR`, `INFORM`, `INFORMA`, `INFORMAT`, `INFORMATI`, `INFORMATIO`, `INFORMATION`, `INHERIT-BGC`, `INHERIT-BGCO`, `INHERIT-BGCOL`, `INHERIT-BGCOLO`, `INHERIT-BGCOLOR`, `INHERIT-FGC`, `INHERIT-FGCO`, `INHERIT-FGCOL`, `INHERIT-FGCOLO`, `INHERIT-FGCOLOR`, `INHERITS`, `INIT`, `INITI`, `INITIA`, `INITIAL`, `INITIAL-DIR`, `INITIAL-FILTER`, `INITIALIZE-DOCUMENT-TYPE`, `INITIATE`, `INNER-CHARS`, `INNER-LINES`, `INPUT`, `INPUT-O`, `INPUT-OU`, `INPUT-OUT`, `INPUT-OUTP`, `INPUT-OUTPU`, `INPUT-OUTPUT`, `INPUT-VALUE`, `INSERT`, `INSERT-ATTRIBUTE`, `INSERT-B`, `INSERT-BA`, `INSERT-BAC`, `INSERT-BACK`, `INSERT-BACKT`, `INSERT-BACKTA`, `INSERT-BACKTAB`, `INSERT-FILE`, `INSERT-ROW`, `INSERT-STRING`, `INSERT-T`, `INSERT-TA`, `INSERT-TAB`, `INT64`, `INT`, `INTEGER`, `INTERFACE`, `INTERNAL-ENTRIES`, `INTO`, `INVOKE`, `IS`, `IS-ATTR`, `IS-ATTR-`, `IS-ATTR-S`, `IS-ATTR-SP`, `IS-ATTR-SPA`, `IS-ATTR-SPAC`, `IS-ATTR-SPACE`, `IS-CLASS`, `IS-JSON`, `IS-LEAD-BYTE`, `IS-OPEN`, `IS-PARAMETER-SET`, `IS-PARTITIONED`, `IS-ROW-SELECTED`, `IS-SELECTED`, `IS-XML`, `ITEM`, `ITEMS-PER-ROW`, `JOIN`, `JOIN-BY-SQLDB`, `KBLABEL`, `KEEP-CONNECTION-OPEN`, `KEEP-FRAME-Z`, `KEEP-FRAME-Z-`, `KEEP-FRAME-Z-O`, `KEEP-FRAME-Z-OR`, `KEEP-FRAME-Z-ORD`, `KEEP-FRAME-Z-ORDE`, `KEEP-FRAME-Z-ORDER`, `KEEP-MESSAGES`, `KEEP-SECURITY-CACHE`, `KEEP-TAB-ORDER`, `KEY`, `KEY-CODE`, `KEY-FUNC`, `KEY-FUNCT`, `KEY-FUNCTI`, `KEY-FUNCTIO`, `KEY-FUNCTION`, `KEY-LABEL`, `KEYCODE`, `KEYFUNC`, `KEYFUNCT`, `KEYFUNCTI`, `KEYFUNCTIO`, `KEYFUNCTION`, `KEYLABEL`, `KEYS`, `KEYWORD`, `KEYWORD-ALL`, `LABEL`, `LABEL-BGC`, `LABEL-BGCO`, 
`LABEL-BGCOL`, `LABEL-BGCOLO`, `LABEL-BGCOLOR`, `LABEL-DC`, `LABEL-DCO`, `LABEL-DCOL`, `LABEL-DCOLO`, `LABEL-DCOLOR`, `LABEL-FGC`, `LABEL-FGCO`, `LABEL-FGCOL`, `LABEL-FGCOLO`, `LABEL-FGCOLOR`, `LABEL-FONT`, `LABEL-PFC`, `LABEL-PFCO`, `LABEL-PFCOL`, `LABEL-PFCOLO`, `LABEL-PFCOLOR`, `LABELS`, `LABELS-HAVE-COLONS`, `LANDSCAPE`, `LANGUAGE`, `LANGUAGES`, `LARGE`, `LARGE-TO-SMALL`, `LAST`, `LAST-ASYNCH-REQUEST`, `LAST-BATCH`, `LAST-CHILD`, `LAST-EVEN`, `LAST-EVENT`, `LAST-FORM`, `LAST-KEY`, `LAST-OBJECT`, `LAST-OF`, `LAST-PROCE`, `LAST-PROCED`, `LAST-PROCEDU`, `LAST-PROCEDUR`, `LAST-PROCEDURE`, `LAST-SERVER`, `LAST-TAB-I`, `LAST-TAB-IT`, `LAST-TAB-ITE`, `LAST-TAB-ITEM`, `LASTKEY`, `LC`, `LDBNAME`, `LE`, `LEAVE`, `LEFT-ALIGN`, `LEFT-ALIGNE`, `LEFT-ALIGNED`, `LEFT-TRIM`, `LENGTH`, `LIBRARY`, `LIKE`, `LIKE-SEQUENTIAL`, `LINE`, `LINE-COUNT`, `LINE-COUNTE`, `LINE-COUNTER`, `LIST-EVENTS`, `LIST-ITEM-PAIRS`, `LIST-ITEMS`, `LIST-PROPERTY-NAMES`, `LIST-QUERY-ATTRS`, `LIST-SET-ATTRS`, `LIST-WIDGETS`, `LISTI`, `LISTIN`, `LISTING`, `LITERAL-QUESTION`, `LITTLE-ENDIAN`, `LOAD`, `LOAD-DOMAINS`, `LOAD-ICON`, `LOAD-IMAGE`, `LOAD-IMAGE-DOWN`, `LOAD-IMAGE-INSENSITIVE`, `LOAD-IMAGE-UP`, `LOAD-MOUSE-P`, `LOAD-MOUSE-PO`, `LOAD-MOUSE-POI`, `LOAD-MOUSE-POIN`, `LOAD-MOUSE-POINT`, `LOAD-MOUSE-POINTE`, `LOAD-MOUSE-POINTER`, `LOAD-PICTURE`, `LOAD-SMALL-ICON`, `LOCAL-NAME`, `LOCAL-VERSION-INFO`, `LOCATOR-COLUMN-NUMBER`, `LOCATOR-LINE-NUMBER`, `LOCATOR-PUBLIC-ID`, `LOCATOR-SYSTEM-ID`, `LOCATOR-TYPE`, `LOCK-REGISTRATION`, `LOCKED`, `LOG`, `LOG-AUDIT-EVENT`, `LOG-MANAGER`, `LOGICAL`, `LOGIN-EXPIRATION-TIMESTAMP`, `LOGIN-HOST`, `LOGIN-STATE`, `LOGOUT`, `LONGCHAR`, `LOOKAHEAD`, `LOOKUP`, `LT`, `MACHINE-CLASS`, `MANDATORY`, `MANUAL-HIGHLIGHT`, `MAP`, `MARGIN-EXTRA`, `MARGIN-HEIGHT`, `MARGIN-HEIGHT-`, `MARGIN-HEIGHT-C`, `MARGIN-HEIGHT-CH`, `MARGIN-HEIGHT-CHA`, `MARGIN-HEIGHT-CHAR`, `MARGIN-HEIGHT-CHARS`, `MARGIN-HEIGHT-P`, `MARGIN-HEIGHT-PI`, `MARGIN-HEIGHT-PIX`, `MARGIN-HEIGHT-PIXE`, 
`MARGIN-HEIGHT-PIXEL`, `MARGIN-HEIGHT-PIXELS`, `MARGIN-WIDTH`, `MARGIN-WIDTH-`, `MARGIN-WIDTH-C`, `MARGIN-WIDTH-CH`, `MARGIN-WIDTH-CHA`, `MARGIN-WIDTH-CHAR`, `MARGIN-WIDTH-CHARS`, `MARGIN-WIDTH-P`, `MARGIN-WIDTH-PI`, `MARGIN-WIDTH-PIX`, `MARGIN-WIDTH-PIXE`, `MARGIN-WIDTH-PIXEL`, `MARGIN-WIDTH-PIXELS`, `MARK-NEW`, `MARK-ROW-STATE`, `MATCHES`, `MAX`, `MAX-BUTTON`, `MAX-CHARS`, `MAX-DATA-GUESS`, `MAX-HEIGHT`, `MAX-HEIGHT-C`, `MAX-HEIGHT-CH`, `MAX-HEIGHT-CHA`, `MAX-HEIGHT-CHAR`, `MAX-HEIGHT-CHARS`, `MAX-HEIGHT-P`, `MAX-HEIGHT-PI`, `MAX-HEIGHT-PIX`, `MAX-HEIGHT-PIXE`, `MAX-HEIGHT-PIXEL`, `MAX-HEIGHT-PIXELS`, `MAX-ROWS`, `MAX-SIZE`, `MAX-VAL`, `MAX-VALU`, `MAX-VALUE`, `MAX-WIDTH`, `MAX-WIDTH-`, `MAX-WIDTH-C`, `MAX-WIDTH-CH`, `MAX-WIDTH-CHA`, `MAX-WIDTH-CHAR`, `MAX-WIDTH-CHARS`, `MAX-WIDTH-P`, `MAX-WIDTH-PI`, `MAX-WIDTH-PIX`, `MAX-WIDTH-PIXE`, `MAX-WIDTH-PIXEL`, `MAX-WIDTH-PIXELS`, `MAXI`, `MAXIM`, `MAXIMIZE`, `MAXIMU`, `MAXIMUM`, `MAXIMUM-LEVEL`, `MD5-DIGEST`, `MEMBER`, `MEMPTR-TO-NODE-VALUE`, `MENU`, `MENU-BAR`, `MENU-ITEM`, `MENU-K`, `MENU-KE`, `MENU-KEY`, `MENU-M`, `MENU-MO`, `MENU-MOU`, `MENU-MOUS`, `MENU-MOUSE`, `MENUBAR`, `MERGE-BY-FIELD`, `MESSAGE`, `MESSAGE-AREA`, `MESSAGE-AREA-FONT`, `MESSAGE-LINES`, `METHOD`, `MIN`, `MIN-BUTTON`, `MIN-COLUMN-WIDTH-C`, `MIN-COLUMN-WIDTH-CH`, `MIN-COLUMN-WIDTH-CHA`, `MIN-COLUMN-WIDTH-CHAR`, `MIN-COLUMN-WIDTH-CHARS`, `MIN-COLUMN-WIDTH-P`, `MIN-COLUMN-WIDTH-PI`, `MIN-COLUMN-WIDTH-PIX`, `MIN-COLUMN-WIDTH-PIXE`, `MIN-COLUMN-WIDTH-PIXEL`, `MIN-COLUMN-WIDTH-PIXELS`, `MIN-HEIGHT`, `MIN-HEIGHT-`, `MIN-HEIGHT-C`, `MIN-HEIGHT-CH`, `MIN-HEIGHT-CHA`, `MIN-HEIGHT-CHAR`, `MIN-HEIGHT-CHARS`, `MIN-HEIGHT-P`, `MIN-HEIGHT-PI`, `MIN-HEIGHT-PIX`, `MIN-HEIGHT-PIXE`, `MIN-HEIGHT-PIXEL`, `MIN-HEIGHT-PIXELS`, `MIN-SIZE`, `MIN-VAL`, `MIN-VALU`, `MIN-VALUE`, `MIN-WIDTH`, `MIN-WIDTH-`, `MIN-WIDTH-C`, `MIN-WIDTH-CH`, `MIN-WIDTH-CHA`, `MIN-WIDTH-CHAR`, `MIN-WIDTH-CHARS`, `MIN-WIDTH-P`, `MIN-WIDTH-PI`, `MIN-WIDTH-PIX`, `MIN-WIDTH-PIXE`, `MIN-WIDTH-PIXEL`, 
`MIN-WIDTH-PIXELS`, `MINI`, `MINIM`, `MINIMU`, `MINIMUM`, `MOD`, `MODIFIED`, `MODU`, `MODUL`, `MODULO`, `MONTH`, `MOUSE`, `MOUSE-P`, `MOUSE-PO`, `MOUSE-POI`, `MOUSE-POIN`, `MOUSE-POINT`, `MOUSE-POINTE`, `MOUSE-POINTER`, `MOVABLE`, `MOVE-AFTER`, `MOVE-AFTER-`, `MOVE-AFTER-T`, `MOVE-AFTER-TA`, `MOVE-AFTER-TAB`, `MOVE-AFTER-TAB-`, `MOVE-AFTER-TAB-I`, `MOVE-AFTER-TAB-IT`, `MOVE-AFTER-TAB-ITE`, `MOVE-AFTER-TAB-ITEM`, `MOVE-BEFOR`, `MOVE-BEFORE`, `MOVE-BEFORE-`, `MOVE-BEFORE-T`, `MOVE-BEFORE-TA`, `MOVE-BEFORE-TAB`, `MOVE-BEFORE-TAB-`, `MOVE-BEFORE-TAB-I`, `MOVE-BEFORE-TAB-IT`, `MOVE-BEFORE-TAB-ITE`, `MOVE-BEFORE-TAB-ITEM`, `MOVE-COL`, `MOVE-COLU`, `MOVE-COLUM`, `MOVE-COLUMN`, `MOVE-TO-B`, `MOVE-TO-BO`, `MOVE-TO-BOT`, `MOVE-TO-BOTT`, `MOVE-TO-BOTTO`, `MOVE-TO-BOTTOM`, `MOVE-TO-EOF`, `MOVE-TO-T`, `MOVE-TO-TO`, `MOVE-TO-TOP`, `MPE`, `MTIME`, `MULTI-COMPILE`, `MULTIPLE`, `MULTIPLE-KEY`, `MULTITASKING-INTERVAL`, `MUST-EXIST`, `NAME`, `NAMESPACE-PREFIX`, `NAMESPACE-URI`, `NATIVE`, `NE`, `NEEDS-APPSERVER-PROMPT`, `NEEDS-PROMPT`, `NEW`, `NEW-INSTANCE`, `NEW-ROW`, `NEXT`, `NEXT-COLUMN`, `NEXT-PROMPT`, `NEXT-ROWID`, `NEXT-SIBLING`, `NEXT-TAB-I`, `NEXT-TAB-IT`, `NEXT-TAB-ITE`, `NEXT-TAB-ITEM`, `NEXT-VALUE`, `NO`, `NO-APPLY`, `NO-ARRAY-MESSAGE`, `NO-ASSIGN`, `NO-ATTR`, `NO-ATTR-`, `NO-ATTR-L`, `NO-ATTR-LI`, `NO-ATTR-LIS`, `NO-ATTR-LIST`, `NO-ATTR-S`, `NO-ATTR-SP`, `NO-ATTR-SPA`, `NO-ATTR-SPAC`, `NO-ATTR-SPACE`, `NO-AUTO-VALIDATE`, `NO-BIND-WHERE`, `NO-BOX`, `NO-CONSOLE`, `NO-CONVERT`, `NO-CONVERT-3D-COLORS`, `NO-CURRENT-VALUE`, `NO-DEBUG`, `NO-DRAG`, `NO-ECHO`, `NO-EMPTY-SPACE`, `NO-ERROR`, `NO-F`, `NO-FI`, `NO-FIL`, `NO-FILL`, `NO-FOCUS`, `NO-HELP`, `NO-HIDE`, `NO-INDEX-HINT`, `NO-INHERIT-BGC`, `NO-INHERIT-BGCO`, `NO-INHERIT-BGCOLOR`, `NO-INHERIT-FGC`, `NO-INHERIT-FGCO`, `NO-INHERIT-FGCOL`, `NO-INHERIT-FGCOLO`, `NO-INHERIT-FGCOLOR`, `NO-JOIN-BY-SQLDB`, `NO-LABE`, `NO-LABELS`, `NO-LOBS`, `NO-LOCK`, `NO-LOOKAHEAD`, `NO-MAP`, `NO-MES`, `NO-MESS`, `NO-MESSA`, `NO-MESSAG`, `NO-MESSAGE`, 
`NO-PAUSE`, `NO-PREFE`, `NO-PREFET`, `NO-PREFETC`, `NO-PREFETCH`, `NO-ROW-MARKERS`, `NO-SCROLLBAR-VERTICAL`, `NO-SEPARATE-CONNECTION`, `NO-SEPARATORS`, `NO-TAB-STOP`, `NO-UND`, `NO-UNDE`, `NO-UNDER`, `NO-UNDERL`, `NO-UNDERLI`, `NO-UNDERLIN`, `NO-UNDERLINE`, `NO-UNDO`, `NO-VAL`, `NO-VALI`, `NO-VALID`, `NO-VALIDA`, `NO-VALIDAT`, `NO-VALIDATE`, `NO-WAIT`, `NO-WORD-WRAP`, `NODE-VALUE-TO-MEMPTR`, `NONAMESPACE-SCHEMA-LOCATION`, `NONE`, `NORMALIZE`, `NOT`, `NOT-ACTIVE`, `NOW`, `NULL`, `NUM-ALI`, `NUM-ALIA`, `NUM-ALIAS`, `NUM-ALIASE`, `NUM-ALIASES`, `NUM-BUFFERS`, `NUM-BUT`, `NUM-BUTT`, `NUM-BUTTO`, `NUM-BUTTON`, `NUM-BUTTONS`, `NUM-COL`, `NUM-COLU`, `NUM-COLUM`, `NUM-COLUMN`, `NUM-COLUMNS`, `NUM-COPIES`, `NUM-DBS`, `NUM-DROPPED-FILES`, `NUM-ENTRIES`, `NUM-FIELDS`, `NUM-FORMATS`, `NUM-ITEMS`, `NUM-ITERATIONS`, `NUM-LINES`, `NUM-LOCKED-COL`, `NUM-LOCKED-COLU`, `NUM-LOCKED-COLUM`, `NUM-LOCKED-COLUMN`, `NUM-LOCKED-COLUMNS`, `NUM-MESSAGES`, `NUM-PARAMETERS`, `NUM-REFERENCES`, `NUM-REPLACED`, `NUM-RESULTS`, `NUM-SELECTED`, `NUM-SELECTED-`, `NUM-SELECTED-ROWS`, `NUM-SELECTED-W`, `NUM-SELECTED-WI`, `NUM-SELECTED-WID`, `NUM-SELECTED-WIDG`, `NUM-SELECTED-WIDGE`, `NUM-SELECTED-WIDGET`, `NUM-SELECTED-WIDGETS`, `NUM-TABS`, `NUM-TO-RETAIN`, `NUM-VISIBLE-COLUMNS`, `NUMERIC`, `NUMERIC-F`, `NUMERIC-FO`, `NUMERIC-FOR`, `NUMERIC-FORM`, `NUMERIC-FORMA`, `NUMERIC-FORMAT`, `OCTET-LENGTH`, `OF`, `OFF`, `OK`, `OK-CANCEL`, `OLD`, `ON`, `ON-FRAME`, `ON-FRAME-`, `ON-FRAME-B`, `ON-FRAME-BO`, `ON-FRAME-BOR`, `ON-FRAME-BORD`, `ON-FRAME-BORDE`, `ON-FRAME-BORDER`, `OPEN`, `OPSYS`, `OPTION`, `OR`, `ORDERED-JOIN`, `ORDINAL`, `OS-APPEND`, `OS-COMMAND`, `OS-COPY`, `OS-CREATE-DIR`, `OS-DELETE`, `OS-DIR`, `OS-DRIVE`, `OS-DRIVES`, `OS-ERROR`, `OS-GETENV`, `OS-RENAME`, `OTHERWISE`, `OUTPUT`, `OVERLAY`, `OVERRIDE`, `OWNER`, `PAGE`, `PAGE-BOT`, `PAGE-BOTT`, `PAGE-BOTTO`, `PAGE-BOTTOM`, `PAGE-NUM`, `PAGE-NUMB`, `PAGE-NUMBE`, `PAGE-NUMBER`, `PAGE-SIZE`, `PAGE-TOP`, `PAGE-WID`, `PAGE-WIDT`, `PAGE-WIDTH`, `PAGED`, 
`PARAM`, `PARAME`, `PARAMET`, `PARAMETE`, `PARAMETER`, `PARENT`, `PARSE-STATUS`, `PARTIAL-KEY`, `PASCAL`, `PASSWORD-FIELD`, `PATHNAME`, `PAUSE`, `PBE-HASH-ALG`, `PBE-HASH-ALGO`, `PBE-HASH-ALGOR`, `PBE-HASH-ALGORI`, `PBE-HASH-ALGORIT`, `PBE-HASH-ALGORITH`, `PBE-HASH-ALGORITHM`, `PBE-KEY-ROUNDS`, `PDBNAME`, `PERSIST`, `PERSISTE`, `PERSISTEN`, `PERSISTENT`, `PERSISTENT-CACHE-DISABLED`, `PFC`, `PFCO`, `PFCOL`, `PFCOLO`, `PFCOLOR`, `PIXELS`, `PIXELS-PER-COL`, `PIXELS-PER-COLU`, `PIXELS-PER-COLUM`, `PIXELS-PER-COLUMN`, `PIXELS-PER-ROW`, `POPUP-M`, `POPUP-ME`, `POPUP-MEN`, `POPUP-MENU`, `POPUP-O`, `POPUP-ON`, `POPUP-ONL`, `POPUP-ONLY`, `PORTRAIT`, `POSITION`, `PRECISION`, `PREFER-DATASET`, `PREPARE-STRING`, `PREPARED`, `PREPROC`, `PREPROCE`, `PREPROCES`, `PREPROCESS`, `PRESEL`, `PRESELE`, `PRESELEC`, `PRESELECT`, `PREV`, `PREV-COLUMN`, `PREV-SIBLING`, `PREV-TAB-I`, `PREV-TAB-IT`, `PREV-TAB-ITE`, `PREV-TAB-ITEM`, `PRIMARY`, `PRINTER`, `PRINTER-CONTROL-HANDLE`, `PRINTER-HDC`, `PRINTER-NAME`, `PRINTER-PORT`, `PRINTER-SETUP`, `PRIVATE`, `PRIVATE-D`, `PRIVATE-DA`, `PRIVATE-DAT`, `PRIVATE-DATA`, `PRIVILEGES`, `PROC-HA`, `PROC-HAN`, `PROC-HAND`, `PROC-HANDL`, `PROC-HANDLE`, `PROC-ST`, `PROC-STA`, `PROC-STAT`, `PROC-STATU`, `PROC-STATUS`, `PROC-TEXT`, `PROC-TEXT-BUFFER`, `PROCE`, `PROCED`, `PROCEDU`, `PROCEDUR`, `PROCEDURE`, `PROCEDURE-CALL-TYPE`, `PROCEDURE-TYPE`, `PROCESS`, `PROFILER`, `PROGRAM-NAME`, `PROGRESS`, `PROGRESS-S`, `PROGRESS-SO`, `PROGRESS-SOU`, `PROGRESS-SOUR`, `PROGRESS-SOURC`, `PROGRESS-SOURCE`, `PROMPT`, `PROMPT-F`, `PROMPT-FO`, `PROMPT-FOR`, `PROMSGS`, `PROPATH`, `PROPERTY`, `PROTECTED`, `PROVERS`, `PROVERSI`, `PROVERSIO`, `PROVERSION`, `PROXY`, `PROXY-PASSWORD`, `PROXY-USERID`, `PUBLIC`, `PUBLIC-ID`, `PUBLISH`, `PUBLISHED-EVENTS`, `PUT`, `PUT-BYTE`, `PUT-DOUBLE`, `PUT-FLOAT`, `PUT-INT64`, `PUT-KEY-VAL`, `PUT-KEY-VALU`, `PUT-KEY-VALUE`, `PUT-LONG`, `PUT-SHORT`, `PUT-STRING`, `PUT-UNSIGNED-LONG`, `PUTBYTE`, `QUERY`, `QUERY-CLOSE`, `QUERY-OFF-END`, `QUERY-OPEN`, 
`QUERY-PREPARE`, `QUERY-TUNING`, `QUESTION`, `QUIT`, `QUOTER`, `R-INDEX`, `RADIO-BUTTONS`, `RADIO-SET`, `RANDOM`, `RAW`, `RAW-TRANSFER`, `RCODE-INFO`, `RCODE-INFOR`, `RCODE-INFORM`, `RCODE-INFORMA`, `RCODE-INFORMAT`, `RCODE-INFORMATI`, `RCODE-INFORMATIO`, `RCODE-INFORMATION`, `READ-AVAILABLE`, `READ-EXACT-NUM`, `READ-FILE`, `READ-JSON`, `READ-ONLY`, `READ-XML`, `READ-XMLSCHEMA`, `READKEY`, `REAL`, `RECID`, `RECORD-LENGTH`, `RECT`, `RECTA`, `RECTAN`, `RECTANG`, `RECTANGL`, `RECTANGLE`, `RECURSIVE`, `REFERENCE-ONLY`, `REFRESH`, `REFRESH-AUDIT-POLICY`, `REFRESHABLE`, `REGISTER-DOMAIN`, `RELEASE`, `REMOTE`, `REMOVE-EVENTS-PROCEDURE`, `REMOVE-SUPER-PROCEDURE`, `REPEAT`, `REPLACE`, `REPLACE-SELECTION-TEXT`, `REPOSITION`, `REPOSITION-BACKWARD`, `REPOSITION-FORWARD`, `REPOSITION-MODE`, `REPOSITION-TO-ROW`, `REPOSITION-TO-ROWID`, `REQUEST`, `REQUEST-INFO`, `RESET`, `RESIZA`, `RESIZAB`, `RESIZABL`, `RESIZABLE`, `RESIZE`, `RESPONSE-INFO`, `RESTART-ROW`, `RESTART-ROWID`, `RETAIN`, `RETAIN-SHAPE`, `RETRY`, `RETRY-CANCEL`, `RETURN`, `RETURN-ALIGN`, `RETURN-ALIGNE`, `RETURN-INS`, `RETURN-INSE`, `RETURN-INSER`, `RETURN-INSERT`, `RETURN-INSERTE`, `RETURN-INSERTED`, `RETURN-TO-START-DI`, `RETURN-TO-START-DIR`, `RETURN-VAL`, `RETURN-VALU`, `RETURN-VALUE`, `RETURN-VALUE-DATA-TYPE`, `RETURNS`, `REVERSE-FROM`, `REVERT`, `REVOKE`, `RGB-VALUE`, `RIGHT-ALIGNED`, `RIGHT-TRIM`, `ROLES`, `ROUND`, `ROUTINE-LEVEL`, `ROW`, `ROW-HEIGHT-CHARS`, `ROW-HEIGHT-PIXELS`, `ROW-MARKERS`, `ROW-OF`, `ROW-RESIZABLE`, `ROWID`, `RULE`, `RUN`, `RUN-PROCEDURE`, `SAVE CACHE`, `SAVE`, `SAVE-AS`, `SAVE-FILE`, `SAX-COMPLE`, `SAX-COMPLET`, `SAX-COMPLETE`, `SAX-PARSE`, `SAX-PARSE-FIRST`, `SAX-PARSE-NEXT`, `SAX-PARSER-ERROR`, `SAX-RUNNING`, `SAX-UNINITIALIZED`, `SAX-WRITE-BEGIN`, `SAX-WRITE-COMPLETE`, `SAX-WRITE-CONTENT`, `SAX-WRITE-ELEMENT`, `SAX-WRITE-ERROR`, `SAX-WRITE-IDLE`, `SAX-WRITE-TAG`, `SAX-WRITER`, `SCHEMA`, `SCHEMA-LOCATION`, `SCHEMA-MARSHAL`, `SCHEMA-PATH`, `SCREEN`, `SCREEN-IO`, `SCREEN-LINES`, 
`SCREEN-VAL`, `SCREEN-VALU`, `SCREEN-VALUE`, `SCROLL`, `SCROLL-BARS`, `SCROLL-DELTA`, `SCROLL-OFFSET`, `SCROLL-TO-CURRENT-ROW`, `SCROLL-TO-I`, `SCROLL-TO-IT`, `SCROLL-TO-ITE`, `SCROLL-TO-ITEM`, `SCROLL-TO-SELECTED-ROW`, `SCROLLABLE`, `SCROLLBAR-H`, `SCROLLBAR-HO`, `SCROLLBAR-HOR`, `SCROLLBAR-HORI`, `SCROLLBAR-HORIZ`, `SCROLLBAR-HORIZO`, `SCROLLBAR-HORIZON`, `SCROLLBAR-HORIZONT`, `SCROLLBAR-HORIZONTA`, `SCROLLBAR-HORIZONTAL`, `SCROLLBAR-V`, `SCROLLBAR-VE`, `SCROLLBAR-VER`, `SCROLLBAR-VERT`, `SCROLLBAR-VERTI`, `SCROLLBAR-VERTIC`, `SCROLLBAR-VERTICA`, `SCROLLBAR-VERTICAL`, `SCROLLED-ROW-POS`, `SCROLLED-ROW-POSI`, `SCROLLED-ROW-POSIT`, `SCROLLED-ROW-POSITI`, `SCROLLED-ROW-POSITIO`, `SCROLLED-ROW-POSITION`, `SCROLLING`, `SDBNAME`, `SEAL`, `SEAL-TIMESTAMP`, `SEARCH`, `SEARCH-SELF`, `SEARCH-TARGET`, `SECTION`, `SECURITY-POLICY`, `SEEK`, `SELECT`, `SELECT-ALL`, `SELECT-FOCUSED-ROW`, `SELECT-NEXT-ROW`, `SELECT-PREV-ROW`, `SELECT-ROW`, `SELECTABLE`, `SELECTED`, `SELECTION`, `SELECTION-END`, `SELECTION-LIST`, `SELECTION-START`, `SELECTION-TEXT`, `SELF`, `SEND`, `SEND-SQL-STATEMENT`, `SENSITIVE`, `SEPARATE-CONNECTION`, `SEPARATOR-FGCOLOR`, `SEPARATORS`, `SERIALIZABLE`, `SERIALIZE-HIDDEN`, `SERIALIZE-NAME`, `SERVER`, `SERVER-CONNECTION-BOUND`, `SERVER-CONNECTION-BOUND-REQUEST`, `SERVER-CONNECTION-CONTEXT`, `SERVER-CONNECTION-ID`, `SERVER-OPERATING-MODE`, `SESSION`, `SESSION-ID`, `SET`, `SET-APPL-CONTEXT`, `SET-ATTR-CALL-TYPE`, `SET-ATTRIBUTE-NODE`, `SET-BLUE`, `SET-BLUE-`, `SET-BLUE-V`, `SET-BLUE-VA`, `SET-BLUE-VAL`, `SET-BLUE-VALU`, `SET-BLUE-VALUE`, `SET-BREAK`, `SET-BUFFERS`, `SET-CALLBACK`, `SET-CLIENT`, `SET-COMMIT`, `SET-CONTENTS`, `SET-CURRENT-VALUE`, `SET-DB-CLIENT`, `SET-DYNAMIC`, `SET-EVENT-MANAGER-OPTION`, `SET-GREEN`, `SET-GREEN-`, `SET-GREEN-V`, `SET-GREEN-VA`, `SET-GREEN-VAL`, `SET-GREEN-VALU`, `SET-GREEN-VALUE`, `SET-INPUT-SOURCE`, `SET-OPTION`, `SET-OUTPUT-DESTINATION`, `SET-PARAMETER`, `SET-POINTER-VALUE`, `SET-PROPERTY`, `SET-RED`, `SET-RED-`, `SET-RED-V`, 
`SET-RED-VA`, `SET-RED-VAL`, `SET-RED-VALU`, `SET-RED-VALUE`, `SET-REPOSITIONED-ROW`, `SET-RGB-VALUE`, `SET-ROLLBACK`, `SET-SELECTION`, `SET-SIZE`, `SET-SORT-ARROW`, `SET-WAIT-STATE`, `SETUSER`, `SETUSERI`, `SETUSERID`, `SHA1-DIGEST`, `SHARE`, `SHARE-`, `SHARE-L`, `SHARE-LO`, `SHARE-LOC`, `SHARE-LOCK`, `SHARED`, `SHOW-IN-TASKBAR`, `SHOW-STAT`, `SHOW-STATS`, `SIDE-LAB`, `SIDE-LABE`, `SIDE-LABEL`, `SIDE-LABEL-H`, `SIDE-LABEL-HA`, `SIDE-LABEL-HAN`, `SIDE-LABEL-HAND`, `SIDE-LABEL-HANDL`, `SIDE-LABEL-HANDLE`, `SIDE-LABELS`, `SIGNATURE`, `SILENT`, `SIMPLE`, `SINGLE`, `SINGLE-RUN`, `SINGLETON`, `SIZE`, `SIZE-C`, `SIZE-CH`, `SIZE-CHA`, `SIZE-CHAR`, `SIZE-CHARS`, `SIZE-P`, `SIZE-PI`, `SIZE-PIX`, `SIZE-PIXE`, `SIZE-PIXEL`, `SIZE-PIXELS`, `SKIP`, `SKIP-DELETED-RECORD`, `SLIDER`, `SMALL-ICON`, `SMALL-TITLE`, `SMALLINT`, `SOME`, `SORT`, `SORT-ASCENDING`, `SORT-NUMBER`, `SOURCE`, `SOURCE-PROCEDURE`, `SPACE`, `SQL`, `SQRT`, `SSL-SERVER-NAME`, `STANDALONE`, `START`, `START-DOCUMENT`, `START-ELEMENT`, `START-MOVE`, `START-RESIZE`, `START-ROW-RESIZE`, `STATE-DETAIL`, `STATIC`, `STATUS`, `STATUS-AREA`, `STATUS-AREA-FONT`, `STDCALL`, `STOP`, `STOP-AFTER`, `STOP-PARSING`, `STOPPE`, `STOPPED`, `STORED-PROC`, `STORED-PROCE`, `STORED-PROCED`, `STORED-PROCEDU`, `STORED-PROCEDUR`, `STORED-PROCEDURE`, `STREAM`, `STREAM-HANDLE`, `STREAM-IO`, `STRETCH-TO-FIT`, `STRICT`, `STRICT-ENTITY-RESOLUTION`, `STRING`, `STRING-VALUE`, `STRING-XREF`, `SUB-AVE`, `SUB-AVER`, `SUB-AVERA`, `SUB-AVERAG`, `SUB-AVERAGE`, `SUB-COUNT`, `SUB-MAXIMUM`, `SUB-MENU`, `SUB-MIN`, `SUB-MINIMUM`, `SUB-TOTAL`, `SUBSCRIBE`, `SUBST`, `SUBSTI`, `SUBSTIT`, `SUBSTITU`, `SUBSTITUT`, `SUBSTITUTE`, `SUBSTR`, `SUBSTRI`, `SUBSTRIN`, `SUBSTRING`, `SUBTYPE`, `SUM`, `SUM-MAX`, `SUM-MAXI`, `SUM-MAXIM`, `SUM-MAXIMU`, `SUPER`, `SUPER-PROCEDURES`, `SUPPRESS-NAMESPACE-PROCESSING`, `SUPPRESS-W`, `SUPPRESS-WA`, `SUPPRESS-WAR`, `SUPPRESS-WARN`, `SUPPRESS-WARNI`, `SUPPRESS-WARNIN`, `SUPPRESS-WARNING`, `SUPPRESS-WARNINGS`, 
`SYMMETRIC-ENCRYPTION-ALGORITHM`, `SYMMETRIC-ENCRYPTION-IV`, `SYMMETRIC-ENCRYPTION-KEY`, `SYMMETRIC-SUPPORT`, `SYSTEM-ALERT`, `SYSTEM-ALERT-`, `SYSTEM-ALERT-B`, `SYSTEM-ALERT-BO`, `SYSTEM-ALERT-BOX`, `SYSTEM-ALERT-BOXE`, `SYSTEM-ALERT-BOXES`, `SYSTEM-DIALOG`, `SYSTEM-HELP`, `SYSTEM-ID`, `TAB-POSITION`, `TAB-STOP`, `TABLE`, `TABLE-HANDLE`, `TABLE-NUMBER`, `TABLE-SCAN`, `TARGET`, `TARGET-PROCEDURE`, `TEMP-DIR`, `TEMP-DIRE`, `TEMP-DIREC`, `TEMP-DIRECT`, `TEMP-DIRECTO`, `TEMP-DIRECTOR`, `TEMP-DIRECTORY`, `TEMP-TABLE`, `TEMP-TABLE-PREPARE`, `TERM`, `TERMI`, `TERMIN`, `TERMINA`, `TERMINAL`, `TERMINATE`, `TEXT`, `TEXT-CURSOR`, `TEXT-SEG-GROW`, `TEXT-SELECTED`, `THEN`, `THIS-OBJECT`, `THIS-PROCEDURE`, `THREAD-SAFE`, `THREE-D`, `THROUGH`, `THROW`, `THRU`, `TIC-MARKS`, `TIME`, `TIME-SOURCE`, `TITLE`, `TITLE-BGC`, `TITLE-BGCO`, `TITLE-BGCOL`, `TITLE-BGCOLO`, `TITLE-BGCOLOR`, `TITLE-DC`, `TITLE-DCO`, `TITLE-DCOL`, `TITLE-DCOLO`, `TITLE-DCOLOR`, `TITLE-FGC`, `TITLE-FGCO`, `TITLE-FGCOL`, `TITLE-FGCOLO`, `TITLE-FGCOLOR`, `TITLE-FO`, `TITLE-FON`, `TITLE-FONT`, `TO`, `TO-ROWID`, `TODAY`, `TOGGLE-BOX`, `TOOLTIP`, `TOOLTIPS`, `TOP-NAV-QUERY`, `TOP-ONLY`, `TOPIC`, `TOTAL`, `TRAILING`, `TRANS`, `TRANS-INIT-PROCEDURE`, `TRANSACTION`, `TRANSACTION-MODE`, `TRANSPARENT`, `TRIGGER`, `TRIGGERS`, `TRIM`, `TRUE`, `TRUNC`, `TRUNCA`, `TRUNCAT`, `TRUNCATE`, `TYPE`, `TYPE-OF`, `UNBOX`, `UNBUFF`, `UNBUFFE`, `UNBUFFER`, `UNBUFFERE`, `UNBUFFERED`, `UNDERL`, `UNDERLI`, `UNDERLIN`, `UNDERLINE`, `UNDO`, `UNFORM`, `UNFORMA`, `UNFORMAT`, `UNFORMATT`, `UNFORMATTE`, `UNFORMATTED`, `UNION`, `UNIQUE`, `UNIQUE-ID`, `UNIQUE-MATCH`, `UNIX`, `UNLESS-HIDDEN`, `UNLOAD`, `UNSIGNED-LONG`, `UNSUBSCRIBE`, `UP`, `UPDATE`, `UPDATE-ATTRIBUTE`, `URL`, `URL-DECODE`, `URL-ENCODE`, `URL-PASSWORD`, `URL-USERID`, `USE`, `USE-DICT-EXPS`, `USE-FILENAME`, `USE-INDEX`, `USE-REVVIDEO`, `USE-TEXT`, `USE-UNDERLINE`, `USE-WIDGET-POOL`, `USER`, `USER-ID`, `USERID`, `USING`, `V6DISPLAY`, `V6FRAME`, `VALID-EVENT`, `VALID-HANDLE`, 
`VALID-OBJECT`, `VALIDATE`, `VALIDATE-EXPRESSION`, `VALIDATE-MESSAGE`, `VALIDATE-SEAL`, `VALIDATION-ENABLED`, `VALUE`, `VALUE-CHANGED`, `VALUES`, `VAR`, `VARI`, `VARIA`, `VARIAB`, `VARIABL`, `VARIABLE`, `VERBOSE`, `VERSION`, `VERT`, `VERTI`, `VERTIC`, `VERTICA`, `VERTICAL`, `VIEW`, `VIEW-AS`, `VIEW-FIRST-COLUMN-ON-REOPEN`, `VIRTUAL-HEIGHT`, `VIRTUAL-HEIGHT-`, `VIRTUAL-HEIGHT-C`, `VIRTUAL-HEIGHT-CH`, `VIRTUAL-HEIGHT-CHA`, `VIRTUAL-HEIGHT-CHAR`, `VIRTUAL-HEIGHT-CHARS`, `VIRTUAL-HEIGHT-P`, `VIRTUAL-HEIGHT-PI`, `VIRTUAL-HEIGHT-PIX`, `VIRTUAL-HEIGHT-PIXE`, `VIRTUAL-HEIGHT-PIXEL`, `VIRTUAL-HEIGHT-PIXELS`, `VIRTUAL-WIDTH`, `VIRTUAL-WIDTH-`, `VIRTUAL-WIDTH-C`, `VIRTUAL-WIDTH-CH`, `VIRTUAL-WIDTH-CHA`, `VIRTUAL-WIDTH-CHAR`, `VIRTUAL-WIDTH-CHARS`, `VIRTUAL-WIDTH-P`, `VIRTUAL-WIDTH-PI`, `VIRTUAL-WIDTH-PIX`, `VIRTUAL-WIDTH-PIXE`, `VIRTUAL-WIDTH-PIXEL`, `VIRTUAL-WIDTH-PIXELS`, `VISIBLE`, `VOID`, `WAIT`, `WAIT-FOR`, `WARNING`, `WEB-CONTEXT`, `WEEKDAY`, `WHEN`, `WHERE`, `WHILE`, `WIDGET`, `WIDGET-E`, `WIDGET-EN`, `WIDGET-ENT`, `WIDGET-ENTE`, `WIDGET-ENTER`, `WIDGET-ID`, `WIDGET-L`, `WIDGET-LE`, `WIDGET-LEA`, `WIDGET-LEAV`, `WIDGET-LEAVE`, `WIDGET-POOL`, `WIDTH`, `WIDTH-`, `WIDTH-C`, `WIDTH-CH`, `WIDTH-CHA`, `WIDTH-CHAR`, `WIDTH-CHARS`, `WIDTH-P`, `WIDTH-PI`, `WIDTH-PIX`, `WIDTH-PIXE`, `WIDTH-PIXEL`, `WIDTH-PIXELS`, `WINDOW`, `WINDOW-MAXIM`, `WINDOW-MAXIMI`, `WINDOW-MAXIMIZ`, `WINDOW-MAXIMIZE`, `WINDOW-MAXIMIZED`, `WINDOW-MINIM`, `WINDOW-MINIMI`, `WINDOW-MINIMIZ`, `WINDOW-MINIMIZE`, `WINDOW-MINIMIZED`, `WINDOW-NAME`, `WINDOW-NORMAL`, `WINDOW-STA`, `WINDOW-STAT`, `WINDOW-STATE`, `WINDOW-SYSTEM`, `WITH`, `WORD-INDEX`, `WORD-WRAP`, `WORK-AREA-HEIGHT-PIXELS`, `WORK-AREA-WIDTH-PIXELS`, `WORK-AREA-X`, `WORK-AREA-Y`, `WORK-TAB`, `WORK-TABL`, `WORK-TABLE`, `WORKFILE`, `WRITE`, `WRITE-CDATA`, `WRITE-CHARACTERS`, `WRITE-COMMENT`, `WRITE-DATA-ELEMENT`, `WRITE-EMPTY-ELEMENT`, `WRITE-ENTITY-REF`, `WRITE-EXTERNAL-DTD`, `WRITE-FRAGMENT`, `WRITE-JSON`, `WRITE-MESSAGE`, 
`WRITE-PROCESSING-INSTRUCTION`, `WRITE-STATUS`, `WRITE-XML`, `WRITE-XMLSCHEMA`, `X`, `X-OF`, `XCODE`, `XML-DATA-TYPE`, `XML-ENTITY-EXPANSION-LIMIT`, `XML-NODE-TYPE`, `XML-SCHEMA-PATH`, `XML-STRICT-ENTITY-RESOLUTION`, `XML-SUPPRESS-NAMESPACE-PROCESSING`, `XREF`, `XREF-XML`, `Y`, `Y-OF`, `YEAR`, `YEAR-OFFSET`, `YES`, `YES-NO`, `YES-NO-CANCEL`), KeywordReserved, nil}, //nolint - {`"(\\\\|\\[^\\]|[^"\\])*"`, LiteralStringDouble, nil}, - {`'(\\\\|\\[^\\]|[^'\\])*'`, LiteralStringSingle, nil}, - {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`\s+`, Text, nil}, - {`[+*/=-]`, Operator, nil}, - {`[.:()]`, Punctuation, nil}, - {`.`, NameVariable, nil}, - }, - "comment": { - {`[^*/]`, CommentMultiline, nil}, - {`/\*`, CommentMultiline, Push()}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[*/]`, CommentMultiline, nil}, - {`\/\/`, CommentSingle, nil}, - }, - "preprocessor": { - {`[^{}]`, CommentPreproc, nil}, - {`\{`, CommentPreproc, Push()}, - {`\}`, CommentPreproc, Pop(1)}, - }, - } - }, -)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/openscad.go b/vendor/github.com/alecthomas/chroma/lexers/o/openscad.go deleted file mode 100644 index d18b9caab4c9..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/o/openscad.go +++ /dev/null @@ -1,47 +0,0 @@ -package o - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -var OpenSCAD = internal.Register(MustNewLazyLexer( - &Config{ - Name: "OpenSCAD", - Aliases: []string{"openscad"}, - Filenames: []string{"*.scad"}, - MimeTypes: []string{"text/x-scad"}, - }, - openSCADRules, -)) - -func openSCADRules() Rules { - return Rules{ - "root": { - {`[^\S\n]+`, Text, nil}, - {`\n`, Text, nil}, - {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, - {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, - {`[{}\[\]\(\),;:]`, Punctuation, nil}, - {`[*!#%\-+=?/]`, Operator, nil}, - {`<|<=|==|!=|>=|>|&&|\|\|`, Operator, nil}, - {`\$(f[asn]|t|vp[rtd]|children)`, NameVariableMagic, nil}, - {Words(``, `\b`, `PI`, `undef`), KeywordConstant, nil}, - {`(use|include)((?:\s|\\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("includes")}, - {`(module)(\s*)([^\s\(]+)`, ByGroups(KeywordNamespace, Text, NameNamespace), nil}, - {`(function)(\s*)([^\s\(]+)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil}, - {`\b(true|false)\b`, Literal, nil}, - {`\b(function|module|include|use|for|intersection_for|if|else|return)\b`, Keyword, nil}, - {`\b(circle|square|polygon|text|sphere|cube|cylinder|polyhedron|translate|rotate|scale|resize|mirror|multmatrix|color|offset|hull|minkowski|union|difference|intersection|abs|sign|sin|cos|tan|acos|asin|atan|atan2|floor|round|ceil|ln|log|pow|sqrt|exp|rands|min|max|concat|lookup|str|chr|search|version|version_num|norm|cross|parent_module|echo|import|import_dxf|dxf_linear_extrude|linear_extrude|rotate_extrude|surface|projection|render|dxf_cross|dxf_dim|let|assign|len)\b`, NameBuiltin, nil}, - {`\bchildren\b`, NameBuiltinPseudo, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, - {`-?\d+(\.\d+)?(e[+-]?\d+)?`, Number, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - }, - "includes": { - {"(<)([^>]*)(>)", ByGroups(Punctuation, CommentPreprocFile, Punctuation), nil}, - Default(Pop(1)), - 
}, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/org.go b/vendor/github.com/alecthomas/chroma/lexers/o/org.go deleted file mode 100644 index 00f6df44502a..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/o/org.go +++ /dev/null @@ -1,108 +0,0 @@ -package o - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Org mode lexer. -var Org = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Org Mode", - Aliases: []string{"org", "orgmode"}, - Filenames: []string{"*.org"}, - MimeTypes: []string{"text/org"}, // https://lists.gnu.org/r/emacs-orgmode/2017-09/msg00087.html - }, - orgRules, -)) - -func orgRules() Rules { - return Rules{ - "root": { - {`^# .*$`, Comment, nil}, - // Headings - {`^(\*)( COMMENT)( .*)$`, ByGroups(GenericHeading, NameEntity, GenericStrong), nil}, - {`^(\*\*+)( COMMENT)( .*)$`, ByGroups(GenericSubheading, NameEntity, Text), nil}, - {`^(\*)( DONE)( .*)$`, ByGroups(GenericHeading, LiteralStringRegex, GenericStrong), nil}, - {`^(\*\*+)( DONE)( .*)$`, ByGroups(GenericSubheading, LiteralStringRegex, Text), nil}, - {`^(\*)( TODO)( .*)$`, ByGroups(GenericHeading, Error, GenericStrong), nil}, - {`^(\*\*+)( TODO)( .*)$`, ByGroups(GenericSubheading, Error, Text), nil}, - {`^(\*)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericHeading, GenericStrong, GenericEmph), nil}, // Level 1 heading with tags - {`^(\*)( .+)$`, ByGroups(GenericHeading, GenericStrong), nil}, // // Level 1 heading with NO tags - {`^(\*\*+)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericSubheading, Text, GenericEmph), nil}, // Level 2+ heading with tags - {`^(\*\*+)( .+)$`, ByGroups(GenericSubheading, Text), nil}, // Level 2+ heading with NO tags - // Checkbox lists - {`^( *)([+-] )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, - {`^( +)(\* )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, - // Definition lists - {`^( *)([+-] )([^ \n]+ 
::)( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, - {`^( +)(\* )([^ \n]+ ::)( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, - // Unordered lists - {`^( *)([+-] )(.+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, - {`^( +)(\* )(.+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, - // Ordered lists - {`^( *)([0-9]+[.)])( \[@[0-9]+\])( .+)$`, ByGroups(Text, Keyword, GenericEmph, UsingSelf("inline")), nil}, - {`^( *)([0-9]+[.)])( .+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, - // Dynamic Blocks - {`(?i)^( *#\+begin: )([^ ]+)([\w\W]*?\n)([\w\W]*?)(^ *#\+end: *$)`, ByGroups(Comment, CommentSpecial, Comment, UsingSelf("inline"), Comment), nil}, - // Blocks - // - Comment Blocks - {`(?i)^( *#\+begin_comment *\n)([\w\W]*?)(^ *#\+end_comment *$)`, ByGroups(Comment, Comment, Comment), nil}, - // - Src Blocks - { - `(?i)^( *#\+begin_src )([^ \n]+)(.*?\n)([\w\W]*?)(^ *#\+end_src *$)`, - UsingByGroup( - internal.Get, - 2, 4, - Comment, CommentSpecial, Comment, Text, Comment, - ), - nil, - }, - // - Export Blocks - { - `(?i)^( *#\+begin_export )(\w+)( *\n)([\w\W]*?)(^ *#\+end_export *$)`, - UsingByGroup( - internal.Get, - 2, 4, - Comment, CommentSpecial, Text, Text, Comment, - ), - nil, - }, - // - Org Special, Example, Verse, etc. 
Blocks - {`(?i)^( *#\+begin_)(\w+)( *\n)([\w\W]*?)(^ *#\+end_\2)( *$)`, ByGroups(Comment, Comment, Text, Text, Comment, Text), nil}, - // Keywords - {`^(#\+\w+)(:.*)$`, ByGroups(CommentSpecial, Comment), nil}, // Other Org keywords like #+title - // Properties and Drawers - {`(?i)^( *:\w+: *\n)([\w\W]*?)(^ *:end: *$)`, ByGroups(Comment, CommentSpecial, Comment), nil}, - // Line break operator - {`^(.*)(\\\\)$`, ByGroups(UsingSelf("inline"), Operator), nil}, - // Deadline/Scheduled - {`(?i)^( *(?:DEADLINE|SCHEDULED): )(<[^<>]+?> *)$`, ByGroups(Comment, CommentSpecial), nil}, // DEADLINE/SCHEDULED: - // DONE state CLOSED - {`(?i)^( *CLOSED: )(\[[^][]+?\] *)$`, ByGroups(Comment, CommentSpecial), nil}, // CLOSED: [datestamp] - // All other lines - Include("inline"), - }, - "inline": { - {`(\s)*(\*[^ \n*][^*]+?[^ \n*]\*)((?=\W|\n|$))`, ByGroups(Text, GenericStrong, Text), nil}, // Bold - {`(\s)*(/[^/]+?/)((?=\W|\n|$))`, ByGroups(Text, GenericEmph, Text), nil}, // Italic - {`(\s)*(=[^\n=]+?=)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Verbatim - {`(\s)*(~[^\n~]+?~)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Code - {`(\s)*(\+[^+]+?\+)((?=\W|\n|$))`, ByGroups(Text, GenericDeleted, Text), nil}, // Strikethrough - {`(\s)*(_[^_]+?_)((?=\W|\n|$))`, ByGroups(Text, GenericUnderline, Text), nil}, // Underline - {`(<)([^<>]+?)(>)`, ByGroups(Text, String, Text), nil}, // - {`[{]{3}[^}]+[}]{3}`, NameBuiltin, nil}, // {{{macro(foo,1)}}} - {`([^[])(\[fn:)([^]]+?)(\])([^]])`, ByGroups(Text, NameBuiltinPseudo, LiteralString, NameBuiltinPseudo, Text), nil}, // [fn:1] - // Links - {`(\[\[)([^][]+?)(\]\[)([^][]+)(\]\])`, ByGroups(Text, NameAttribute, Text, NameTag, Text), nil}, // [[link][descr]] - {`(\[\[)([^][]+?)(\]\])`, ByGroups(Text, NameAttribute, Text), nil}, // [[link]] - {`(<<)([^<>]+?)(>>)`, ByGroups(Text, NameAttribute, Text), nil}, // <> - // Tables - {`^( *)(\|[ -].*?[ -]\|)$`, ByGroups(Text, String), nil}, - // Blank lines, newlines - {`\n`, Text, 
nil}, - // Any other text - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go b/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go deleted file mode 100644 index 00c6255af6f5..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go +++ /dev/null @@ -1,30 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Pacmanconf lexer. -var Pacmanconf = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PacmanConf", - Aliases: []string{"pacmanconf"}, - Filenames: []string{"pacman.conf"}, - MimeTypes: []string{}, - }, - pacmanconfRules, -)) - -func pacmanconfRules() Rules { - return Rules{ - "root": { - {`#.*$`, CommentSingle, nil}, - {`^\s*\[.*?\]\s*$`, Keyword, nil}, - {`(\w+)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, - {`^(\s*)(\w+)(\s*)$`, ByGroups(Text, NameAttribute, Text), nil}, - {Words(``, `\b`, `$repo`, `$arch`, `%o`, `%u`), NameVariable, nil}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/perl.go b/vendor/github.com/alecthomas/chroma/lexers/p/perl.go deleted file mode 100644 index 6aa338ad116b..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/perl.go +++ /dev/null @@ -1,142 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Perl lexer. 
-var Perl = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Perl", - Aliases: []string{"perl", "pl"}, - Filenames: []string{"*.pl", "*.pm", "*.t"}, - MimeTypes: []string{"text/x-perl", "application/x-perl"}, - DotAll: true, - }, - perlRules, -)) - -func perlRules() Rules { - return Rules{ - "balanced-regex": { - {`/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`!(\\\\|\\[^\\]|[^\\!])*![egimosx]*`, LiteralStringRegex, Pop(1)}, - {`\\(\\\\|[^\\])*\\[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*`, LiteralStringRegex, Pop(1)}, - {`\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*`, LiteralStringRegex, Pop(1)}, - {`\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*`, LiteralStringRegex, Pop(1)}, - }, - "root": { - {`\A\#!.+?$`, CommentHashbang, nil}, - {`\#.*?$`, CommentSingle, nil}, - {`^=[a-zA-Z0-9]+\s+.*?\n=cut`, CommentMultiline, nil}, - {Words(``, `\b`, `case`, `continue`, `do`, `else`, `elsif`, `for`, `foreach`, `if`, `last`, `my`, `next`, `our`, `redo`, `reset`, `then`, `unless`, `until`, `while`, `print`, `new`, `BEGIN`, `CHECK`, `INIT`, `END`, `return`), Keyword, nil}, - {`(format)(\s+)(\w+)(\s*)(=)(\s*\n)`, ByGroups(Keyword, Text, Name, Text, Punctuation, Text), Push("format")}, - {`(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b`, OperatorWord, nil}, - {`s/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*`, LiteralStringRegex, nil}, - {`s!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*`, LiteralStringRegex, nil}, - {`s\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*`, LiteralStringRegex, nil}, - {`s@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*`, LiteralStringRegex, nil}, - {`s%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*`, 
LiteralStringRegex, nil}, - {`s\{(\\\\|\\[^\\]|[^\\}])*\}\s*`, LiteralStringRegex, Push("balanced-regex")}, - {`s<(\\\\|\\[^\\]|[^\\>])*>\s*`, LiteralStringRegex, Push("balanced-regex")}, - {`s\[(\\\\|\\[^\\]|[^\\\]])*\]\s*`, LiteralStringRegex, Push("balanced-regex")}, - {`s\((\\\\|\\[^\\]|[^\\)])*\)\s*`, LiteralStringRegex, Push("balanced-regex")}, - {`m?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*`, LiteralStringRegex, nil}, - {`m(?=[/!\\{<\[(@%$])`, LiteralStringRegex, Push("balanced-regex")}, - {`((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*`, LiteralStringRegex, nil}, - {`\s+`, Text, nil}, - {Words(``, `\b`, `abs`, `accept`, `alarm`, `atan2`, `bind`, `binmode`, `bless`, `caller`, `chdir`, `chmod`, `chomp`, `chop`, `chown`, `chr`, `chroot`, `close`, `closedir`, `connect`, `continue`, `cos`, `crypt`, `dbmclose`, `dbmopen`, `defined`, `delete`, `die`, `dump`, `each`, `endgrent`, `endhostent`, `endnetent`, `endprotoent`, `endpwent`, `endservent`, `eof`, `eval`, `exec`, `exists`, `exit`, `exp`, `fcntl`, `fileno`, `flock`, `fork`, `format`, `formline`, `getc`, `getgrent`, `getgrgid`, `getgrnam`, `gethostbyaddr`, `gethostbyname`, `gethostent`, `getlogin`, `getnetbyaddr`, `getnetbyname`, `getnetent`, `getpeername`, `getpgrp`, `getppid`, `getpriority`, `getprotobyname`, `getprotobynumber`, `getprotoent`, `getpwent`, `getpwnam`, `getpwuid`, `getservbyname`, `getservbyport`, `getservent`, `getsockname`, `getsockopt`, `glob`, `gmtime`, `goto`, `grep`, `hex`, `import`, `index`, `int`, `ioctl`, `join`, `keys`, `kill`, `last`, `lc`, `lcfirst`, `length`, `link`, `listen`, `local`, `localtime`, `log`, `lstat`, `map`, `mkdir`, `msgctl`, `msgget`, `msgrcv`, `msgsnd`, `my`, `next`, `oct`, `open`, `opendir`, `ord`, `our`, `pack`, `pipe`, `pop`, `pos`, `printf`, `prototype`, `push`, `quotemeta`, `rand`, `read`, `readdir`, `readline`, `readlink`, `readpipe`, `recv`, `redo`, `ref`, `rename`, `reverse`, `rewinddir`, `rindex`, `rmdir`, `scalar`, `seek`, `seekdir`, `select`, `semctl`, 
`semget`, `semop`, `send`, `setgrent`, `sethostent`, `setnetent`, `setpgrp`, `setpriority`, `setprotoent`, `setpwent`, `setservent`, `setsockopt`, `shift`, `shmctl`, `shmget`, `shmread`, `shmwrite`, `shutdown`, `sin`, `sleep`, `socket`, `socketpair`, `sort`, `splice`, `split`, `sprintf`, `sqrt`, `srand`, `stat`, `study`, `substr`, `symlink`, `syscall`, `sysopen`, `sysread`, `sysseek`, `system`, `syswrite`, `tell`, `telldir`, `tie`, `tied`, `time`, `times`, `tr`, `truncate`, `uc`, `ucfirst`, `umask`, `undef`, `unlink`, `unpack`, `unshift`, `untie`, `utime`, `values`, `vec`, `wait`, `waitpid`, `wantarray`, `warn`, `write`), NameBuiltin, nil}, - {`((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b`, NameBuiltinPseudo, nil}, - {`(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Text), nil}, - {`__END__`, CommentPreproc, Push("end-part")}, - {`\$\^[ADEFHILMOPSTWX]`, NameVariableGlobal, nil}, - {"\\$[\\\\\\\"\\[\\]'&`+*.,;=%~?@$!<>(^|/-](?!\\w)", NameVariableGlobal, nil}, - {`[$@%#]+`, NameVariable, Push("varname")}, - {`0_?[0-7]+(_[0-7]+)*`, LiteralNumberOct, nil}, - {`0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*`, LiteralNumberHex, nil}, - {`0b[01]+(_[01]+)*`, LiteralNumberBin, nil}, - {`(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?`, LiteralNumberFloat, nil}, - {`(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*`, LiteralNumberFloat, nil}, - {`\d+(_\d+)*`, LiteralNumberInteger, nil}, - {`'(\\\\|\\[^\\]|[^'\\])*'`, LiteralString, nil}, - {`"(\\\\|\\[^\\]|[^"\\])*"`, LiteralString, nil}, - {"`(\\\\\\\\|\\\\[^\\\\]|[^`\\\\])*`", LiteralStringBacktick, nil}, - {`<([^\s>]+)>`, LiteralStringRegex, nil}, - {`(q|qq|qw|qr|qx)\{`, LiteralStringOther, Push("cb-string")}, - {`(q|qq|qw|qr|qx)\(`, LiteralStringOther, Push("rb-string")}, - {`(q|qq|qw|qr|qx)\[`, LiteralStringOther, Push("sb-string")}, - {`(q|qq|qw|qr|qx)\<`, LiteralStringOther, Push("lt-string")}, - {`(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2`, 
LiteralStringOther, nil}, - {`(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, - {`(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, - {`(sub)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, - {Words(``, `\b`, `no`, `package`, `require`, `use`), Keyword, nil}, - {`(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|!~|&&?|\|\||\.{1,3})`, Operator, nil}, - {`[-+/*%=<>&^|!\\~]=?`, Operator, nil}, - {`[()\[\]:;,<>/?{}]`, Punctuation, nil}, - {`(?=\w)`, Name, Push("name")}, - }, - "format": { - {`\.\n`, LiteralStringInterpol, Pop(1)}, - {`[^\n]*\n`, LiteralStringInterpol, nil}, - }, - "varname": { - {`\s+`, Text, nil}, - {`\{`, Punctuation, Pop(1)}, - {`\)|,`, Punctuation, Pop(1)}, - {`\w+::`, NameNamespace, nil}, - {`[\w:]+`, NameVariable, Pop(1)}, - }, - "name": { - {`[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)`, NameNamespace, Pop(1)}, - {`[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::`, NameNamespace, Pop(1)}, - {`[\w:]+`, Name, Pop(1)}, - {`[A-Z_]+(?=\W)`, NameConstant, Pop(1)}, - {`(?=\W)`, Text, Pop(1)}, - }, - "funcname": { - {`[a-zA-Z_]\w*[!?]?`, NameFunction, nil}, - {`\s+`, Text, nil}, - {`(\([$@%]*\))(\s*)`, ByGroups(Punctuation, Text), nil}, - {`;`, Punctuation, Pop(1)}, - {`.*?\{`, Punctuation, Pop(1)}, - }, - "cb-string": { - {`\\[{}\\]`, LiteralStringOther, nil}, - {`\\`, LiteralStringOther, nil}, - {`\{`, LiteralStringOther, Push("cb-string")}, - {`\}`, LiteralStringOther, Pop(1)}, - {`[^{}\\]+`, LiteralStringOther, nil}, - }, - "rb-string": { - {`\\[()\\]`, LiteralStringOther, nil}, - {`\\`, LiteralStringOther, nil}, - {`\(`, LiteralStringOther, Push("rb-string")}, - {`\)`, LiteralStringOther, Pop(1)}, - {`[^()]+`, LiteralStringOther, nil}, - }, - "sb-string": { - {`\\[\[\]\\]`, LiteralStringOther, nil}, - {`\\`, LiteralStringOther, nil}, - {`\[`, LiteralStringOther, Push("sb-string")}, - {`\]`, LiteralStringOther, Pop(1)}, - {`[^\[\]]+`, LiteralStringOther, nil}, - }, - 
"lt-string": { - {`\\[<>\\]`, LiteralStringOther, nil}, - {`\\`, LiteralStringOther, nil}, - {`\<`, LiteralStringOther, Push("lt-string")}, - {`\>`, LiteralStringOther, Pop(1)}, - {`[^<>]+`, LiteralStringOther, nil}, - }, - "end-part": { - {`.+`, CommentPreproc, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pig.go b/vendor/github.com/alecthomas/chroma/lexers/p/pig.go deleted file mode 100644 index f2852c18ae9e..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/pig.go +++ /dev/null @@ -1,61 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Pig lexer. -var Pig = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Pig", - Aliases: []string{"pig"}, - Filenames: []string{"*.pig"}, - MimeTypes: []string{"text/x-pig"}, - CaseInsensitive: true, - }, - pigRules, -)) - -func pigRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`--.*`, Comment, nil}, - {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, - {`\\\n`, Text, nil}, - {`\\`, Text, nil}, - {`\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'`, LiteralString, nil}, - Include("keywords"), - Include("types"), - Include("builtins"), - Include("punct"), - Include("operators"), - {`[0-9]*\.[0-9]+(e[0-9]+)?[fd]?`, LiteralNumberFloat, nil}, - {`0x[0-9a-f]+`, LiteralNumberHex, nil}, - {`[0-9]+L?`, LiteralNumberInteger, nil}, - {`\n`, Text, nil}, - {`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, - {`[()#:]`, Text, nil}, - {`[^(:#\'")\s]+`, Text, nil}, - {`\S+\s+`, Text, nil}, - }, - "keywords": { - 
{`(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|%declare|%default|define|dense|desc|describe|distinct|du|dump|eval|exex|explain|filter|flatten|foreach|full|generate|group|help|if|illustrate|import|inner|input|into|is|join|kill|left|limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|outer|output|parallel|pig|pwd|quit|register|returns|right|rm|rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|stream|through|union|using|void)\b`, Keyword, nil}, - }, - "builtins": { - {`(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|TOKENIZE)\b`, NameBuiltin, nil}, - }, - "types": { - {`(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|int|long|tuple)\b`, KeywordType, nil}, - }, - "punct": { - {`[;(){}\[\]]`, Punctuation, nil}, - }, - "operators": { - {`[#=,./%+\-?]`, Operator, nil}, - {`(eq|gt|lt|gte|lte|neq|matches)\b`, Operator, nil}, - {`(==|<=|<|>=|>|!=)`, Operator, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go b/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go deleted file mode 100644 index 9e024107f70f..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go +++ /dev/null @@ -1,45 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Pkgconfig lexer. 
-var Pkgconfig = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PkgConfig", - Aliases: []string{"pkgconfig"}, - Filenames: []string{"*.pc"}, - MimeTypes: []string{}, - }, - pkgconfigRules, -)) - -func pkgconfigRules() Rules { - return Rules{ - "root": { - {`#.*$`, CommentSingle, nil}, - {`^(\w+)(=)`, ByGroups(NameAttribute, Operator), nil}, - {`^([\w.]+)(:)`, ByGroups(NameTag, Punctuation), Push("spvalue")}, - Include("interp"), - {`[^${}#=:\n.]+`, Text, nil}, - {`.`, Text, nil}, - }, - "interp": { - {`\$\$`, Text, nil}, - {`\$\{`, LiteralStringInterpol, Push("curly")}, - }, - "curly": { - {`\}`, LiteralStringInterpol, Pop(1)}, - {`\w+`, NameAttribute, nil}, - }, - "spvalue": { - Include("interp"), - {`#.*$`, CommentSingle, Pop(1)}, - {`\n`, Text, Pop(1)}, - {`[^${}#\n]+`, Text, nil}, - {`.`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go b/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go deleted file mode 100644 index 058c02a89fec..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go +++ /dev/null @@ -1,17 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -var Plaintext = internal.Register(MustNewLazyLexer( - &Config{ - Name: "plaintext", - Aliases: []string{"text", "plain", "no-highlight"}, - Filenames: []string{"*.txt"}, - MimeTypes: []string{"text/plain"}, - Priority: 0.1, - }, - internal.PlaintextRules, -)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go b/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go deleted file mode 100644 index 324d3feb2aee..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go +++ /dev/null @@ -1,62 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Pl/Pgsql lexer. 
-var PLpgSQL = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PL/pgSQL", - Aliases: []string{"plpgsql"}, - Filenames: []string{}, - MimeTypes: []string{"text/x-plpgsql"}, - NotMultiline: true, - CaseInsensitive: true, - }, - plpgSQLRules, -)) - -func plpgSQLRules() Rules { - return Rules{ - "root": { - {`\%[a-z]\w*\b`, NameBuiltin, nil}, - {`:=`, Operator, nil}, - {`\<\<[a-z]\w*\>\>`, NameLabel, nil}, - {`\#[a-z]\w*\b`, KeywordPseudo, nil}, - {`\s+`, TextWhitespace, nil}, - {`--.*\n?`, CommentSingle, nil}, - {`/\*`, CommentMultiline, Push("multiline-comments")}, - {`(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b`, NameBuiltin, nil}, - {Words(``, `\b`, `ABORT`, `ABSOLUTE`, `ACCESS`, `ACTION`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALL`, `ALSO`, `ALTER`, `ALWAYS`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARRAY`, `AS`, `ASC`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATTRIBUTE`, `AUTHORIZATION`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BIGINT`, `BINARY`, `BIT`, `BOOLEAN`, `BOTH`, `BY`, `CACHE`, `CALLED`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CHAIN`, `CHAR`, `CHARACTER`, `CHARACTERISTICS`, `CHECK`, `CHECKPOINT`, `CLASS`, `CLOSE`, `CLUSTER`, `COALESCE`, `COLLATE`, `COLLATION`, `COLUMN`, `COMMENT`, `COMMENTS`, `COMMIT`, `COMMITTED`, `CONCURRENTLY`, `CONFIGURATION`, `CONNECTION`, `CONSTRAINT`, `CONSTRAINTS`, `CONTENT`, `CONTINUE`, `CONVERSION`, `COPY`, `COST`, `CREATE`, `CROSS`, `CSV`, `CURRENT`, `CURRENT_CATALOG`, `CURRENT_DATE`, 
`CURRENT_ROLE`, `CURRENT_SCHEMA`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CYCLE`, `DATA`, `DATABASE`, `DAY`, `DEALLOCATE`, `DEC`, `DECIMAL`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DESC`, `DICTIONARY`, `DISABLE`, `DISCARD`, `DISTINCT`, `DO`, `DOCUMENT`, `DOMAIN`, `DOUBLE`, `DROP`, `EACH`, `ELSE`, `ENABLE`, `ENCODING`, `ENCRYPTED`, `END`, `ENUM`, `ESCAPE`, `EVENT`, `EXCEPT`, `EXCLUDE`, `EXCLUDING`, `EXCLUSIVE`, `EXECUTE`, `EXISTS`, `EXPLAIN`, `EXTENSION`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FAMILY`, `FETCH`, `FILTER`, `FIRST`, `FLOAT`, `FOLLOWING`, `FOR`, `FORCE`, `FOREIGN`, `FORWARD`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `FUNCTIONS`, `GLOBAL`, `GRANT`, `GRANTED`, `GREATEST`, `GROUP`, `HANDLER`, `HAVING`, `HEADER`, `HOLD`, `HOUR`, `IDENTITY`, `IF`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDEXES`, `INHERIT`, `INHERITS`, `INITIALLY`, `INLINE`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTEAD`, `INT`, `INTEGER`, `INTERSECT`, `INTERVAL`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `JOIN`, `KEY`, `LABEL`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LC_COLLATE`, `LC_CTYPE`, `LEADING`, `LEAKPROOF`, `LEAST`, `LEFT`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCK`, `MAPPING`, `MATCH`, `MATERIALIZED`, `MAXVALUE`, `MINUTE`, `MINVALUE`, `MODE`, `MONTH`, `MOVE`, `NAME`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NEXT`, `NO`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NOWAIT`, `NULL`, `NULLIF`, `NULLS`, `NUMERIC`, `OBJECT`, `OF`, `OFF`, `OFFSET`, `OIDS`, `ON`, `ONLY`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OVER`, `OVERLAPS`, `OVERLAY`, `OWNED`, `OWNER`, `PARSER`, `PARTIAL`, `PARTITION`, `PASSING`, `PASSWORD`, `PLACING`, `PLANS`, `POLICY`, `POSITION`, `PRECEDING`, `PRECISION`, `PREPARE`, `PREPARED`, `PRESERVE`, `PRIMARY`, `PRIOR`, 
`PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PROGRAM`, `QUOTE`, `RANGE`, `READ`, `REAL`, `REASSIGN`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFRESH`, `REINDEX`, `RELATIVE`, `RELEASE`, `RENAME`, `REPEATABLE`, `REPLACE`, `REPLICA`, `RESET`, `RESTART`, `RESTRICT`, `RETURNING`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROW`, `ROWS`, `RULE`, `SAVEPOINT`, `SCHEMA`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SEQUENCE`, `SEQUENCES`, `SERIALIZABLE`, `SERVER`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SMALLINT`, `SNAPSHOT`, `SOME`, `STABLE`, `STANDALONE`, `START`, `STATEMENT`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRIP`, `SUBSTRING`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `TABLE`, `TABLES`, `TABLESPACE`, `TEMP`, `TEMPLATE`, `TEMPORARY`, `TEXT`, `THEN`, `TIME`, `TIMESTAMP`, `TO`, `TRAILING`, `TRANSACTION`, `TREAT`, `TRIGGER`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `TYPES`, `UNBOUNDED`, `UNCOMMITTED`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNLOGGED`, `UNTIL`, `UPDATE`, `USER`, `USING`, `VACUUM`, `VALID`, `VALIDATE`, `VALIDATOR`, `VALUE`, `VALUES`, `VARCHAR`, `VARIADIC`, `VARYING`, `VERBOSE`, `VERSION`, `VIEW`, `VIEWS`, `VOLATILE`, `WHEN`, `WHERE`, `WHITESPACE`, `WINDOW`, `WITH`, `WITHIN`, `WITHOUT`, `WORK`, `WRAPPER`, `WRITE`, `XML`, `XMLATTRIBUTES`, `XMLCONCAT`, `XMLELEMENT`, `XMLEXISTS`, `XMLFOREST`, `XMLPARSE`, `XMLPI`, `XMLROOT`, `XMLSERIALIZE`, `YEAR`, `YES`, `ZONE`, `ALIAS`, `CONSTANT`, `DIAGNOSTICS`, `ELSIF`, `EXCEPTION`, `EXIT`, `FOREACH`, `GET`, `LOOP`, `NOTICE`, `OPEN`, `PERFORM`, `QUERY`, `RAISE`, `RETURN`, `REVERSE`, `SQLSTATE`, `WHILE`), Keyword, nil}, - {"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, - {`::`, Operator, nil}, - {`\$\d+`, NameVariable, nil}, - {`([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`((?:E|U&)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, - 
{`((?:U&)?)(")`, ByGroups(LiteralStringAffix, LiteralStringName), Push("quoted-ident")}, - // { `(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)`, ?? ??, nil }, - {`[a-z_]\w*`, Name, nil}, - {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, - {`[;:()\[\]{},.]`, Punctuation, nil}, - }, - "multiline-comments": { - {`/\*`, CommentMultiline, Push("multiline-comments")}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[^/*]+`, CommentMultiline, nil}, - {`[/*]`, CommentMultiline, nil}, - }, - "string": { - {`[^']+`, LiteralStringSingle, nil}, - {`''`, LiteralStringSingle, nil}, - {`'`, LiteralStringSingle, Pop(1)}, - }, - "quoted-ident": { - {`[^"]+`, LiteralStringName, nil}, - {`""`, LiteralStringName, nil}, - {`"`, LiteralStringName, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plutus_core.go b/vendor/github.com/alecthomas/chroma/lexers/p/plutus_core.go deleted file mode 100644 index b2ad4e108a5d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/plutus_core.go +++ /dev/null @@ -1,76 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// nolint - -// Lexer for the Plutus Core Languages (version 2.1) -// -// including both Typed- and Untyped- versions -// based on “Formal Specification of the Plutus Core Language (version 2.1)”, published 6th April 2021: -// https://hydra.iohk.io/build/8205579/download/1/plutus-core-specification.pdf - -var PlutusCoreLang = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Plutus Core", - Aliases: []string{"plutus-core", "plc"}, - Filenames: []string{"*.plc"}, - MimeTypes: []string{"text/x-plutus-core", "application/x-plutus-core"}, - }, - plutusCoreRules, -)) - -func plutusCoreRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`(\(|\))`, Punctuation, nil}, - {`(\[|\])`, Punctuation, nil}, - {`({|})`, Punctuation, nil}, - - // Constants. Figure 1. - // For version, see handling of (program ...) below. 
- {`([+-]?\d+)`, LiteralNumberInteger, nil}, - {`(#([a-fA-F0-9][a-fA-F0-9])+)`, LiteralString, nil}, - {`(\(\))`, NameConstant, nil}, - {`(True|False)`, NameConstant, nil}, - - // Keywords. Figures 2 and 15. - // Special handling for program because it is followed by a version. - {`(con |abs |iwrap |unwrap |lam |builtin |delay |force |error)`, Keyword, nil}, - {`(fun |all |ifix |lam |con )`, Keyword, nil}, - {`(type|fun )`, Keyword, nil}, - {`(program )(\S+)`, ByGroups(Keyword, LiteralString), nil}, - - // Built-in Types. Figure 12. - {`(unit|bool|integer|bytestring|string)`, KeywordType, nil}, - - // Built-ins Functions. Figure 14 but, more importantly, implementation: - // https://github.com/input-output-hk/plutus/blob/6d759c4/plutus-core/plutus-core/src/PlutusCore/Default/Builtins.hs#L42-L111 - {`(addInteger |subtractInteger |multiplyInteger |divideInteger |quotientInteger |remainderInteger |modInteger |equalsInteger |lessThanInteger |lessThanEqualsInteger )`, NameBuiltin, nil}, - {`(appendByteString |consByteString |sliceByteString |lengthOfByteString |indexByteString |equalsByteString |lessThanByteString |lessThanEqualsByteString )`, NameBuiltin, nil}, - {`(sha2_256 |sha3_256 |blake2b_256 |verifySignature )`, NameBuiltin, nil}, - {`(appendString |equalsString |encodeUtf8 |decodeUtf8 )`, NameBuiltin, nil}, - {`(ifThenElse )`, NameBuiltin, nil}, - {`(chooseUnit )`, NameBuiltin, nil}, - {`(trace )`, NameBuiltin, nil}, - {`(fstPair |sndPair )`, NameBuiltin, nil}, - {`(chooseList |mkCons |headList |tailList |nullList )`, NameBuiltin, nil}, - {`(chooseData |constrData |mapData |listData |iData |bData |unConstrData |unMapData |unListData |unIData |unBData |equalsData )`, NameBuiltin, nil}, - {`(mkPairData |mkNilData |mkNilPairData )`, NameBuiltin, nil}, - - // Name. Figure 1. - {`([a-zA-Z][a-zA-Z0-9_']*)`, Name, nil}, - - // Unicode String. Not in the specification. 
- {`"`, LiteralStringDouble, Push("string")}, - }, - "string": { - {`[^\\"]+`, LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pony.go b/vendor/github.com/alecthomas/chroma/lexers/p/pony.go deleted file mode 100644 index 41d568289a34..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/pony.go +++ /dev/null @@ -1,63 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Pony lexer. -var Pony = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Pony", - Aliases: []string{"pony"}, - Filenames: []string{"*.pony"}, - MimeTypes: []string{}, - }, - ponyRules, -)) - -func ponyRules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - {`[^\S\n]+`, Text, nil}, - {`//.*\n`, CommentSingle, nil}, - {`/\*`, CommentMultiline, Push("nested_comment")}, - {`"""(?:.|\n)*?"""`, LiteralStringDoc, nil}, - {`"`, LiteralString, Push("string")}, - {`\'.*\'`, LiteralStringChar, nil}, - {`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil}, - {Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil}, - {`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil}, - {Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil}, - {`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")}, - {`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")}, - {Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, 
`InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil}, - {`_?[A-Z]\w*`, NameClass, nil}, - {`string\(\)`, NameOther, nil}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`(true|false)\b`, Keyword, nil}, - {`_\d*`, Name, nil}, - {`_?[a-z][\w\'_]*`, Name, nil}, - }, - "typename": { - {`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)}, - }, - "methodname": { - {`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)}, - }, - "nested_comment": { - {`[^*/]+`, CommentMultiline, nil}, - {`/\*`, CommentMultiline, Push()}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[*/]`, CommentMultiline, nil}, - }, - "string": { - {`"`, LiteralString, Pop(1)}, - {`\\"`, LiteralString, nil}, - {`[^\\"]+`, LiteralString, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go b/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go deleted file mode 100644 index 8977e2cfd401..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go +++ /dev/null @@ -1,83 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Postgresql Sql Dialect lexer. 
-var PostgreSQL = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PostgreSQL SQL dialect", - Aliases: []string{"postgresql", "postgres"}, - Filenames: []string{}, - MimeTypes: []string{"text/x-postgresql"}, - NotMultiline: true, - CaseInsensitive: true, - }, - postgreSQLRules, -)) - -func postgreSQLRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`--.*\n?`, CommentSingle, nil}, - {`/\*`, CommentMultiline, Push("multiline-comments")}, - {`(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b`, NameBuiltin, nil}, - { - `(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)('?)(\w+)?('?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)`, - UsingByGroup( - internal.Get, - 6, 12, - Keyword, Text, Keyword, Text, // DO LANGUAGE - StringSingle, StringSingle, StringSingle, Text, // 'plpgsql' - StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ - StringHeredoc, // (code block) - StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ - ), - nil, - }, - {Words(``, `\b`, `ABORT`, `ABSOLUTE`, `ACCESS`, `ACTION`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALL`, `ALSO`, `ALTER`, `ALWAYS`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARRAY`, `AS`, `ASC`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATTRIBUTE`, `AUTHORIZATION`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BIGINT`, `BINARY`, `BIT`, `BOOLEAN`, `BOTH`, `BY`, `CACHE`, `CALLED`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CHAIN`, `CHAR`, `CHARACTER`, `CHARACTERISTICS`, `CHECK`, `CHECKPOINT`, `CLASS`, `CLOSE`, 
`CLUSTER`, `COALESCE`, `COLLATE`, `COLLATION`, `COLUMN`, `COMMENT`, `COMMENTS`, `COMMIT`, `COMMITTED`, `CONCURRENTLY`, `CONFIGURATION`, `CONNECTION`, `CONSTRAINT`, `CONSTRAINTS`, `CONTENT`, `CONTINUE`, `CONVERSION`, `COPY`, `COST`, `CREATE`, `CROSS`, `CSV`, `CURRENT`, `CURRENT_CATALOG`, `CURRENT_DATE`, `CURRENT_ROLE`, `CURRENT_SCHEMA`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CYCLE`, `DATA`, `DATABASE`, `DAY`, `DEALLOCATE`, `DEC`, `DECIMAL`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DESC`, `DICTIONARY`, `DISABLE`, `DISCARD`, `DISTINCT`, `DO`, `DOCUMENT`, `DOMAIN`, `DOUBLE`, `DROP`, `EACH`, `ELSE`, `ENABLE`, `ENCODING`, `ENCRYPTED`, `END`, `ENUM`, `ESCAPE`, `EVENT`, `EXCEPT`, `EXCLUDE`, `EXCLUDING`, `EXCLUSIVE`, `EXECUTE`, `EXISTS`, `EXPLAIN`, `EXTENSION`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FAMILY`, `FETCH`, `FILTER`, `FIRST`, `FLOAT`, `FOLLOWING`, `FOR`, `FORCE`, `FOREIGN`, `FORWARD`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `FUNCTIONS`, `GLOBAL`, `GRANT`, `GRANTED`, `GREATEST`, `GROUP`, `HANDLER`, `HAVING`, `HEADER`, `HOLD`, `HOUR`, `IDENTITY`, `IF`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDEXES`, `INHERIT`, `INHERITS`, `INITIALLY`, `INLINE`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTEAD`, `INT`, `INTEGER`, `INTERSECT`, `INTERVAL`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `JOIN`, `KEY`, `LABEL`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LC_COLLATE`, `LC_CTYPE`, `LEADING`, `LEAKPROOF`, `LEAST`, `LEFT`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCK`, `MAPPING`, `MATCH`, `MATERIALIZED`, `MAXVALUE`, `MINUTE`, `MINVALUE`, `MODE`, `MONTH`, `MOVE`, `NAME`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NEXT`, `NO`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NOWAIT`, `NULL`, `NULLIF`, `NULLS`, `NUMERIC`, `OBJECT`, `OF`, `OFF`, `OFFSET`, `OIDS`, `ON`, `ONLY`, 
`OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OVER`, `OVERLAPS`, `OVERLAY`, `OWNED`, `OWNER`, `PARSER`, `PARTIAL`, `PARTITION`, `PASSING`, `PASSWORD`, `PLACING`, `PLANS`, `POLICY`, `POSITION`, `PRECEDING`, `PRECISION`, `PREPARE`, `PREPARED`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PROGRAM`, `QUOTE`, `RANGE`, `READ`, `REAL`, `REASSIGN`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFRESH`, `REINDEX`, `RELATIVE`, `RELEASE`, `RENAME`, `REPEATABLE`, `REPLACE`, `REPLICA`, `RESET`, `RESTART`, `RESTRICT`, `RETURNING`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROW`, `ROWS`, `RULE`, `SAVEPOINT`, `SCHEMA`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SEQUENCE`, `SEQUENCES`, `SERIALIZABLE`, `SERVER`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SMALLINT`, `SNAPSHOT`, `SOME`, `STABLE`, `STANDALONE`, `START`, `STATEMENT`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRIP`, `SUBSTRING`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `TABLE`, `TABLES`, `TABLESPACE`, `TEMP`, `TEMPLATE`, `TEMPORARY`, `TEXT`, `THEN`, `TIME`, `TIMESTAMP`, `TO`, `TRAILING`, `TRANSACTION`, `TREAT`, `TRIGGER`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `TYPES`, `UNBOUNDED`, `UNCOMMITTED`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNLOGGED`, `UNTIL`, `UPDATE`, `USER`, `USING`, `VACUUM`, `VALID`, `VALIDATE`, `VALIDATOR`, `VALUE`, `VALUES`, `VARCHAR`, `VARIADIC`, `VARYING`, `VERBOSE`, `VERSION`, `VIEW`, `VIEWS`, `VOLATILE`, `WHEN`, `WHERE`, `WHITESPACE`, `WINDOW`, `WITH`, `WITHIN`, `WITHOUT`, `WORK`, `WRAPPER`, `WRITE`, `XML`, `XMLATTRIBUTES`, `XMLCONCAT`, `XMLELEMENT`, `XMLEXISTS`, `XMLFOREST`, `XMLPARSE`, `XMLPI`, `XMLROOT`, `XMLSERIALIZE`, `YEAR`, `YES`, `ZONE`), Keyword, nil}, - {"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, - {`::`, Operator, nil}, - {`\$\d+`, NameVariable, nil}, - {`([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`[0-9]+`, 
LiteralNumberInteger, nil}, - {`((?:E|U&)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, - {`((?:U&)?)(")`, ByGroups(LiteralStringAffix, LiteralStringName), Push("quoted-ident")}, - { - `(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)(\s+)(LANGUAGE)?(\s+)('?)(\w+)?('?)`, - UsingByGroup(internal.Get, - 12, 4, - StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ - StringHeredoc, // (code block) - StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ - Text, Keyword, Text, // LANGUAGE - StringSingle, StringSingle, StringSingle, // 'type' - ), - nil, - }, - {`(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)`, LiteralStringHeredoc, nil}, - {`[a-z_]\w*`, Name, nil}, - {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, - {`[;:()\[\]{},.]`, Punctuation, nil}, - }, - "multiline-comments": { - {`/\*`, CommentMultiline, Push("multiline-comments")}, - {`\*/`, CommentMultiline, Pop(1)}, - {`[^/*]+`, CommentMultiline, nil}, - {`[/*]`, CommentMultiline, nil}, - }, - "string": { - {`[^']+`, LiteralStringSingle, nil}, - {`''`, LiteralStringSingle, nil}, - {`'`, LiteralStringSingle, Pop(1)}, - }, - "quoted-ident": { - {`[^"]+`, LiteralStringName, nil}, - {`""`, LiteralStringName, nil}, - {`"`, LiteralStringName, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go b/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go deleted file mode 100644 index 0b51ba5ef394..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go +++ /dev/null @@ -1,50 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Postscript lexer. 
-var Postscript = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PostScript", - Aliases: []string{"postscript", "postscr"}, - Filenames: []string{"*.ps", "*.eps"}, - MimeTypes: []string{"application/postscript"}, - }, - postscriptRules, -)) - -func postscriptRules() Rules { - return Rules{ - "root": { - {`^%!.+\n`, CommentPreproc, nil}, - {`%%.*\n`, CommentSpecial, nil}, - {`(^%.*\n){2,}`, CommentMultiline, nil}, - {`%.*\n`, CommentSingle, nil}, - {`\(`, LiteralString, Push("stringliteral")}, - {`[{}<>\[\]]`, Punctuation, nil}, - {`<[0-9A-Fa-f]+>(?=[()<>\[\]{}/%\s])`, LiteralNumberHex, nil}, - {`[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])`, LiteralNumberOct, nil}, - {`(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])`, LiteralNumberFloat, nil}, - {`(\-|\+)?[0-9]+(?=[()<>\[\]{}/%\s])`, LiteralNumberInteger, nil}, - {`\/[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])`, NameVariable, nil}, - {`[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])`, NameFunction, nil}, - {`(false|true)(?=[()<>\[\]{}/%\s])`, KeywordConstant, nil}, - {`(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)(?=[()<>\[\]{}/%\s])`, KeywordReserved, nil}, - {Words(``, `(?=[()<>\[\]{}/%\s])`, `abs`, `add`, `aload`, `arc`, `arcn`, `array`, `atan`, `begin`, `bind`, `ceiling`, `charpath`, `clip`, `closepath`, `concat`, `concatmatrix`, `copy`, `cos`, `currentlinewidth`, `currentmatrix`, `currentpoint`, `curveto`, `cvi`, `cvs`, `def`, `defaultmatrix`, `dict`, `dictstackoverflow`, `div`, `dtransform`, `dup`, `end`, `exch`, `exec`, `exit`, `exp`, `fill`, `findfont`, `floor`, `get`, `getinterval`, `grestore`, `gsave`, `gt`, `identmatrix`, `idiv`, `idtransform`, `index`, `invertmatrix`, `itransform`, `length`, `lineto`, `ln`, `load`, `log`, `loop`, `matrix`, `mod`, `moveto`, `mul`, `neg`, `newpath`, `pathforall`, `pathbbox`, `pop`, `print`, `pstack`, `put`, `quit`, `rand`, `rangecheck`, `rcurveto`, `repeat`, `restore`, `rlineto`, 
`rmoveto`, `roll`, `rotate`, `round`, `run`, `save`, `scale`, `scalefont`, `setdash`, `setfont`, `setgray`, `setlinecap`, `setlinejoin`, `setlinewidth`, `setmatrix`, `setrgbcolor`, `shfill`, `show`, `showpage`, `sin`, `sqrt`, `stack`, `stringwidth`, `stroke`, `strokepath`, `sub`, `syntaxerror`, `transform`, `translate`, `truncate`, `typecheck`, `undefined`, `undefinedfilename`, `undefinedresult`), NameBuiltin, nil}, - {`\s+`, Text, nil}, - }, - "stringliteral": { - {`[^()\\]+`, LiteralString, nil}, - {`\\`, LiteralStringEscape, Push("escape")}, - {`\(`, LiteralString, Push()}, - {`\)`, LiteralString, Pop(1)}, - }, - "escape": { - {`[0-8]{3}|n|r|t|b|f|\\|\(|\)`, LiteralStringEscape, Pop(1)}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/povray.go b/vendor/github.com/alecthomas/chroma/lexers/p/povray.go deleted file mode 100644 index 2d870f1063a3..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/povray.go +++ /dev/null @@ -1,39 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Povray lexer. 
-var Povray = internal.Register(MustNewLazyLexer( - &Config{ - Name: "POVRay", - Aliases: []string{"pov"}, - Filenames: []string{"*.pov", "*.inc"}, - MimeTypes: []string{"text/x-povray"}, - }, - povrayRules, -)) - -func povrayRules() Rules { - return Rules{ - "root": { - {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, - {`//.*\n`, CommentSingle, nil}, - {`(?s)"(?:\\.|[^"\\])+"`, LiteralStringDouble, nil}, - {Words(`#`, `\b`, `break`, `case`, `debug`, `declare`, `default`, `define`, `else`, `elseif`, `end`, `error`, `fclose`, `fopen`, `for`, `if`, `ifdef`, `ifndef`, `include`, `local`, `macro`, `range`, `read`, `render`, `statistics`, `switch`, `undef`, `version`, `warning`, `while`, `write`), CommentPreproc, nil}, - {Words(`\b`, `\b`, `aa_level`, `aa_threshold`, `abs`, `acos`, `acosh`, `adaptive`, `adc_bailout`, `agate`, `agate_turb`, `all`, `alpha`, `ambient`, `ambient_light`, `angle`, `aperture`, `arc_angle`, `area_light`, `asc`, `asin`, `asinh`, `assumed_gamma`, `atan`, `atan2`, `atanh`, `atmosphere`, `atmospheric_attenuation`, `attenuating`, `average`, `background`, `black_hole`, `blue`, `blur_samples`, `bounded_by`, `box_mapping`, `bozo`, `break`, `brick`, `brick_size`, `brightness`, `brilliance`, `bumps`, `bumpy1`, `bumpy2`, `bumpy3`, `bump_map`, `bump_size`, `case`, `caustics`, `ceil`, `checker`, `chr`, `clipped_by`, `clock`, `color`, `color_map`, `colour`, `colour_map`, `component`, `composite`, `concat`, `confidence`, `conic_sweep`, `constant`, `control0`, `control1`, `cos`, `cosh`, `count`, `crackle`, `crand`, `cube`, `cubic_spline`, `cylindrical_mapping`, `debug`, `declare`, `default`, `degrees`, `dents`, `diffuse`, `direction`, `distance`, `distance_maximum`, `div`, `dust`, `dust_type`, `eccentricity`, `else`, `emitting`, `end`, `error`, `error_bound`, `exp`, `exponent`, `fade_distance`, `fade_power`, `falloff`, `falloff_angle`, `false`, `file_exists`, `filter`, `finish`, `fisheye`, `flatness`, `flip`, `floor`, `focal_point`, `fog`, `fog_alt`, `fog_offset`, 
`fog_type`, `frequency`, `gif`, `global_settings`, `glowing`, `gradient`, `granite`, `gray_threshold`, `green`, `halo`, `hexagon`, `hf_gray_16`, `hierarchy`, `hollow`, `hypercomplex`, `if`, `ifdef`, `iff`, `image_map`, `incidence`, `include`, `int`, `interpolate`, `inverse`, `ior`, `irid`, `irid_wavelength`, `jitter`, `lambda`, `leopard`, `linear`, `linear_spline`, `linear_sweep`, `location`, `log`, `looks_like`, `look_at`, `low_error_factor`, `mandel`, `map_type`, `marble`, `material_map`, `matrix`, `max`, `max_intersections`, `max_iteration`, `max_trace_level`, `max_value`, `metallic`, `min`, `minimum_reuse`, `mod`, `mortar`, `nearest_count`, `no`, `normal`, `normal_map`, `no_shadow`, `number_of_waves`, `octaves`, `off`, `offset`, `omega`, `omnimax`, `on`, `once`, `onion`, `open`, `orthographic`, `panoramic`, `pattern1`, `pattern2`, `pattern3`, `perspective`, `pgm`, `phase`, `phong`, `phong_size`, `pi`, `pigment`, `pigment_map`, `planar_mapping`, `png`, `point_at`, `pot`, `pow`, `ppm`, `precision`, `pwr`, `quadratic_spline`, `quaternion`, `quick_color`, `quick_colour`, `quilted`, `radial`, `radians`, `radiosity`, `radius`, `rainbow`, `ramp_wave`, `rand`, `range`, `reciprocal`, `recursion_limit`, `red`, `reflection`, `refraction`, `render`, `repeat`, `rgb`, `rgbf`, `rgbft`, `rgbt`, `right`, `ripples`, `rotate`, `roughness`, `samples`, `scale`, `scallop_wave`, `scattering`, `seed`, `shadowless`, `sin`, `sine_wave`, `sinh`, `sky`, `sky_sphere`, `slice`, `slope_map`, `smooth`, `specular`, `spherical_mapping`, `spiral`, `spiral1`, `spiral2`, `spotlight`, `spotted`, `sqr`, `sqrt`, `statistics`, `str`, `strcmp`, `strength`, `strlen`, `strlwr`, `strupr`, `sturm`, `substr`, `switch`, `sys`, `t`, `tan`, `tanh`, `test_camera_1`, `test_camera_2`, `test_camera_3`, `test_camera_4`, `texture`, `texture_map`, `tga`, `thickness`, `threshold`, `tightness`, `tile2`, `tiles`, `track`, `transform`, `translate`, `transmit`, `triangle_wave`, `true`, `ttf`, `turbulence`, `turb_depth`, 
`type`, `ultra_wide_angle`, `up`, `use_color`, `use_colour`, `use_index`, `u_steps`, `val`, `variance`, `vaxis_rotate`, `vcross`, `vdot`, `version`, `vlength`, `vnormalize`, `volume_object`, `volume_rendered`, `vol_with_light`, `vrotate`, `v_steps`, `warning`, `warp`, `water_level`, `waves`, `while`, `width`, `wood`, `wrinkles`, `yes`), Keyword, nil}, - {Words(``, `\b`, `bicubic_patch`, `blob`, `box`, `camera`, `cone`, `cubic`, `cylinder`, `difference`, `disc`, `height_field`, `intersection`, `julia_fractal`, `lathe`, `light_source`, `merge`, `mesh`, `object`, `plane`, `poly`, `polygon`, `prism`, `quadric`, `quartic`, `smooth_triangle`, `sor`, `sphere`, `superellipsoid`, `text`, `torus`, `triangle`, `union`), NameBuiltin, nil}, - {`[\[\](){}<>;,]`, Punctuation, nil}, - {`[-+*/=]`, Operator, nil}, - {`\b(x|y|z|u|v)\b`, NameBuiltinPseudo, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - {`[0-9]+\.[0-9]*`, LiteralNumberFloat, nil}, - {`\.[0-9]+`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`\s+`, Text, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/powerquery.go b/vendor/github.com/alecthomas/chroma/lexers/p/powerquery.go deleted file mode 100644 index 0302420ea94a..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/powerquery.go +++ /dev/null @@ -1,38 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// PowerQuery lexer. 
-var PowerQuery = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PowerQuery", - Aliases: []string{"powerquery", "pq"}, - Filenames: []string{"*.pq"}, - MimeTypes: []string{"text/x-powerquery"}, - DotAll: true, - CaseInsensitive: true, - }, - powerqueryRules, -)) - -func powerqueryRules() Rules { - return Rules{ - "root": { - {`\s+`, Text, nil}, - {`//.*?\n`, CommentSingle, nil}, - {`/\*.*?\*/`, CommentMultiline, nil}, - {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, - {`(and|as|each|else|error|false|if|in|is|let|meta|not|null|or|otherwise|section|shared|then|true|try|type)\b`, Keyword, nil}, - {`(#binary|#date|#datetime|#datetimezone|#duration|#infinity|#nan|#sections|#shared|#table|#time)\b`, KeywordType, nil}, - {`(([a-zA-Z]|_)[\w|._]*|#"[^"]+")`, Name, nil}, - {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, - {`([0-9]+\.[0-9]+|\.[0-9]+)([eE][0-9]+)?`, LiteralNumberFloat, nil}, - {`[0-9]+`, LiteralNumberInteger, nil}, - {`[\(\)\[\]\{\}]`, Punctuation, nil}, - {`\.\.|\.\.\.|=>|<=|>=|<>|[@!?,;=<>\+\-\*\/&]`, Operator, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go b/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go deleted file mode 100644 index 9b7b56ee0208..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go +++ /dev/null @@ -1,70 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Powershell lexer. 
-var Powershell = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PowerShell", - Aliases: []string{"powershell", "posh", "ps1", "psm1", "psd1"}, - Filenames: []string{"*.ps1", "*.psm1", "*.psd1"}, - MimeTypes: []string{"text/x-powershell"}, - DotAll: true, - CaseInsensitive: true, - }, - powershellRules, -)) - -func powershellRules() Rules { - return Rules{ - "root": { - {`\(`, Punctuation, Push("child")}, - {`\s+`, Text, nil}, - {`^(\s*#[#\s]*)(\.(?:component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis))([^\n]*$)`, ByGroups(Comment, LiteralStringDoc, Comment), nil}, - {`#[^\n]*?$`, Comment, nil}, - {`(<|<)#`, CommentMultiline, Push("multline")}, - {`(?i)([A-Z]:)`, Name, nil}, - {`@"\n`, LiteralStringHeredoc, Push("heredoc-double")}, - {`@'\n.*?\n'@`, LiteralStringHeredoc, nil}, - {"`[\\'\"$@-]", Punctuation, nil}, - {`"`, LiteralStringDouble, Push("string")}, - {`'([^']|'')*'`, LiteralStringSingle, nil}, - {`(\$|@@|@)((global|script|private|env):)?\w+`, NameVariable, nil}, - {`[a-z]\w*-[a-z]\w*\b`, NameBuiltin, nil}, - {`(while|validateset|validaterange|validatepattern|validatelength|validatecount|until|trap|switch|return|ref|process|param|parameter|in|if|global:|function|foreach|for|finally|filter|end|elseif|else|dynamicparam|do|default|continue|cmdletbinding|break|begin|alias|\?|%|#script|#private|#local|#global|mandatory|parametersetname|position|valuefrompipeline|valuefrompipelinebypropertyname|valuefromremainingarguments|helpmessage|try|catch|throw)\b`, Keyword, nil}, - {`-(and|as|band|bnot|bor|bxor|casesensitive|ccontains|ceq|cge|cgt|cle|clike|clt|cmatch|cne|cnotcontains|cnotlike|cnotmatch|contains|creplace|eq|exact|f|file|ge|gt|icontains|ieq|ige|igt|ile|ilike|ilt|imatch|ine|inotcontains|inotlike|inotmatch|ireplace|is|isnot|le|like|lt|match|ne|not|notcontains|notlike|notmatch|or|regex|replace|wildcard)\b`, Operator, nil}, - 
{`(ac|asnp|cat|cd|cfs|chdir|clc|clear|clhy|cli|clp|cls|clv|cnsn|compare|copy|cp|cpi|cpp|curl|cvpa|dbp|del|diff|dir|dnsn|ebp|echo|epal|epcsv|epsn|erase|etsn|exsn|fc|fhx|fl|foreach|ft|fw|gal|gbp|gc|gci|gcm|gcs|gdr|ghy|gi|gjb|gl|gm|gmo|gp|gps|gpv|group|gsn|gsnp|gsv|gu|gv|gwmi|h|history|icm|iex|ihy|ii|ipal|ipcsv|ipmo|ipsn|irm|ise|iwmi|iwr|kill|lp|ls|man|md|measure|mi|mount|move|mp|mv|nal|ndr|ni|nmo|npssc|nsn|nv|ogv|oh|popd|ps|pushd|pwd|r|rbp|rcjb|rcsn|rd|rdr|ren|ri|rjb|rm|rmdir|rmo|rni|rnp|rp|rsn|rsnp|rujb|rv|rvpa|rwmi|sajb|sal|saps|sasv|sbp|sc|select|set|shcm|si|sl|sleep|sls|sort|sp|spjb|spps|spsv|start|sujb|sv|swmi|tee|trcm|type|wget|where|wjb|write)\s`, NameBuiltin, nil}, - {"\\[[a-z_\\[][\\w. `,\\[\\]]*\\]", NameConstant, nil}, - {`-[a-z_]\w*`, Name, nil}, - {`\w+`, Name, nil}, - {"[.,;@{}\\[\\]$()=+*/\\\\&%!~?^`|<>-]|::", Punctuation, nil}, - }, - "child": { - {`\)`, Punctuation, Pop(1)}, - Include("root"), - }, - "multline": { - {`[^#&.]+`, CommentMultiline, nil}, - {`#(>|>)`, CommentMultiline, Pop(1)}, - {`\.(component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis)`, LiteralStringDoc, nil}, - {`[#&.]`, CommentMultiline, nil}, - }, - "string": { - {"`[0abfnrtv'\\\"$`]", LiteralStringEscape, nil}, - {"[^$`\"]+", LiteralStringDouble, nil}, - {`\$\(`, Punctuation, Push("child")}, - {`""`, LiteralStringDouble, nil}, - {"[`$]", LiteralStringDouble, nil}, - {`"`, LiteralStringDouble, Pop(1)}, - }, - "heredoc-double": { - {`\n"@`, LiteralStringHeredoc, Pop(1)}, - {`\$\(`, Punctuation, Push("child")}, - {`[^@\n]+"]`, LiteralStringHeredoc, nil}, - {`.`, LiteralStringHeredoc, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go b/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go deleted file mode 100644 index a2f346b6263f..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go +++ /dev/null @@ -1,54 +0,0 @@ 
-package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Prolog lexer. -var Prolog = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Prolog", - Aliases: []string{"prolog"}, - Filenames: []string{"*.ecl", "*.prolog", "*.pro", "*.pl"}, - MimeTypes: []string{"text/x-prolog"}, - }, - prologRules, -)) - -func prologRules() Rules { - return Rules{ - "root": { - {`/\*`, CommentMultiline, Push("nested-comment")}, - {`%.*`, CommentSingle, nil}, - {`0\'.`, LiteralStringChar, nil}, - {`0b[01]+`, LiteralNumberBin, nil}, - {`0o[0-7]+`, LiteralNumberOct, nil}, - {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, - {`\d\d?\'[a-zA-Z0-9]+`, LiteralNumberInteger, nil}, - {`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`\d+`, LiteralNumberInteger, nil}, - {`[\[\](){}|.,;!]`, Punctuation, nil}, - {`:-|-->`, Punctuation, nil}, - {`"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"`, LiteralStringDouble, nil}, - {`'(?:''|[^'])*'`, LiteralStringAtom, nil}, - {`is\b`, Operator, nil}, - {`(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])`, Operator, nil}, - {`(mod|div|not)\b`, Operator, nil}, - {`_`, Keyword, nil}, - {`([a-z]+)(:)`, ByGroups(NameNamespace, Punctuation), nil}, - {`([a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*)(\s*)(:-|-->)`, ByGroups(NameFunction, Text, Operator), nil}, - {`([a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, - {`[a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*`, LiteralStringAtom, nil}, - {`[#&*+\-./:<=>?@\\^~¡-¿‐-〿]+`, LiteralStringAtom, nil}, - {`[A-Z_]\w*`, NameVariable, nil}, - {`\s+|[ -‏￰-￾￯]`, Text, nil}, - }, - "nested-comment": { - {`\*/`, CommentMultiline, Pop(1)}, - {`/\*`, CommentMultiline, Push()}, - {`[^*/]+`, CommentMultiline, nil}, - {`[*/]`, CommentMultiline, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/promql.go 
b/vendor/github.com/alecthomas/chroma/lexers/p/promql.go deleted file mode 100644 index e80b926c2edb..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/promql.go +++ /dev/null @@ -1,59 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Promql lexer. -var Promql = internal.Register(MustNewLazyLexer( - &Config{ - Name: "PromQL", - Aliases: []string{"promql"}, - Filenames: []string{"*.promql"}, - MimeTypes: []string{}, - }, - promqlRules, -)) - -func promqlRules() Rules { - return Rules{ - "root": { - {`\n`, TextWhitespace, nil}, - {`\s+`, TextWhitespace, nil}, - {`,`, Punctuation, nil}, - {Words(``, `\b`, `bool`, `by`, `group_left`, `group_right`, `ignoring`, `offset`, `on`, `without`), Keyword, nil}, - {Words(``, `\b`, `sum`, `min`, `max`, `avg`, `group`, `stddev`, `stdvar`, `count`, `count_values`, `bottomk`, `topk`, `quantile`), Keyword, nil}, - {Words(``, `\b`, `abs`, `absent`, `absent_over_time`, `avg_over_time`, `ceil`, `changes`, `clamp_max`, `clamp_min`, `count_over_time`, `day_of_month`, `day_of_week`, `days_in_month`, `delta`, `deriv`, `exp`, `floor`, `histogram_quantile`, `holt_winters`, `hour`, `idelta`, `increase`, `irate`, `label_join`, `label_replace`, `ln`, `log10`, `log2`, `max_over_time`, `min_over_time`, `minute`, `month`, `predict_linear`, `quantile_over_time`, `rate`, `resets`, `round`, `scalar`, `sort`, `sort_desc`, `sqrt`, `stddev_over_time`, `stdvar_over_time`, `sum_over_time`, `time`, `timestamp`, `vector`, `year`), KeywordReserved, nil}, - {`[1-9][0-9]*[smhdwy]`, LiteralString, nil}, - {`-?[0-9]+\.[0-9]+`, LiteralNumberFloat, nil}, - {`-?[0-9]+`, LiteralNumberInteger, nil}, - {`#.*?$`, CommentSingle, nil}, - {`(\+|\-|\*|\/|\%|\^)`, Operator, nil}, - {`==|!=|>=|<=|<|>`, Operator, nil}, - {`and|or|unless`, OperatorWord, nil}, - {`[_a-zA-Z][a-zA-Z0-9_]+`, NameVariable, nil}, - {`(["\'])(.*?)(["\'])`, ByGroups(Punctuation, LiteralString, 
Punctuation), nil}, - {`\(`, Operator, Push("function")}, - {`\)`, Operator, nil}, - {`\{`, Punctuation, Push("labels")}, - {`\[`, Punctuation, Push("range")}, - }, - "labels": { - {`\}`, Punctuation, Pop(1)}, - {`\n`, TextWhitespace, nil}, - {`\s+`, TextWhitespace, nil}, - {`,`, Punctuation, nil}, - {`([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|')(.*?)("|')`, ByGroups(NameLabel, TextWhitespace, Operator, TextWhitespace, Punctuation, LiteralString, Punctuation), nil}, - }, - "range": { - {`\]`, Punctuation, Pop(1)}, - {`[1-9][0-9]*[smhdwy]`, LiteralString, nil}, - }, - "function": { - {`\)`, Operator, Pop(1)}, - {`\(`, Operator, Push()}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go b/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go deleted file mode 100644 index 76576cb3501d..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go +++ /dev/null @@ -1,57 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// ProtocolBuffer lexer. 
-var ProtocolBuffer = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Protocol Buffer", - Aliases: []string{"protobuf", "proto"}, - Filenames: []string{"*.proto"}, - MimeTypes: []string{}, - }, - protocolBufferRules, -)) - -func protocolBufferRules() Rules { - return Rules{ - "root": { - {`[ \t]+`, Text, nil}, - {`[,;{}\[\]()<>]`, Punctuation, nil}, - {`/(\\\n)?/(\n|(.|\n)*?[^\\]\n)`, CommentSingle, nil}, - {`/(\\\n)?\*(.|\n)*?\*(\\\n)?/`, CommentMultiline, nil}, - {Words(`\b`, `\b`, `import`, `option`, `optional`, `required`, `repeated`, `default`, `packed`, `ctype`, `extensions`, `to`, `max`, `rpc`, `returns`, `oneof`), Keyword, nil}, - {Words(``, `\b`, `int32`, `int64`, `uint32`, `uint64`, `sint32`, `sint64`, `fixed32`, `fixed64`, `sfixed32`, `sfixed64`, `float`, `double`, `bool`, `string`, `bytes`), KeywordType, nil}, - {`(true|false)\b`, KeywordConstant, nil}, - {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), Push("package")}, - {`(message|extend)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("message")}, - {`(enum|group|service)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("type")}, - {`\".*?\"`, LiteralString, nil}, - {`\'.*?\'`, LiteralString, nil}, - {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, - {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, - {`(\-?(inf|nan))\b`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, - {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, - {`\d+[LlUu]*`, LiteralNumberInteger, nil}, - {`[+-=]`, Operator, nil}, - {`([a-zA-Z_][\w.]*)([ \t]*)(=)`, ByGroups(Name, Text, Operator), nil}, - {`[a-zA-Z_][\w.]*`, Name, nil}, - }, - "package": { - {`[a-zA-Z_]\w*`, NameNamespace, Pop(1)}, - Default(Pop(1)), - }, - "message": { - {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, - Default(Pop(1)), - }, - "type": { - {`[a-zA-Z_]\w*`, Name, Pop(1)}, - Default(Pop(1)), - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go 
b/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go deleted file mode 100644 index b32caed09be9..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go +++ /dev/null @@ -1,60 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Puppet lexer. -var Puppet = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Puppet", - Aliases: []string{"puppet"}, - Filenames: []string{"*.pp"}, - MimeTypes: []string{}, - }, - puppetRules, -)) - -func puppetRules() Rules { - return Rules{ - "root": { - Include("comments"), - Include("keywords"), - Include("names"), - Include("numbers"), - Include("operators"), - Include("strings"), - {`[]{}:(),;[]`, Punctuation, nil}, - {`[^\S\n]+`, Text, nil}, - }, - "comments": { - {`\s*#.*$`, Comment, nil}, - {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, - }, - "operators": { - {`(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)`, Operator, nil}, - {`(in|and|or|not)\b`, OperatorWord, nil}, - }, - "names": { - {`[a-zA-Z_]\w*`, NameAttribute, nil}, - {`(\$\S+)(\[)(\S+)(\])`, ByGroups(NameVariable, Punctuation, LiteralString, Punctuation), nil}, - {`\$\S+`, NameVariable, nil}, - }, - "numbers": { - {`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?`, LiteralNumberFloat, nil}, - {`\d+[eE][+-]?[0-9]+j?`, LiteralNumberFloat, nil}, - {`0[0-7]+j?`, LiteralNumberOct, nil}, - {`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil}, - {`\d+L`, LiteralNumberIntegerLong, nil}, - {`\d+j?`, LiteralNumberInteger, nil}, - }, - "keywords": { - {Words(`(?i)`, `\b`, `absent`, `alert`, `alias`, `audit`, `augeas`, `before`, `case`, `check`, `class`, `computer`, `configured`, `contained`, `create_resources`, `crit`, `cron`, `debug`, `default`, `define`, `defined`, `directory`, `else`, `elsif`, `emerg`, `err`, `exec`, `extlookup`, `fail`, `false`, `file`, `filebucket`, `fqdn_rand`, `generate`, `host`, `if`, `import`, `include`, `info`, `inherits`, `inline_template`, `installed`, 
`interface`, `k5login`, `latest`, `link`, `loglevel`, `macauthorization`, `mailalias`, `maillist`, `mcx`, `md5`, `mount`, `mounted`, `nagios_command`, `nagios_contact`, `nagios_contactgroup`, `nagios_host`, `nagios_hostdependency`, `nagios_hostescalation`, `nagios_hostextinfo`, `nagios_hostgroup`, `nagios_service`, `nagios_servicedependency`, `nagios_serviceescalation`, `nagios_serviceextinfo`, `nagios_servicegroup`, `nagios_timeperiod`, `node`, `noop`, `notice`, `notify`, `package`, `present`, `purged`, `realize`, `regsubst`, `resources`, `role`, `router`, `running`, `schedule`, `scheduled_task`, `search`, `selboolean`, `selmodule`, `service`, `sha1`, `shellquote`, `split`, `sprintf`, `ssh_authorized_key`, `sshkey`, `stage`, `stopped`, `subscribe`, `tag`, `tagged`, `template`, `tidy`, `true`, `undef`, `unmounted`, `user`, `versioncmp`, `vlan`, `warning`, `yumrepo`, `zfs`, `zone`, `zpool`), Keyword, nil}, - }, - "strings": { - {`"([^"])*"`, LiteralString, nil}, - {`'(\\'|[^'])*'`, LiteralString, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/python.go b/vendor/github.com/alecthomas/chroma/lexers/p/python.go deleted file mode 100644 index ad10a322ed04..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/python.go +++ /dev/null @@ -1,211 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Python lexer. 
-var Python = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Python", - Aliases: []string{"python", "py", "sage", "python3", "py3"}, - Filenames: []string{"*.py", "*.pyi", "*.pyw", "*.jy", "*.sage", "*.sc", "SConstruct", "SConscript", "*.bzl", "BUCK", "BUILD", "BUILD.bazel", "WORKSPACE", "*.tac"}, - MimeTypes: []string{"text/x-python", "application/x-python", "text/x-python3", "application/x-python3"}, - }, - pythonRules, -)) - -func pythonRules() Rules { - const pythonIdentifier = `[_\p{L}][_\p{L}\p{N}]*` - - return Rules{ - "root": { - {`\n`, Text, nil}, - {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, - {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, - {`\A#!.+$`, CommentHashbang, nil}, - {`#.*$`, CommentSingle, nil}, - {`\\\n`, Text, nil}, - {`\\`, Text, nil}, - Include("keywords"), - {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, - {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, - {`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")}, - {`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - Include("expr"), - }, - "expr": { - {`(?i)(rf|fr)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("rfstringescape", "tdqf")}, - {`(?i)(rf|fr)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("rfstringescape", "tsqf")}, - {`(?i)(rf|fr)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("rfstringescape", "dqf")}, - {`(?i)(rf|fr)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("rfstringescape", "sqf")}, - {`([fF])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("fstringescape", "tdqf")}, - {`([fF])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("fstringescape", "tsqf")}, - {`([fF])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("fstringescape", 
"dqf")}, - {`([fF])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("fstringescape", "sqf")}, - {`(?i)(rb|br|r)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, - {`(?i)(rb|br|r)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, - {`(?i)(rb|br|r)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, - {`(?i)(rb|br|r)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, - {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, - {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, - {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, - {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, - {`[^\S\n]+`, Text, nil}, - Include("numbers"), - {`!=|==|<<|>>|:=|[-~+/*%=<>&^|.]`, Operator, nil}, - {`[]{}:(),;[]`, Punctuation, nil}, - {`(in|is|and|or|not)\b`, OperatorWord, nil}, - Include("expr-keywords"), - Include("builtins"), - Include("magicfuncs"), - Include("magicvars"), - Include("name"), - }, - "expr-inside-fstring": { - {`[{([]`, Punctuation, Push("expr-inside-fstring-inner")}, - {`(=\s*)?(\![sraf])?\}`, LiteralStringInterpol, Pop(1)}, - {`(=\s*)?(\![sraf])?:`, LiteralStringInterpol, Pop(1)}, - {`\s+`, Text, nil}, - Include("expr"), - }, - "expr-inside-fstring-inner": { - {`[{([]`, Punctuation, Push("expr-inside-fstring-inner")}, - {`[])}]`, Punctuation, Pop(1)}, - {`\s+`, Text, nil}, - Include("expr"), - }, - "expr-keywords": { - {Words(``, `\b`, `async for`, `await`, `else`, `for`, `if`, `lambda`, `yield`, `yield from`), Keyword, nil}, - {Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil}, - }, - "keywords": { - {Words(``, `\b`, `assert`, `async`, `await`, `break`, `continue`, `del`, `elif`, `else`, `except`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, 
`raise`, `nonlocal`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, - {Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil}, - }, - "builtins": { - {Words(`(?=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil}, - {`[^\\\'"%{\n]+`, LiteralStringSingle, nil}, - {`[\'"\\]`, LiteralStringSingle, nil}, - {`%|(\{{1,2})`, LiteralStringSingle, nil}, - }, - "strings-double": { - {`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsaux%]`, LiteralStringInterpol, nil}, - {`\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\![sra])?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil}, - {`[^\\\'"%{\n]+`, LiteralStringDouble, nil}, - {`[\'"\\]`, LiteralStringDouble, nil}, - {`%|(\{{1,2})`, LiteralStringDouble, nil}, - }, - "dqf": { - {`"`, LiteralStringDouble, Pop(1)}, - {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, - Include("fstrings-double"), - }, - "sqf": { - {`'`, LiteralStringSingle, Pop(1)}, - {`\\\\|\\'|\\\n`, LiteralStringEscape, nil}, - Include("fstrings-single"), - }, - "dqs": { - {`"`, LiteralStringDouble, Pop(1)}, - {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, - Include("strings-double"), - }, - "sqs": { - {`'`, LiteralStringSingle, Pop(1)}, - {`\\\\|\\'|\\\n`, LiteralStringEscape, nil}, - Include("strings-single"), - }, - "tdqf": { - {`"""`, LiteralStringDouble, Pop(1)}, - Include("fstrings-double"), - {`\n`, LiteralStringDouble, nil}, - }, - "tsqf": { - {`'''`, LiteralStringSingle, Pop(1)}, - Include("fstrings-single"), - {`\n`, LiteralStringSingle, nil}, - }, - "tdqs": { - {`"""`, LiteralStringDouble, Pop(1)}, - Include("strings-double"), - {`\n`, LiteralStringDouble, nil}, - }, - "tsqs": { - {`'''`, LiteralStringSingle, Pop(1)}, - Include("strings-single"), - {`\n`, LiteralStringSingle, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/python2.go b/vendor/github.com/alecthomas/chroma/lexers/p/python2.go deleted file mode 100644 
index f1f60360b278..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/p/python2.go +++ /dev/null @@ -1,141 +0,0 @@ -package p - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Python2 lexer. -var Python2 = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Python 2", - Aliases: []string{"python2", "py2"}, - Filenames: []string{}, - MimeTypes: []string{"text/x-python2", "application/x-python2"}, - }, - python2Rules, -)) - -func python2Rules() Rules { - return Rules{ - "root": { - {`\n`, Text, nil}, - {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, - {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, - {`[^\S\n]+`, Text, nil}, - {`\A#!.+$`, CommentHashbang, nil}, - {`#.*$`, CommentSingle, nil}, - {`[]{}:(),;[]`, Punctuation, nil}, - {`\\\n`, Text, nil}, - {`\\`, Text, nil}, - {`(in|is|and|or|not)\b`, OperatorWord, nil}, - {`!=|==|<<|>>|[-~+/*%=<>&^|.]`, Operator, nil}, - Include("keywords"), - {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, - {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, - {`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")}, - {`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, - Include("builtins"), - Include("magicfuncs"), - Include("magicvars"), - Include("backtick"), - {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, - {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, - {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, - {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, - {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), 
Combined("stringescape", "tdqs")}, - {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, - {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, - {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, - Include("name"), - Include("numbers"), - }, - "keywords": { - {Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, - }, - "builtins": { - {Words(`(?>|[-~+/\\*%=<>&^|?:!.]`, Operator, nil}, - {`[\[\]{}(),;]`, Punctuation, nil}, - {`[\w]+`, NameVariableGlobal, nil}, - }, - "declarations": { - {`\b(DATA|LET)(?=\(|\b)`, KeywordDeclaration, nil}, - }, - "functions": { - {`\b(ABS|ASC|ATN|CDBL|CHR\$|CINT|CLNG|COMMAND\$|COS|CSNG|CSRLIN|CVD|CVDMBF|CVI|CVL|CVS|CVSMBF|DATE\$|ENVIRON\$|EOF|ERDEV|ERDEV\$|ERL|ERR|EXP|FILEATTR|FIX|FRE|FREEFILE|HEX\$|INKEY\$|INP|INPUT\$|INSTR|INT|IOCTL\$|LBOUND|LCASE\$|LEFT\$|LEN|LOC|LOF|LOG|LPOS|LTRIM\$|MID\$|MKD\$|MKDMBF\$|MKI\$|MKL\$|MKS\$|MKSMBF\$|OCT\$|PEEK|PEN|PLAY|PMAP|POINT|POS|RIGHT\$|RND|RTRIM\$|SADD|SCREEN|SEEK|SETMEM|SGN|SIN|SPACE\$|SPC|SQR|STICK|STR\$|STRIG|STRING\$|TAB|TAN|TIME\$|TIMER|UBOUND|UCASE\$|VAL|VARPTR|VARPTR\$|VARSEG)(?=\(|\b)`, KeywordReserved, nil}, - }, - "metacommands": { - {`\b(\$DYNAMIC|\$INCLUDE|\$STATIC)(?=\(|\b)`, KeywordConstant, nil}, - }, - "operators": { - {`\b(AND|EQV|IMP|NOT|OR|XOR)(?=\(|\b)`, OperatorWord, nil}, - }, - "statements": { - {`\b(BEEP|BLOAD|BSAVE|CALL|CALL\ ABSOLUTE|CALL\ INTERRUPT|CALLS|CHAIN|CHDIR|CIRCLE|CLEAR|CLOSE|CLS|COLOR|COM|COMMON|CONST|DATA|DATE\$|DECLARE|DEF\ FN|DEF\ SEG|DEFDBL|DEFINT|DEFLNG|DEFSNG|DEFSTR|DEF|DIM|DO|LOOP|DRAW|END|ENVIRON|ERASE|ERROR|EXIT|FIELD|FILES|FOR|NEXT|FUNCTION|GET|GOSUB|GOTO|IF|THEN|INPUT|INPUT\ \#|IOCTL|KEY|KEY|KILL|LET|LINE|LINE\ 
INPUT|LINE\ INPUT\ \#|LOCATE|LOCK|UNLOCK|LPRINT|LSET|MID\$|MKDIR|NAME|ON\ COM|ON\ ERROR|ON\ KEY|ON\ PEN|ON\ PLAY|ON\ STRIG|ON\ TIMER|ON\ UEVENT|ON|OPEN|OPEN\ COM|OPTION\ BASE|OUT|PAINT|PALETTE|PCOPY|PEN|PLAY|POKE|PRESET|PRINT|PRINT\ \#|PRINT\ USING|PSET|PUT|PUT|RANDOMIZE|READ|REDIM|REM|RESET|RESTORE|RESUME|RETURN|RMDIR|RSET|RUN|SCREEN|SEEK|SELECT\ CASE|SHARED|SHELL|SLEEP|SOUND|STATIC|STOP|STRIG|SUB|SWAP|SYSTEM|TIME\$|TIMER|TROFF|TRON|TYPE|UEVENT|UNLOCK|VIEW|WAIT|WHILE|WEND|WIDTH|WINDOW|WRITE)\b`, KeywordReserved, nil}, - }, - "keywords": { - {`\b(ACCESS|ALIAS|ANY|APPEND|AS|BASE|BINARY|BYVAL|CASE|CDECL|DOUBLE|ELSE|ELSEIF|ENDIF|INTEGER|IS|LIST|LOCAL|LONG|LOOP|MOD|NEXT|OFF|ON|OUTPUT|RANDOM|SIGNAL|SINGLE|STEP|STRING|THEN|TO|UNTIL|USING|WEND)\b`, Keyword, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/q/qml.go b/vendor/github.com/alecthomas/chroma/lexers/q/qml.go deleted file mode 100644 index dfd3c75374b3..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/q/qml.go +++ /dev/null @@ -1,58 +0,0 @@ -package q - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Qml lexer. 
-var Qml = internal.Register(MustNewLazyLexer( - &Config{ - Name: "QML", - Aliases: []string{"qml", "qbs"}, - Filenames: []string{"*.qml", "*.qbs"}, - MimeTypes: []string{"application/x-qml", "application/x-qt.qbs+qml"}, - DotAll: true, - }, - qmlRules, -)) - -func qmlRules() Rules { - return Rules{ - "commentsandwhitespace": { - {`\s+`, Text, nil}, - {``, Comment, Pop(1)}, - {`-`, Comment, nil}, - }, - "tag": { - {`\s+`, Text, nil}, - {`[\w.:-]+\s*=`, NameAttribute, Push("attr")}, - {`/?\s*>`, NameTag, Pop(1)}, - }, - "attr": { - {`\s+`, Text, nil}, - {`".*?"`, LiteralString, Pop(1)}, - {`'.*?'`, LiteralString, Pop(1)}, - {`[^\s>]+`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go b/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go deleted file mode 100644 index 7936a51ab2ff..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go +++ /dev/null @@ -1,29 +0,0 @@ -package x - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Xorg lexer. -var Xorg = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Xorg", - Aliases: []string{"xorg.conf"}, - Filenames: []string{"xorg.conf"}, - MimeTypes: []string{}, - }, - xorgRules, -)) - -func xorgRules() Rules { - return Rules{ - "root": { - {`\s+`, TextWhitespace, nil}, - {`#.*$`, Comment, nil}, - {`((|Sub)Section)(\s+)("\w+")`, ByGroups(KeywordNamespace, LiteralStringEscape, TextWhitespace, LiteralStringEscape), nil}, - {`(End(|Sub)Section)`, KeywordNamespace, nil}, - {`(\w+)(\s+)([^\n#]+)`, ByGroups(NameKeyword, TextWhitespace, LiteralString), nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go b/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go deleted file mode 100644 index 488f760049f7..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go +++ /dev/null @@ -1,58 +0,0 @@ -package y - -import ( - . 
"github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -var YAML = internal.Register(MustNewLazyLexer( - &Config{ - Name: "YAML", - Aliases: []string{"yaml"}, - Filenames: []string{"*.yaml", "*.yml"}, - MimeTypes: []string{"text/x-yaml"}, - }, - yamlRules, -)) - -func yamlRules() Rules { - return Rules{ - "root": { - Include("whitespace"), - {`^---`, NameNamespace, nil}, - {`^\.\.\.`, NameNamespace, nil}, - {`[\n?]?\s*- `, Text, nil}, - {`#.*$`, Comment, nil}, - {`!![^\s]+`, CommentPreproc, nil}, - {`&[^\s]+`, CommentPreproc, nil}, - {`\*[^\s]+`, CommentPreproc, nil}, - {`^%include\s+[^\n\r]+`, CommentPreproc, nil}, - Include("key"), - Include("value"), - {`[?:,\[\]]`, Punctuation, nil}, - {`.`, Text, nil}, - }, - "value": { - {`([>|](?:[+-])?)(\n(^ {1,})(?:.*\n*(?:^\3 *).*)*)`, ByGroups(Punctuation, StringDoc, Whitespace), nil}, - {Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null", - "y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO", - "on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil}, - {`"(?:\\.|[^"])*"`, StringDouble, nil}, - {`'(?:\\.|[^'])*'`, StringSingle, nil}, - {`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil}, - {`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil}, - {`([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)`, ByGroups(Literal, Whitespace, Comment), nil}, - {`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil}, - }, - "key": { - {`"[^"\n].*": `, NameTag, nil}, - {`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil}, - {`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil}, - {`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil}, - }, - "whitespace": { - {`\s+`, Whitespace, nil}, - {`\n+`, Whitespace, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/y/yang.go 
b/vendor/github.com/alecthomas/chroma/lexers/y/yang.go deleted file mode 100644 index 36349eb9fb66..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/y/yang.go +++ /dev/null @@ -1,71 +0,0 @@ -package y - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -var YANG = internal.Register(MustNewLazyLexer( - &Config{ - Name: "YANG", - Aliases: []string{"yang"}, - Filenames: []string{"*.yang"}, - MimeTypes: []string{"application/yang"}, - }, - yangRules, -)) - -func yangRules() Rules { - return Rules{ - "root": { - {`\s+`, Whitespace, nil}, - {`[\{\}\;]+`, Punctuation, nil}, - {`(?<|^!?/\-*&~:]`, Operator, nil}, - {`[{}()\[\],.;]`, Punctuation, nil}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/lexers/z/zig.go b/vendor/github.com/alecthomas/chroma/lexers/z/zig.go deleted file mode 100644 index 56f54fd989f9..000000000000 --- a/vendor/github.com/alecthomas/chroma/lexers/z/zig.go +++ /dev/null @@ -1,58 +0,0 @@ -package z - -import ( - . "github.com/alecthomas/chroma" // nolint - "github.com/alecthomas/chroma/lexers/internal" -) - -// Zig lexer. 
-var Zig = internal.Register(MustNewLazyLexer( - &Config{ - Name: "Zig", - Aliases: []string{"zig"}, - Filenames: []string{"*.zig"}, - MimeTypes: []string{"text/zig"}, - }, - zigRules, -)) - -func zigRules() Rules { - return Rules{ - "root": { - {`\n`, TextWhitespace, nil}, - {`\s+`, TextWhitespace, nil}, - {`//.*?\n`, CommentSingle, nil}, - {Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil}, - {Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil}, - {Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil}, - {Words(``, `\b`, `while`, `for`), Keyword, nil}, - {Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil}, - {Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil}, - {Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil}, - {Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil}, - {`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil}, - {`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil}, - {`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil}, - {`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil}, - {`0b(?:_?[01])+`, LiteralNumberBin, nil}, - {`0o(?:_?[0-7])+`, LiteralNumberOct, nil}, - {`0x(?:_?[0-9a-fA-F])+`, LiteralNumberHex, nil}, - {`(?:_?[0-9])+`, LiteralNumberInteger, nil}, - {`@[a-zA-Z_]\w*`, NameBuiltin, nil}, - {`[a-zA-Z_]\w*`, Name, nil}, - 
{`\'\\\'\'`, LiteralStringEscape, nil}, - {`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil}, - {`\'[^\\\']\'`, LiteralString, nil}, - {`\\\\[^\n]*`, LiteralStringHeredoc, nil}, - {`c\\\\[^\n]*`, LiteralStringHeredoc, nil}, - {`c?"`, LiteralString, Push("string")}, - {`[+%=><|^!?/\-*&~:]`, Operator, nil}, - {`[{}()\[\],.;]`, Punctuation, nil}, - }, - "string": { - {`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil}, - {`[^\\"\n]+`, LiteralString, nil}, - {`"`, LiteralString, Pop(1)}, - }, - } -} diff --git a/vendor/github.com/alecthomas/chroma/mutators.go b/vendor/github.com/alecthomas/chroma/mutators.go deleted file mode 100644 index bd6d720e0fd9..000000000000 --- a/vendor/github.com/alecthomas/chroma/mutators.go +++ /dev/null @@ -1,131 +0,0 @@ -package chroma - -import ( - "fmt" - "strings" -) - -// A Mutator modifies the behaviour of the lexer. -type Mutator interface { - // Mutate the lexer state machine as it is processing. - Mutate(state *LexerState) error -} - -// A LexerMutator is an additional interface that a Mutator can implement -// to modify the lexer when it is compiled. -type LexerMutator interface { - // Rules are the lexer rules, state is the state key for the rule the mutator is associated with. - MutateLexer(rules CompiledRules, state string, rule int) error -} - -// A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing. -type MutatorFunc func(state *LexerState) error - -func (m MutatorFunc) Mutate(state *LexerState) error { return m(state) } // nolint - -// Mutators applies a set of Mutators in order. -func Mutators(modifiers ...Mutator) MutatorFunc { - return func(state *LexerState) error { - for _, modifier := range modifiers { - if err := modifier.Mutate(state); err != nil { - return err - } - } - return nil - } -} - -type includeMutator struct { - state string -} - -// Include the given state. 
-func Include(state string) Rule { - return Rule{Mutator: &includeMutator{state}} -} - -func (i *includeMutator) Mutate(s *LexerState) error { - return fmt.Errorf("should never reach here Include(%q)", i.state) -} - -func (i *includeMutator) MutateLexer(rules CompiledRules, state string, rule int) error { - includedRules, ok := rules[i.state] - if !ok { - return fmt.Errorf("invalid include state %q", i.state) - } - rules[state] = append(rules[state][:rule], append(includedRules, rules[state][rule+1:]...)...) - return nil -} - -type combinedMutator struct { - states []string -} - -// Combined creates a new anonymous state from the given states, and pushes that state. -func Combined(states ...string) Mutator { - return &combinedMutator{states} -} - -func (c *combinedMutator) Mutate(s *LexerState) error { - return fmt.Errorf("should never reach here Combined(%v)", c.states) -} - -func (c *combinedMutator) MutateLexer(rules CompiledRules, state string, rule int) error { - name := "__combined_" + strings.Join(c.states, "__") - if _, ok := rules[name]; !ok { - combined := []*CompiledRule{} - for _, state := range c.states { - rules, ok := rules[state] - if !ok { - return fmt.Errorf("invalid combine state %q", state) - } - combined = append(combined, rules...) - } - rules[name] = combined - } - rules[state][rule].Mutator = Push(name) - return nil -} - -// Push states onto the stack. -func Push(states ...string) MutatorFunc { - return func(s *LexerState) error { - if len(states) == 0 { - s.Stack = append(s.Stack, s.State) - } else { - for _, state := range states { - if state == "#pop" { - s.Stack = s.Stack[:len(s.Stack)-1] - } else { - s.Stack = append(s.Stack, state) - } - } - } - return nil - } -} - -// Pop state from the stack when rule matches. 
-func Pop(n int) MutatorFunc { - return func(state *LexerState) error { - if len(state.Stack) == 0 { - return fmt.Errorf("nothing to pop") - } - state.Stack = state.Stack[:len(state.Stack)-n] - return nil - } -} - -// Default returns a Rule that applies a set of Mutators. -func Default(mutators ...Mutator) Rule { - return Rule{Mutator: Mutators(mutators...)} -} - -// Stringify returns the raw string for a set of tokens. -func Stringify(tokens ...Token) string { - out := []string{} - for _, t := range tokens { - out = append(out, t.Value) - } - return strings.Join(out, "") -} diff --git a/vendor/github.com/alecthomas/chroma/styles/abap.go b/vendor/github.com/alecthomas/chroma/styles/abap.go deleted file mode 100644 index b6d07fb271fa..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/abap.go +++ /dev/null @@ -1,18 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Abap style. -var Abap = Register(chroma.MustNewStyle("abap", chroma.StyleEntries{ - chroma.Comment: "italic #888", - chroma.CommentSpecial: "#888", - chroma.Keyword: "#00f", - chroma.OperatorWord: "#00f", - chroma.Name: "#000", - chroma.LiteralNumber: "#3af", - chroma.LiteralString: "#5a2", - chroma.Error: "#F00", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/algol.go b/vendor/github.com/alecthomas/chroma/styles/algol.go deleted file mode 100644 index 1e8a7b442094..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/algol.go +++ /dev/null @@ -1,25 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Algol style. 
-var Algol = Register(chroma.MustNewStyle("algol", chroma.StyleEntries{ - chroma.Comment: "italic #888", - chroma.CommentPreproc: "bold noitalic #888", - chroma.CommentSpecial: "bold noitalic #888", - chroma.Keyword: "underline bold", - chroma.KeywordDeclaration: "italic", - chroma.NameBuiltin: "bold italic", - chroma.NameBuiltinPseudo: "bold italic", - chroma.NameNamespace: "bold italic #666", - chroma.NameClass: "bold italic #666", - chroma.NameFunction: "bold italic #666", - chroma.NameVariable: "bold italic #666", - chroma.NameConstant: "bold italic #666", - chroma.OperatorWord: "bold", - chroma.LiteralString: "italic #666", - chroma.Error: "border:#FF0000", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/algol_nu.go b/vendor/github.com/alecthomas/chroma/styles/algol_nu.go deleted file mode 100644 index f8c6f177e5ca..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/algol_nu.go +++ /dev/null @@ -1,25 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// AlgolNu style. 
-var AlgolNu = Register(chroma.MustNewStyle("algol_nu", chroma.StyleEntries{ - chroma.Comment: "italic #888", - chroma.CommentPreproc: "bold noitalic #888", - chroma.CommentSpecial: "bold noitalic #888", - chroma.Keyword: "bold", - chroma.KeywordDeclaration: "italic", - chroma.NameBuiltin: "bold italic", - chroma.NameBuiltinPseudo: "bold italic", - chroma.NameNamespace: "bold italic #666", - chroma.NameClass: "bold italic #666", - chroma.NameFunction: "bold italic #666", - chroma.NameVariable: "bold italic #666", - chroma.NameConstant: "bold italic #666", - chroma.OperatorWord: "bold", - chroma.LiteralString: "italic #666", - chroma.Error: "border:#FF0000", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/api.go b/vendor/github.com/alecthomas/chroma/styles/api.go deleted file mode 100644 index f3ce67398503..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/api.go +++ /dev/null @@ -1,37 +0,0 @@ -package styles - -import ( - "sort" - - "github.com/alecthomas/chroma" -) - -// Registry of Styles. -var Registry = map[string]*chroma.Style{} - -// Fallback style. Reassign to change the default fallback style. -var Fallback = SwapOff - -// Register a chroma.Style. -func Register(style *chroma.Style) *chroma.Style { - Registry[style.Name] = style - return style -} - -// Names of all available styles. -func Names() []string { - out := []string{} - for name := range Registry { - out = append(out, name) - } - sort.Strings(out) - return out -} - -// Get named style, or Fallback. 
-func Get(name string) *chroma.Style { - if style, ok := Registry[name]; ok { - return style - } - return Fallback -} diff --git a/vendor/github.com/alecthomas/chroma/styles/arduino.go b/vendor/github.com/alecthomas/chroma/styles/arduino.go deleted file mode 100644 index 9e48fb4aeb03..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/arduino.go +++ /dev/null @@ -1,25 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Arduino style. -var Arduino = Register(chroma.MustNewStyle("arduino", chroma.StyleEntries{ - chroma.Error: "#a61717", - chroma.Comment: "#95a5a6", - chroma.CommentPreproc: "#728E00", - chroma.Keyword: "#728E00", - chroma.KeywordConstant: "#00979D", - chroma.KeywordPseudo: "#00979D", - chroma.KeywordReserved: "#00979D", - chroma.KeywordType: "#00979D", - chroma.Operator: "#728E00", - chroma.Name: "#434f54", - chroma.NameBuiltin: "#728E00", - chroma.NameFunction: "#D35400", - chroma.NameOther: "#728E00", - chroma.LiteralNumber: "#8A7B52", - chroma.LiteralString: "#7F8C8D", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/autumn.go b/vendor/github.com/alecthomas/chroma/styles/autumn.go deleted file mode 100644 index 3966372b9efc..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/autumn.go +++ /dev/null @@ -1,43 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Autumn style. 
-var Autumn = Register(chroma.MustNewStyle("autumn", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "italic #aaaaaa", - chroma.CommentPreproc: "noitalic #4c8317", - chroma.CommentSpecial: "italic #0000aa", - chroma.Keyword: "#0000aa", - chroma.KeywordType: "#00aaaa", - chroma.OperatorWord: "#0000aa", - chroma.NameBuiltin: "#00aaaa", - chroma.NameFunction: "#00aa00", - chroma.NameClass: "underline #00aa00", - chroma.NameNamespace: "underline #00aaaa", - chroma.NameVariable: "#aa0000", - chroma.NameConstant: "#aa0000", - chroma.NameEntity: "bold #800", - chroma.NameAttribute: "#1e90ff", - chroma.NameTag: "bold #1e90ff", - chroma.NameDecorator: "#888888", - chroma.LiteralString: "#aa5500", - chroma.LiteralStringSymbol: "#0000aa", - chroma.LiteralStringRegex: "#009999", - chroma.LiteralNumber: "#009999", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#aa0000", - chroma.GenericInserted: "#00aa00", - chroma.GenericError: "#aa0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "#555555", - chroma.GenericOutput: "#888888", - chroma.GenericTraceback: "#aa0000", - chroma.GenericUnderline: "underline", - chroma.Error: "#F00 bg:#FAA", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/base16-snazzy.go b/vendor/github.com/alecthomas/chroma/styles/base16-snazzy.go deleted file mode 100644 index 160c75be8a83..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/base16-snazzy.go +++ /dev/null @@ -1,81 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Base16Snazzy style -var Base16Snazzy = Register(chroma.MustNewStyle("base16-snazzy", chroma.StyleEntries{ - chroma.Comment: "#78787e", - chroma.CommentHashbang: "#78787e", - chroma.CommentMultiline: "#78787e", - chroma.CommentPreproc: "#78787e", - chroma.CommentSingle: "#78787e", - chroma.CommentSpecial: "#78787e", - 
chroma.Generic: "#e2e4e5", - chroma.GenericDeleted: "#ff5c57", - chroma.GenericEmph: "#e2e4e5 underline", - chroma.GenericError: "#ff5c57", - chroma.GenericHeading: "#e2e4e5 bold", - chroma.GenericInserted: "#e2e4e5 bold", - chroma.GenericOutput: "#43454f", - chroma.GenericPrompt: "#e2e4e5", - chroma.GenericStrong: "#e2e4e5 italic", - chroma.GenericSubheading: "#e2e4e5 bold", - chroma.GenericTraceback: "#e2e4e5", - chroma.GenericUnderline: "underline", - chroma.Error: "#ff5c57", - chroma.Keyword: "#ff6ac1", - chroma.KeywordConstant: "#ff6ac1", - chroma.KeywordDeclaration: "#ff5c57", - chroma.KeywordNamespace: "#ff6ac1", - chroma.KeywordPseudo: "#ff6ac1", - chroma.KeywordReserved: "#ff6ac1", - chroma.KeywordType: "#9aedfe", - chroma.Literal: "#e2e4e5", - chroma.LiteralDate: "#e2e4e5", - chroma.Name: "#e2e4e5", - chroma.NameAttribute: "#57c7ff", - chroma.NameBuiltin: "#ff5c57", - chroma.NameBuiltinPseudo: "#e2e4e5", - chroma.NameClass: "#f3f99d", - chroma.NameConstant: "#ff9f43", - chroma.NameDecorator: "#ff9f43", - chroma.NameEntity: "#e2e4e5", - chroma.NameException: "#e2e4e5", - chroma.NameFunction: "#57c7ff", - chroma.NameLabel: "#ff5c57", - chroma.NameNamespace: "#e2e4e5", - chroma.NameOther: "#e2e4e5", - chroma.NameTag: "#ff6ac1", - chroma.NameVariable: "#ff5c57", - chroma.NameVariableClass: "#ff5c57", - chroma.NameVariableGlobal: "#ff5c57", - chroma.NameVariableInstance: "#ff5c57", - chroma.LiteralNumber: "#ff9f43", - chroma.LiteralNumberBin: "#ff9f43", - chroma.LiteralNumberFloat: "#ff9f43", - chroma.LiteralNumberHex: "#ff9f43", - chroma.LiteralNumberInteger: "#ff9f43", - chroma.LiteralNumberIntegerLong: "#ff9f43", - chroma.LiteralNumberOct: "#ff9f43", - chroma.Operator: "#ff6ac1", - chroma.OperatorWord: "#ff6ac1", - chroma.Other: "#e2e4e5", - chroma.Punctuation: "#e2e4e5", - chroma.LiteralString: "#5af78e", - chroma.LiteralStringBacktick: "#5af78e", - chroma.LiteralStringChar: "#5af78e", - chroma.LiteralStringDoc: "#5af78e", - chroma.LiteralStringDouble: 
"#5af78e", - chroma.LiteralStringEscape: "#5af78e", - chroma.LiteralStringHeredoc: "#5af78e", - chroma.LiteralStringInterpol: "#5af78e", - chroma.LiteralStringOther: "#5af78e", - chroma.LiteralStringRegex: "#5af78e", - chroma.LiteralStringSingle: "#5af78e", - chroma.LiteralStringSymbol: "#5af78e", - chroma.Text: "#e2e4e5", - chroma.TextWhitespace: "#e2e4e5", - chroma.Background: " bg:#282a36", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/borland.go b/vendor/github.com/alecthomas/chroma/styles/borland.go deleted file mode 100644 index 9c0fff6fccdd..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/borland.go +++ /dev/null @@ -1,33 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Borland style. -var Borland = Register(chroma.MustNewStyle("borland", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "italic #008800", - chroma.CommentPreproc: "noitalic #008080", - chroma.CommentSpecial: "noitalic bold", - chroma.LiteralString: "#0000FF", - chroma.LiteralStringChar: "#800080", - chroma.LiteralNumber: "#0000FF", - chroma.Keyword: "bold #000080", - chroma.OperatorWord: "bold", - chroma.NameTag: "bold #000080", - chroma.NameAttribute: "#FF0000", - chroma.GenericHeading: "#999999", - chroma.GenericSubheading: "#aaaaaa", - chroma.GenericDeleted: "bg:#ffdddd #000000", - chroma.GenericInserted: "bg:#ddffdd #000000", - chroma.GenericError: "#aa0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "#555555", - chroma.GenericOutput: "#888888", - chroma.GenericTraceback: "#aa0000", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#e3d2d2 #a61717", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/bw.go b/vendor/github.com/alecthomas/chroma/styles/bw.go deleted file mode 100644 index 3e800d5a7ae2..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/bw.go +++ /dev/null @@ -1,30 +0,0 @@ -package 
styles - -import ( - "github.com/alecthomas/chroma" -) - -// BlackWhite style. -var BlackWhite = Register(chroma.MustNewStyle("bw", chroma.StyleEntries{ - chroma.Comment: "italic", - chroma.CommentPreproc: "noitalic", - chroma.Keyword: "bold", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "nobold", - chroma.OperatorWord: "bold", - chroma.NameClass: "bold", - chroma.NameNamespace: "bold", - chroma.NameException: "bold", - chroma.NameEntity: "bold", - chroma.NameTag: "bold", - chroma.LiteralString: "italic", - chroma.LiteralStringInterpol: "bold", - chroma.LiteralStringEscape: "bold", - chroma.GenericHeading: "bold", - chroma.GenericSubheading: "bold", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold", - chroma.Error: "border:#FF0000", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/colorful.go b/vendor/github.com/alecthomas/chroma/styles/colorful.go deleted file mode 100644 index dc77c5bfec31..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/colorful.go +++ /dev/null @@ -1,59 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Colorful style. 
-var Colorful = Register(chroma.MustNewStyle("colorful", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "#888", - chroma.CommentPreproc: "#579", - chroma.CommentSpecial: "bold #cc0000", - chroma.Keyword: "bold #080", - chroma.KeywordPseudo: "#038", - chroma.KeywordType: "#339", - chroma.Operator: "#333", - chroma.OperatorWord: "bold #000", - chroma.NameBuiltin: "#007020", - chroma.NameFunction: "bold #06B", - chroma.NameClass: "bold #B06", - chroma.NameNamespace: "bold #0e84b5", - chroma.NameException: "bold #F00", - chroma.NameVariable: "#963", - chroma.NameVariableInstance: "#33B", - chroma.NameVariableClass: "#369", - chroma.NameVariableGlobal: "bold #d70", - chroma.NameConstant: "bold #036", - chroma.NameLabel: "bold #970", - chroma.NameEntity: "bold #800", - chroma.NameAttribute: "#00C", - chroma.NameTag: "#070", - chroma.NameDecorator: "bold #555", - chroma.LiteralString: "bg:#fff0f0", - chroma.LiteralStringChar: "#04D bg:", - chroma.LiteralStringDoc: "#D42 bg:", - chroma.LiteralStringInterpol: "bg:#eee", - chroma.LiteralStringEscape: "bold #666", - chroma.LiteralStringRegex: "bg:#fff0ff #000", - chroma.LiteralStringSymbol: "#A60 bg:", - chroma.LiteralStringOther: "#D20", - chroma.LiteralNumber: "bold #60E", - chroma.LiteralNumberInteger: "bold #00D", - chroma.LiteralNumberFloat: "bold #60E", - chroma.LiteralNumberHex: "bold #058", - chroma.LiteralNumberOct: "bold #40E", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#A00000", - chroma.GenericInserted: "#00A000", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #c65d09", - chroma.GenericOutput: "#888", - chroma.GenericTraceback: "#04D", - chroma.GenericUnderline: "underline", - chroma.Error: "#F00 bg:#FAA", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/doom-one.go 
b/vendor/github.com/alecthomas/chroma/styles/doom-one.go deleted file mode 100644 index 6450455504ad..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/doom-one.go +++ /dev/null @@ -1,58 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Doom One style. Inspired by Atom One and Doom Emacs's Atom One theme -var DoomOne = Register(chroma.MustNewStyle("doom-one", chroma.StyleEntries{ - chroma.Text: "#b0c4de", - chroma.Error: "#b0c4de", - chroma.Comment: "italic #8a93a5", - chroma.CommentHashbang: "bold", - chroma.Keyword: "#c678dd", - chroma.KeywordType: "#ef8383", - chroma.KeywordConstant: "bold #b756ff", - chroma.Operator: "#c7bf54", - chroma.OperatorWord: "bold #b756ff", - chroma.Punctuation: "#b0c4de", - chroma.Name: "#c1abea", - chroma.NameAttribute: "#b3d23c", - chroma.NameBuiltin: "#ef8383", - chroma.NameClass: "#76a9f9", - chroma.NameConstant: "bold #b756ff", - chroma.NameDecorator: "#e5c07b", - chroma.NameEntity: "#bda26f", - chroma.NameException: "bold #fd7474", - chroma.NameFunction: "#00b1f7", - chroma.NameProperty: "#cebc3a", - chroma.NameLabel: "#f5a40d", - chroma.NameNamespace: "#76a9f9", - chroma.NameTag: "#e06c75", - chroma.NameVariable: "#DCAEEA", - chroma.NameVariableGlobal: "bold #DCAEEA", - chroma.NameVariableInstance: "#e06c75", - chroma.Literal: "#98c379", - chroma.Number: "#d19a66", - chroma.String: "#98c379", - chroma.StringDoc: "#7e97c3", - chroma.StringDouble: "#63c381", - chroma.StringEscape: "bold #d26464", - chroma.StringHeredoc: "#98c379", - chroma.StringInterpol: "#98c379", - chroma.StringOther: "#70b33f", - chroma.StringRegex: "#56b6c2", - chroma.StringSingle: "#98c379", - chroma.StringSymbol: "#56b6c2", - chroma.Generic: "#b0c4de", - chroma.GenericEmph: "italic", - chroma.GenericHeading: "bold #a2cbff", - chroma.GenericInserted: "#a6e22e", - chroma.GenericOutput: "#a6e22e", - chroma.GenericUnderline: "underline", - chroma.GenericPrompt: "#a6e22e", - chroma.GenericStrong: "bold", - 
chroma.GenericSubheading: "#a2cbff", - chroma.GenericTraceback: "#a2cbff", - chroma.Background: "#b0c4de bg:#282c34", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/doom-one2.go b/vendor/github.com/alecthomas/chroma/styles/doom-one2.go deleted file mode 100644 index 465417391e7b..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/doom-one2.go +++ /dev/null @@ -1,71 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Doom One 2 style. Inspired by Atom One and Doom Emacs's Atom One theme -var DoomOne2 = Register(chroma.MustNewStyle("doom-one2", chroma.StyleEntries{ - chroma.Text: "#b0c4de", - chroma.Error: "#b0c4de", - chroma.Comment: "italic #8a93a5", - chroma.CommentHashbang: "bold", - chroma.Keyword: "#76a9f9", - chroma.KeywordConstant: "#e5c07b", - chroma.KeywordType: "#e5c07b", - chroma.Operator: "#54b1c7", - chroma.OperatorWord: "bold #b756ff", - chroma.Punctuation: "#abb2bf", - chroma.Name: "#aa89ea", - chroma.NameAttribute: "#cebc3a", - chroma.NameBuiltin: "#e5c07b", - chroma.NameClass: "#ca72ff", - chroma.NameConstant: "bold", - chroma.NameDecorator: "#e5c07b", - chroma.NameEntity: "#bda26f", - chroma.NameException: "bold #fd7474", - chroma.NameFunction: "#00b1f7", - chroma.NameProperty: "#cebc3a", - chroma.NameLabel: "#f5a40d", - chroma.NameNamespace: "#ca72ff", - chroma.NameTag: "#76a9f9", - chroma.NameVariable: "#DCAEEA", - chroma.NameVariableClass: "#DCAEEA", - chroma.NameVariableGlobal: "bold #DCAEEA", - chroma.NameVariableInstance: "#e06c75", - chroma.NameVariableMagic: "#DCAEEA", - chroma.Literal: "#98c379", - chroma.LiteralDate: "#98c379", - chroma.Number: "#d19a66", - chroma.NumberBin: "#d19a66", - chroma.NumberFloat: "#d19a66", - chroma.NumberHex: "#d19a66", - chroma.NumberInteger: "#d19a66", - chroma.NumberIntegerLong: "#d19a66", - chroma.NumberOct: "#d19a66", - chroma.String: "#98c379", - chroma.StringAffix: "#98c379", - chroma.StringBacktick: "#98c379", - chroma.StringDelimiter: "#98c379", - 
chroma.StringDoc: "#7e97c3", - chroma.StringDouble: "#63c381", - chroma.StringEscape: "bold #d26464", - chroma.StringHeredoc: "#98c379", - chroma.StringInterpol: "#98c379", - chroma.StringOther: "#70b33f", - chroma.StringRegex: "#56b6c2", - chroma.StringSingle: "#98c379", - chroma.StringSymbol: "#56b6c2", - chroma.Generic: "#b0c4de", - chroma.GenericDeleted: "#b0c4de", - chroma.GenericEmph: "italic", - chroma.GenericHeading: "bold #a2cbff", - chroma.GenericInserted: "#a6e22e", - chroma.GenericOutput: "#a6e22e", - chroma.GenericUnderline: "underline", - chroma.GenericPrompt: "#a6e22e", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "#a2cbff", - chroma.GenericTraceback: "#a2cbff", - chroma.Background: "#b0c4de bg:#282c34", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/dracula.go b/vendor/github.com/alecthomas/chroma/styles/dracula.go deleted file mode 100644 index d1542f2715f2..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/dracula.go +++ /dev/null @@ -1,81 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Dracula Style -var Dracula = Register(chroma.MustNewStyle("dracula", chroma.StyleEntries{ - chroma.Comment: "#6272a4", - chroma.CommentHashbang: "#6272a4", - chroma.CommentMultiline: "#6272a4", - chroma.CommentPreproc: "#ff79c6", - chroma.CommentSingle: "#6272a4", - chroma.CommentSpecial: "#6272a4", - chroma.Generic: "#f8f8f2", - chroma.GenericDeleted: "#ff5555", - chroma.GenericEmph: "#f8f8f2 underline", - chroma.GenericError: "#f8f8f2", - chroma.GenericHeading: "#f8f8f2 bold", - chroma.GenericInserted: "#50fa7b bold", - chroma.GenericOutput: "#44475a", - chroma.GenericPrompt: "#f8f8f2", - chroma.GenericStrong: "#f8f8f2", - chroma.GenericSubheading: "#f8f8f2 bold", - chroma.GenericTraceback: "#f8f8f2", - chroma.GenericUnderline: "underline", - chroma.Error: "#f8f8f2", - chroma.Keyword: "#ff79c6", - chroma.KeywordConstant: "#ff79c6", - chroma.KeywordDeclaration: "#8be9fd italic", - 
chroma.KeywordNamespace: "#ff79c6", - chroma.KeywordPseudo: "#ff79c6", - chroma.KeywordReserved: "#ff79c6", - chroma.KeywordType: "#8be9fd", - chroma.Literal: "#f8f8f2", - chroma.LiteralDate: "#f8f8f2", - chroma.Name: "#f8f8f2", - chroma.NameAttribute: "#50fa7b", - chroma.NameBuiltin: "#8be9fd italic", - chroma.NameBuiltinPseudo: "#f8f8f2", - chroma.NameClass: "#50fa7b", - chroma.NameConstant: "#f8f8f2", - chroma.NameDecorator: "#f8f8f2", - chroma.NameEntity: "#f8f8f2", - chroma.NameException: "#f8f8f2", - chroma.NameFunction: "#50fa7b", - chroma.NameLabel: "#8be9fd italic", - chroma.NameNamespace: "#f8f8f2", - chroma.NameOther: "#f8f8f2", - chroma.NameTag: "#ff79c6", - chroma.NameVariable: "#8be9fd italic", - chroma.NameVariableClass: "#8be9fd italic", - chroma.NameVariableGlobal: "#8be9fd italic", - chroma.NameVariableInstance: "#8be9fd italic", - chroma.LiteralNumber: "#bd93f9", - chroma.LiteralNumberBin: "#bd93f9", - chroma.LiteralNumberFloat: "#bd93f9", - chroma.LiteralNumberHex: "#bd93f9", - chroma.LiteralNumberInteger: "#bd93f9", - chroma.LiteralNumberIntegerLong: "#bd93f9", - chroma.LiteralNumberOct: "#bd93f9", - chroma.Operator: "#ff79c6", - chroma.OperatorWord: "#ff79c6", - chroma.Other: "#f8f8f2", - chroma.Punctuation: "#f8f8f2", - chroma.LiteralString: "#f1fa8c", - chroma.LiteralStringBacktick: "#f1fa8c", - chroma.LiteralStringChar: "#f1fa8c", - chroma.LiteralStringDoc: "#f1fa8c", - chroma.LiteralStringDouble: "#f1fa8c", - chroma.LiteralStringEscape: "#f1fa8c", - chroma.LiteralStringHeredoc: "#f1fa8c", - chroma.LiteralStringInterpol: "#f1fa8c", - chroma.LiteralStringOther: "#f1fa8c", - chroma.LiteralStringRegex: "#f1fa8c", - chroma.LiteralStringSingle: "#f1fa8c", - chroma.LiteralStringSymbol: "#f1fa8c", - chroma.Text: "#f8f8f2", - chroma.TextWhitespace: "#f8f8f2", - chroma.Background: " bg:#282a36", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/emacs.go b/vendor/github.com/alecthomas/chroma/styles/emacs.go deleted file mode 100644 index 
4835abd7146f..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/emacs.go +++ /dev/null @@ -1,51 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Emacs style. -var Emacs = Register(chroma.MustNewStyle("emacs", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "italic #008800", - chroma.CommentPreproc: "noitalic", - chroma.CommentSpecial: "noitalic bold", - chroma.Keyword: "bold #AA22FF", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "bold #00BB00", - chroma.Operator: "#666666", - chroma.OperatorWord: "bold #AA22FF", - chroma.NameBuiltin: "#AA22FF", - chroma.NameFunction: "#00A000", - chroma.NameClass: "#0000FF", - chroma.NameNamespace: "bold #0000FF", - chroma.NameException: "bold #D2413A", - chroma.NameVariable: "#B8860B", - chroma.NameConstant: "#880000", - chroma.NameLabel: "#A0A000", - chroma.NameEntity: "bold #999999", - chroma.NameAttribute: "#BB4444", - chroma.NameTag: "bold #008000", - chroma.NameDecorator: "#AA22FF", - chroma.LiteralString: "#BB4444", - chroma.LiteralStringDoc: "italic", - chroma.LiteralStringInterpol: "bold #BB6688", - chroma.LiteralStringEscape: "bold #BB6622", - chroma.LiteralStringRegex: "#BB6688", - chroma.LiteralStringSymbol: "#B8860B", - chroma.LiteralStringOther: "#008000", - chroma.LiteralNumber: "#666666", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#A00000", - chroma.GenericInserted: "#00A000", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #000080", - chroma.GenericOutput: "#888", - chroma.GenericTraceback: "#04D", - chroma.GenericUnderline: "underline", - chroma.Error: "border:#FF0000", - chroma.Background: " bg:#f8f8f8", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/friendly.go b/vendor/github.com/alecthomas/chroma/styles/friendly.go deleted file mode 100644 index ad02341d3a2d..000000000000 
--- a/vendor/github.com/alecthomas/chroma/styles/friendly.go +++ /dev/null @@ -1,51 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Friendly style. -var Friendly = Register(chroma.MustNewStyle("friendly", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "italic #60a0b0", - chroma.CommentPreproc: "noitalic #007020", - chroma.CommentSpecial: "noitalic bg:#fff0f0", - chroma.Keyword: "bold #007020", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "nobold #902000", - chroma.Operator: "#666666", - chroma.OperatorWord: "bold #007020", - chroma.NameBuiltin: "#007020", - chroma.NameFunction: "#06287e", - chroma.NameClass: "bold #0e84b5", - chroma.NameNamespace: "bold #0e84b5", - chroma.NameException: "#007020", - chroma.NameVariable: "#bb60d5", - chroma.NameConstant: "#60add5", - chroma.NameLabel: "bold #002070", - chroma.NameEntity: "bold #d55537", - chroma.NameAttribute: "#4070a0", - chroma.NameTag: "bold #062873", - chroma.NameDecorator: "bold #555555", - chroma.LiteralString: "#4070a0", - chroma.LiteralStringDoc: "italic", - chroma.LiteralStringInterpol: "italic #70a0d0", - chroma.LiteralStringEscape: "bold #4070a0", - chroma.LiteralStringRegex: "#235388", - chroma.LiteralStringSymbol: "#517918", - chroma.LiteralStringOther: "#c65d09", - chroma.LiteralNumber: "#40a070", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#A00000", - chroma.GenericInserted: "#00A000", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #c65d09", - chroma.GenericOutput: "#888", - chroma.GenericTraceback: "#04D", - chroma.GenericUnderline: "underline", - chroma.Error: "border:#FF0000", - chroma.Background: " bg:#f0f0f0", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/fruity.go b/vendor/github.com/alecthomas/chroma/styles/fruity.go deleted file mode 100644 index 
c2577fa279c8..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/fruity.go +++ /dev/null @@ -1,26 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Fruity style. -var Fruity = Register(chroma.MustNewStyle("fruity", chroma.StyleEntries{ - chroma.TextWhitespace: "#888888", - chroma.Background: "#ffffff bg:#111111", - chroma.GenericOutput: "#444444 bg:#222222", - chroma.Keyword: "#fb660a bold", - chroma.KeywordPseudo: "nobold", - chroma.LiteralNumber: "#0086f7 bold", - chroma.NameTag: "#fb660a bold", - chroma.NameVariable: "#fb660a", - chroma.Comment: "#008800 bg:#0f140f italic", - chroma.NameAttribute: "#ff0086 bold", - chroma.LiteralString: "#0086d2", - chroma.NameFunction: "#ff0086 bold", - chroma.GenericHeading: "#ffffff bold", - chroma.KeywordType: "#cdcaa9 bold", - chroma.GenericSubheading: "#ffffff bold", - chroma.NameConstant: "#0086d2", - chroma.CommentPreproc: "#ff0007 bold", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/github.go b/vendor/github.com/alecthomas/chroma/styles/github.go deleted file mode 100644 index 7ef2481ca0c2..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/github.go +++ /dev/null @@ -1,51 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// GitHub style. 
-var GitHub = Register(chroma.MustNewStyle("github", chroma.StyleEntries{ - chroma.CommentMultiline: "italic #999988", - chroma.CommentPreproc: "bold #999999", - chroma.CommentSingle: "italic #999988", - chroma.CommentSpecial: "bold italic #999999", - chroma.Comment: "italic #999988", - chroma.Error: "bg:#e3d2d2 #a61717", - chroma.GenericDeleted: "bg:#ffdddd #000000", - chroma.GenericEmph: "italic #000000", - chroma.GenericError: "#aa0000", - chroma.GenericHeading: "#999999", - chroma.GenericInserted: "bg:#ddffdd #000000", - chroma.GenericOutput: "#888888", - chroma.GenericPrompt: "#555555", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "#aaaaaa", - chroma.GenericTraceback: "#aa0000", - chroma.GenericUnderline: "underline", - chroma.KeywordType: "bold #445588", - chroma.Keyword: "bold #000000", - chroma.LiteralNumber: "#009999", - chroma.LiteralStringRegex: "#009926", - chroma.LiteralStringSymbol: "#990073", - chroma.LiteralString: "#d14", - chroma.NameAttribute: "#008080", - chroma.NameBuiltinPseudo: "#999999", - chroma.NameBuiltin: "#0086B3", - chroma.NameClass: "bold #445588", - chroma.NameConstant: "#008080", - chroma.NameDecorator: "bold #3c5d5d", - chroma.NameEntity: "#800080", - chroma.NameException: "bold #990000", - chroma.NameFunction: "bold #990000", - chroma.NameLabel: "bold #990000", - chroma.NameNamespace: "#555555", - chroma.NameTag: "#000080", - chroma.NameVariableClass: "#008080", - chroma.NameVariableGlobal: "#008080", - chroma.NameVariableInstance: "#008080", - chroma.NameVariable: "#008080", - chroma.Operator: "bold #000000", - chroma.TextWhitespace: "#bbbbbb", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/hr_dark.go b/vendor/github.com/alecthomas/chroma/styles/hr_dark.go deleted file mode 100644 index 10bb64fbd3e1..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/hr_dark.go +++ /dev/null @@ -1,17 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) 
- -// Theme based on HackerRank Dark Editor theme -var HrDark = Register(chroma.MustNewStyle("hrdark", chroma.StyleEntries{ - chroma.Comment: "italic #828b96", - chroma.Keyword: "#ff636f", - chroma.OperatorWord: "#ff636f", - chroma.Name: "#58a1dd", - chroma.Literal: "#a6be9d", - chroma.Operator: "#ff636f", - chroma.Background: "#1d2432", - chroma.Other: "#fff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/hr_high_contrast.go b/vendor/github.com/alecthomas/chroma/styles/hr_high_contrast.go deleted file mode 100644 index d1988589c73c..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/hr_high_contrast.go +++ /dev/null @@ -1,19 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Theme based on HackerRank High Contrast Editor Theme -var HrHighContrast = Register(chroma.MustNewStyle("hr_high_contrast", chroma.StyleEntries{ - chroma.Comment: "#5a8349", - chroma.Keyword: "#467faf", - chroma.OperatorWord: "#467faf", - chroma.Name: "#ffffff", - chroma.LiteralString: "#a87662", - chroma.LiteralNumber: "#fff", - chroma.LiteralStringBoolean: "#467faf", - chroma.Operator: "#e4e400", - chroma.Background: "#000", - chroma.Other: "#d5d500", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/igor.go b/vendor/github.com/alecthomas/chroma/styles/igor.go deleted file mode 100644 index 6a6d4cd0817a..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/igor.go +++ /dev/null @@ -1,16 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Igor style. 
-var Igor = Register(chroma.MustNewStyle("igor", chroma.StyleEntries{ - chroma.Comment: "italic #FF0000", - chroma.Keyword: "#0000FF", - chroma.NameFunction: "#C34E00", - chroma.NameDecorator: "#CC00A3", - chroma.NameClass: "#007575", - chroma.LiteralString: "#009C00", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/lovelace.go b/vendor/github.com/alecthomas/chroma/styles/lovelace.go deleted file mode 100644 index 074cc08967b4..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/lovelace.go +++ /dev/null @@ -1,60 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Lovelace style. -var Lovelace = Register(chroma.MustNewStyle("lovelace", chroma.StyleEntries{ - chroma.TextWhitespace: "#a89028", - chroma.Comment: "italic #888888", - chroma.CommentHashbang: "#287088", - chroma.CommentMultiline: "#888888", - chroma.CommentPreproc: "noitalic #289870", - chroma.Keyword: "#2838b0", - chroma.KeywordConstant: "italic #444444", - chroma.KeywordDeclaration: "italic", - chroma.KeywordType: "italic", - chroma.Operator: "#666666", - chroma.OperatorWord: "#a848a8", - chroma.Punctuation: "#888888", - chroma.NameAttribute: "#388038", - chroma.NameBuiltin: "#388038", - chroma.NameBuiltinPseudo: "italic", - chroma.NameClass: "#287088", - chroma.NameConstant: "#b85820", - chroma.NameDecorator: "#287088", - chroma.NameEntity: "#709030", - chroma.NameException: "#908828", - chroma.NameFunction: "#785840", - chroma.NameFunctionMagic: "#b85820", - chroma.NameLabel: "#289870", - chroma.NameNamespace: "#289870", - chroma.NameTag: "#2838b0", - chroma.NameVariable: "#b04040", - chroma.NameVariableGlobal: "#908828", - chroma.NameVariableMagic: "#b85820", - chroma.LiteralString: "#b83838", - chroma.LiteralStringAffix: "#444444", - chroma.LiteralStringChar: "#a848a8", - chroma.LiteralStringDelimiter: "#b85820", - chroma.LiteralStringDoc: "italic #b85820", - chroma.LiteralStringEscape: "#709030", - 
chroma.LiteralStringInterpol: "underline", - chroma.LiteralStringOther: "#a848a8", - chroma.LiteralStringRegex: "#a848a8", - chroma.LiteralNumber: "#444444", - chroma.GenericDeleted: "#c02828", - chroma.GenericEmph: "italic", - chroma.GenericError: "#c02828", - chroma.GenericHeading: "#666666", - chroma.GenericSubheading: "#444444", - chroma.GenericInserted: "#388038", - chroma.GenericOutput: "#666666", - chroma.GenericPrompt: "#444444", - chroma.GenericStrong: "bold", - chroma.GenericTraceback: "#2838b0", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#a848a8", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/manni.go b/vendor/github.com/alecthomas/chroma/styles/manni.go deleted file mode 100644 index 9942e7d091ac..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/manni.go +++ /dev/null @@ -1,51 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Manni style. -var Manni = Register(chroma.MustNewStyle("manni", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "italic #0099FF", - chroma.CommentPreproc: "noitalic #009999", - chroma.CommentSpecial: "bold", - chroma.Keyword: "bold #006699", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "#007788", - chroma.Operator: "#555555", - chroma.OperatorWord: "bold #000000", - chroma.NameBuiltin: "#336666", - chroma.NameFunction: "#CC00FF", - chroma.NameClass: "bold #00AA88", - chroma.NameNamespace: "bold #00CCFF", - chroma.NameException: "bold #CC0000", - chroma.NameVariable: "#003333", - chroma.NameConstant: "#336600", - chroma.NameLabel: "#9999FF", - chroma.NameEntity: "bold #999999", - chroma.NameAttribute: "#330099", - chroma.NameTag: "bold #330099", - chroma.NameDecorator: "#9999FF", - chroma.LiteralString: "#CC3300", - chroma.LiteralStringDoc: "italic", - chroma.LiteralStringInterpol: "#AA0000", - chroma.LiteralStringEscape: "bold #CC3300", - chroma.LiteralStringRegex: "#33AAAA", - 
chroma.LiteralStringSymbol: "#FFCC33", - chroma.LiteralStringOther: "#CC3300", - chroma.LiteralNumber: "#FF6600", - chroma.GenericHeading: "bold #003300", - chroma.GenericSubheading: "bold #003300", - chroma.GenericDeleted: "border:#CC0000 bg:#FFCCCC", - chroma.GenericInserted: "border:#00CC00 bg:#CCFFCC", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #000099", - chroma.GenericOutput: "#AAAAAA", - chroma.GenericTraceback: "#99CC66", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#FFAAAA #AA0000", - chroma.Background: " bg:#f0f3f3", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/monokai.go b/vendor/github.com/alecthomas/chroma/styles/monokai.go deleted file mode 100644 index 2586795acee2..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/monokai.go +++ /dev/null @@ -1,36 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Monokai style. -var Monokai = Register(chroma.MustNewStyle("monokai", chroma.StyleEntries{ - chroma.Text: "#f8f8f2", - chroma.Error: "#960050 bg:#1e0010", - chroma.Comment: "#75715e", - chroma.Keyword: "#66d9ef", - chroma.KeywordNamespace: "#f92672", - chroma.Operator: "#f92672", - chroma.Punctuation: "#f8f8f2", - chroma.Name: "#f8f8f2", - chroma.NameAttribute: "#a6e22e", - chroma.NameClass: "#a6e22e", - chroma.NameConstant: "#66d9ef", - chroma.NameDecorator: "#a6e22e", - chroma.NameException: "#a6e22e", - chroma.NameFunction: "#a6e22e", - chroma.NameOther: "#a6e22e", - chroma.NameTag: "#f92672", - chroma.LiteralNumber: "#ae81ff", - chroma.Literal: "#ae81ff", - chroma.LiteralDate: "#e6db74", - chroma.LiteralString: "#e6db74", - chroma.LiteralStringEscape: "#ae81ff", - chroma.GenericDeleted: "#f92672", - chroma.GenericEmph: "italic", - chroma.GenericInserted: "#a6e22e", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "#75715e", - chroma.Background: "bg:#272822", -})) diff --git 
a/vendor/github.com/alecthomas/chroma/styles/monokailight.go b/vendor/github.com/alecthomas/chroma/styles/monokailight.go deleted file mode 100644 index 61818a6806b2..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/monokailight.go +++ /dev/null @@ -1,33 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// MonokaiLight style. -var MonokaiLight = Register(chroma.MustNewStyle("monokailight", chroma.StyleEntries{ - chroma.Text: "#272822", - chroma.Error: "#960050 bg:#1e0010", - chroma.Comment: "#75715e", - chroma.Keyword: "#00a8c8", - chroma.KeywordNamespace: "#f92672", - chroma.Operator: "#f92672", - chroma.Punctuation: "#111111", - chroma.Name: "#111111", - chroma.NameAttribute: "#75af00", - chroma.NameClass: "#75af00", - chroma.NameConstant: "#00a8c8", - chroma.NameDecorator: "#75af00", - chroma.NameException: "#75af00", - chroma.NameFunction: "#75af00", - chroma.NameOther: "#75af00", - chroma.NameTag: "#f92672", - chroma.LiteralNumber: "#ae81ff", - chroma.Literal: "#ae81ff", - chroma.LiteralDate: "#d88200", - chroma.LiteralString: "#d88200", - chroma.LiteralStringEscape: "#8045FF", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.Background: " bg:#fafafa", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/murphy.go b/vendor/github.com/alecthomas/chroma/styles/murphy.go deleted file mode 100644 index 90e83c76aed3..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/murphy.go +++ /dev/null @@ -1,59 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Murphy style. 
-var Murphy = Register(chroma.MustNewStyle("murphy", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "#666 italic", - chroma.CommentPreproc: "#579 noitalic", - chroma.CommentSpecial: "#c00 bold", - chroma.Keyword: "bold #289", - chroma.KeywordPseudo: "#08f", - chroma.KeywordType: "#66f", - chroma.Operator: "#333", - chroma.OperatorWord: "bold #000", - chroma.NameBuiltin: "#072", - chroma.NameFunction: "bold #5ed", - chroma.NameClass: "bold #e9e", - chroma.NameNamespace: "bold #0e84b5", - chroma.NameException: "bold #F00", - chroma.NameVariable: "#036", - chroma.NameVariableInstance: "#aaf", - chroma.NameVariableClass: "#ccf", - chroma.NameVariableGlobal: "#f84", - chroma.NameConstant: "bold #5ed", - chroma.NameLabel: "bold #970", - chroma.NameEntity: "#800", - chroma.NameAttribute: "#007", - chroma.NameTag: "#070", - chroma.NameDecorator: "bold #555", - chroma.LiteralString: "bg:#e0e0ff", - chroma.LiteralStringChar: "#88F bg:", - chroma.LiteralStringDoc: "#D42 bg:", - chroma.LiteralStringInterpol: "bg:#eee", - chroma.LiteralStringEscape: "bold #666", - chroma.LiteralStringRegex: "bg:#e0e0ff #000", - chroma.LiteralStringSymbol: "#fc8 bg:", - chroma.LiteralStringOther: "#f88", - chroma.LiteralNumber: "bold #60E", - chroma.LiteralNumberInteger: "bold #66f", - chroma.LiteralNumberFloat: "bold #60E", - chroma.LiteralNumberHex: "bold #058", - chroma.LiteralNumberOct: "bold #40E", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#A00000", - chroma.GenericInserted: "#00A000", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #c65d09", - chroma.GenericOutput: "#888", - chroma.GenericTraceback: "#04D", - chroma.GenericUnderline: "underline", - chroma.Error: "#F00 bg:#FAA", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/native.go 
b/vendor/github.com/alecthomas/chroma/styles/native.go deleted file mode 100644 index 9fae09acae7a..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/native.go +++ /dev/null @@ -1,42 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Native style. -var Native = Register(chroma.MustNewStyle("native", chroma.StyleEntries{ - chroma.Background: "#d0d0d0 bg:#202020", - chroma.TextWhitespace: "#666666", - chroma.Comment: "italic #999999", - chroma.CommentPreproc: "noitalic bold #cd2828", - chroma.CommentSpecial: "noitalic bold #e50808 bg:#520000", - chroma.Keyword: "bold #6ab825", - chroma.KeywordPseudo: "nobold", - chroma.OperatorWord: "bold #6ab825", - chroma.LiteralString: "#ed9d13", - chroma.LiteralStringOther: "#ffa500", - chroma.LiteralNumber: "#3677a9", - chroma.NameBuiltin: "#24909d", - chroma.NameVariable: "#40ffff", - chroma.NameConstant: "#40ffff", - chroma.NameClass: "underline #447fcf", - chroma.NameFunction: "#447fcf", - chroma.NameNamespace: "underline #447fcf", - chroma.NameException: "#bbbbbb", - chroma.NameTag: "bold #6ab825", - chroma.NameAttribute: "#bbbbbb", - chroma.NameDecorator: "#ffa500", - chroma.GenericHeading: "bold #ffffff", - chroma.GenericSubheading: "underline #ffffff", - chroma.GenericDeleted: "#d22323", - chroma.GenericInserted: "#589819", - chroma.GenericError: "#d22323", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "#aaaaaa", - chroma.GenericOutput: "#cccccc", - chroma.GenericTraceback: "#d22323", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#e3d2d2 #a61717", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/nord.go b/vendor/github.com/alecthomas/chroma/styles/nord.go deleted file mode 100644 index 0fcbc5d4059c..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/nord.go +++ /dev/null @@ -1,75 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -var ( - // colors and palettes based on 
https://www.nordtheme.com/docs/colors-and-palettes - nord0 = "#2e3440" - nord1 = "#3b4252" // nolint - nord2 = "#434c5e" // nolint - nord3 = "#4c566a" - nord3b = "#616e87" - - nord4 = "#d8dee9" - nord5 = "#e5e9f0" // nolint - nord6 = "#eceff4" - - nord7 = "#8fbcbb" - nord8 = "#88c0d0" - nord9 = "#81a1c1" - nord10 = "#5e81ac" - - nord11 = "#bf616a" - nord12 = "#d08770" - nord13 = "#ebcb8b" - nord14 = "#a3be8c" - nord15 = "#b48ead" -) - -// Nord, an arctic, north-bluish color palette -var Nord = Register(chroma.MustNewStyle("nord", chroma.StyleEntries{ - chroma.TextWhitespace: nord4, - chroma.Comment: "italic " + nord3b, - chroma.CommentPreproc: nord10, - chroma.Keyword: "bold " + nord9, - chroma.KeywordPseudo: "nobold " + nord9, - chroma.KeywordType: "nobold " + nord9, - chroma.Operator: nord9, - chroma.OperatorWord: "bold " + nord9, - chroma.Name: nord4, - chroma.NameBuiltin: nord9, - chroma.NameFunction: nord8, - chroma.NameClass: nord7, - chroma.NameNamespace: nord7, - chroma.NameException: nord11, - chroma.NameVariable: nord4, - chroma.NameConstant: nord7, - chroma.NameLabel: nord7, - chroma.NameEntity: nord12, - chroma.NameAttribute: nord7, - chroma.NameTag: nord9, - chroma.NameDecorator: nord12, - chroma.Punctuation: nord6, - chroma.LiteralString: nord14, - chroma.LiteralStringDoc: nord3b, - chroma.LiteralStringInterpol: nord14, - chroma.LiteralStringEscape: nord13, - chroma.LiteralStringRegex: nord13, - chroma.LiteralStringSymbol: nord14, - chroma.LiteralStringOther: nord14, - chroma.LiteralNumber: nord15, - chroma.GenericHeading: "bold " + nord8, - chroma.GenericSubheading: "bold " + nord8, - chroma.GenericDeleted: nord11, - chroma.GenericInserted: nord14, - chroma.GenericError: nord11, - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold " + nord3, - chroma.GenericOutput: nord4, - chroma.GenericTraceback: nord11, - chroma.Error: nord11, - chroma.Background: nord4 + " bg:" + nord0, -})) diff --git 
a/vendor/github.com/alecthomas/chroma/styles/onesenterprise.go b/vendor/github.com/alecthomas/chroma/styles/onesenterprise.go deleted file mode 100644 index 9048e38ad1dc..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/onesenterprise.go +++ /dev/null @@ -1,17 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// 1S:Designer color palette -var OnesEnterprise = Register(chroma.MustNewStyle("onesenterprise", chroma.StyleEntries{ - chroma.Text: "#000000", - chroma.Comment: "#008000", - chroma.CommentPreproc: "#963200", - chroma.Operator: "#FF0000", - chroma.Keyword: "#FF0000", - chroma.Punctuation: "#FF0000", - chroma.LiteralString: "#000000", - chroma.Name: "#0000FF", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go b/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go deleted file mode 100644 index c8cf473105eb..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go +++ /dev/null @@ -1,44 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// ParaisoDark style. 
-var ParaisoDark = Register(chroma.MustNewStyle("paraiso-dark", chroma.StyleEntries{ - chroma.Text: "#e7e9db", - chroma.Error: "#ef6155", - chroma.Comment: "#776e71", - chroma.Keyword: "#815ba4", - chroma.KeywordNamespace: "#5bc4bf", - chroma.KeywordType: "#fec418", - chroma.Operator: "#5bc4bf", - chroma.Punctuation: "#e7e9db", - chroma.Name: "#e7e9db", - chroma.NameAttribute: "#06b6ef", - chroma.NameClass: "#fec418", - chroma.NameConstant: "#ef6155", - chroma.NameDecorator: "#5bc4bf", - chroma.NameException: "#ef6155", - chroma.NameFunction: "#06b6ef", - chroma.NameNamespace: "#fec418", - chroma.NameOther: "#06b6ef", - chroma.NameTag: "#5bc4bf", - chroma.NameVariable: "#ef6155", - chroma.LiteralNumber: "#f99b15", - chroma.Literal: "#f99b15", - chroma.LiteralDate: "#48b685", - chroma.LiteralString: "#48b685", - chroma.LiteralStringChar: "#e7e9db", - chroma.LiteralStringDoc: "#776e71", - chroma.LiteralStringEscape: "#f99b15", - chroma.LiteralStringInterpol: "#f99b15", - chroma.GenericDeleted: "#ef6155", - chroma.GenericEmph: "italic", - chroma.GenericHeading: "bold #e7e9db", - chroma.GenericInserted: "#48b685", - chroma.GenericPrompt: "bold #776e71", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "bold #5bc4bf", - chroma.Background: "bg:#2f1e2e", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go b/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go deleted file mode 100644 index b514dfa16ac0..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go +++ /dev/null @@ -1,44 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// ParaisoLight style. 
-var ParaisoLight = Register(chroma.MustNewStyle("paraiso-light", chroma.StyleEntries{ - chroma.Text: "#2f1e2e", - chroma.Error: "#ef6155", - chroma.Comment: "#8d8687", - chroma.Keyword: "#815ba4", - chroma.KeywordNamespace: "#5bc4bf", - chroma.KeywordType: "#fec418", - chroma.Operator: "#5bc4bf", - chroma.Punctuation: "#2f1e2e", - chroma.Name: "#2f1e2e", - chroma.NameAttribute: "#06b6ef", - chroma.NameClass: "#fec418", - chroma.NameConstant: "#ef6155", - chroma.NameDecorator: "#5bc4bf", - chroma.NameException: "#ef6155", - chroma.NameFunction: "#06b6ef", - chroma.NameNamespace: "#fec418", - chroma.NameOther: "#06b6ef", - chroma.NameTag: "#5bc4bf", - chroma.NameVariable: "#ef6155", - chroma.LiteralNumber: "#f99b15", - chroma.Literal: "#f99b15", - chroma.LiteralDate: "#48b685", - chroma.LiteralString: "#48b685", - chroma.LiteralStringChar: "#2f1e2e", - chroma.LiteralStringDoc: "#8d8687", - chroma.LiteralStringEscape: "#f99b15", - chroma.LiteralStringInterpol: "#f99b15", - chroma.GenericDeleted: "#ef6155", - chroma.GenericEmph: "italic", - chroma.GenericHeading: "bold #2f1e2e", - chroma.GenericInserted: "#48b685", - chroma.GenericPrompt: "bold #8d8687", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "bold #5bc4bf", - chroma.Background: "bg:#e7e9db", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/pastie.go b/vendor/github.com/alecthomas/chroma/styles/pastie.go deleted file mode 100644 index 9a6854439277..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/pastie.go +++ /dev/null @@ -1,52 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Pastie style. 
-var Pastie = Register(chroma.MustNewStyle("pastie", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "#888888", - chroma.CommentPreproc: "bold #cc0000", - chroma.CommentSpecial: "bg:#fff0f0 bold #cc0000", - chroma.LiteralString: "bg:#fff0f0 #dd2200", - chroma.LiteralStringRegex: "bg:#fff0ff #008800", - chroma.LiteralStringOther: "bg:#f0fff0 #22bb22", - chroma.LiteralStringSymbol: "#aa6600", - chroma.LiteralStringInterpol: "#3333bb", - chroma.LiteralStringEscape: "#0044dd", - chroma.OperatorWord: "#008800", - chroma.Keyword: "bold #008800", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "#888888", - chroma.NameClass: "bold #bb0066", - chroma.NameException: "bold #bb0066", - chroma.NameFunction: "bold #0066bb", - chroma.NameProperty: "bold #336699", - chroma.NameNamespace: "bold #bb0066", - chroma.NameBuiltin: "#003388", - chroma.NameVariable: "#336699", - chroma.NameVariableClass: "#336699", - chroma.NameVariableInstance: "#3333bb", - chroma.NameVariableGlobal: "#dd7700", - chroma.NameConstant: "bold #003366", - chroma.NameTag: "bold #bb0066", - chroma.NameAttribute: "#336699", - chroma.NameDecorator: "#555555", - chroma.NameLabel: "italic #336699", - chroma.LiteralNumber: "bold #0000DD", - chroma.GenericHeading: "#333", - chroma.GenericSubheading: "#666", - chroma.GenericDeleted: "bg:#ffdddd #000000", - chroma.GenericInserted: "bg:#ddffdd #000000", - chroma.GenericError: "#aa0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "#555555", - chroma.GenericOutput: "#888888", - chroma.GenericTraceback: "#aa0000", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#e3d2d2 #a61717", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/perldoc.go b/vendor/github.com/alecthomas/chroma/styles/perldoc.go deleted file mode 100644 index e1372fdfb423..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/perldoc.go +++ /dev/null @@ -1,44 
+0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Perldoc style. -var Perldoc = Register(chroma.MustNewStyle("perldoc", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "#228B22", - chroma.CommentPreproc: "#1e889b", - chroma.CommentSpecial: "#8B008B bold", - chroma.LiteralString: "#CD5555", - chroma.LiteralStringHeredoc: "#1c7e71 italic", - chroma.LiteralStringRegex: "#1c7e71", - chroma.LiteralStringOther: "#cb6c20", - chroma.LiteralNumber: "#B452CD", - chroma.OperatorWord: "#8B008B", - chroma.Keyword: "#8B008B bold", - chroma.KeywordType: "#00688B", - chroma.NameClass: "#008b45 bold", - chroma.NameException: "#008b45 bold", - chroma.NameFunction: "#008b45", - chroma.NameNamespace: "#008b45 underline", - chroma.NameVariable: "#00688B", - chroma.NameConstant: "#00688B", - chroma.NameDecorator: "#707a7c", - chroma.NameTag: "#8B008B bold", - chroma.NameAttribute: "#658b00", - chroma.NameBuiltin: "#658b00", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#aa0000", - chroma.GenericInserted: "#00aa00", - chroma.GenericError: "#aa0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "#555555", - chroma.GenericOutput: "#888888", - chroma.GenericTraceback: "#aa0000", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#e3d2d2 #a61717", - chroma.Background: " bg:#eeeedd", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/pygments.go b/vendor/github.com/alecthomas/chroma/styles/pygments.go deleted file mode 100644 index 327033b71790..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/pygments.go +++ /dev/null @@ -1,55 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Pygments default theme. 
-var Pygments = Register(chroma.MustNewStyle("pygments", chroma.StyleEntries{ - chroma.Whitespace: "#bbbbbb", - chroma.Comment: "italic #408080", - chroma.CommentPreproc: "noitalic #BC7A00", - - chroma.Keyword: "bold #008000", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "nobold #B00040", - - chroma.Operator: "#666666", - chroma.OperatorWord: "bold #AA22FF", - - chroma.NameBuiltin: "#008000", - chroma.NameFunction: "#0000FF", - chroma.NameClass: "bold #0000FF", - chroma.NameNamespace: "bold #0000FF", - chroma.NameException: "bold #D2413A", - chroma.NameVariable: "#19177C", - chroma.NameConstant: "#880000", - chroma.NameLabel: "#A0A000", - chroma.NameEntity: "bold #999999", - chroma.NameAttribute: "#7D9029", - chroma.NameTag: "bold #008000", - chroma.NameDecorator: "#AA22FF", - - chroma.String: "#BA2121", - chroma.StringDoc: "italic", - chroma.StringInterpol: "bold #BB6688", - chroma.StringEscape: "bold #BB6622", - chroma.StringRegex: "#BB6688", - chroma.StringSymbol: "#19177C", - chroma.StringOther: "#008000", - chroma.Number: "#666666", - - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#A00000", - chroma.GenericInserted: "#00A000", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #000080", - chroma.GenericOutput: "#888", - chroma.GenericTraceback: "#04D", - chroma.GenericUnderline: "underline", - - chroma.Error: "border:#FF0000", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go b/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go deleted file mode 100644 index 37d66ca25ba0..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go +++ /dev/null @@ -1,47 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// RainbowDash style. 
-var RainbowDash = Register(chroma.MustNewStyle("rainbow_dash", chroma.StyleEntries{ - chroma.Comment: "italic #0080ff", - chroma.CommentPreproc: "noitalic", - chroma.CommentSpecial: "bold", - chroma.Error: "bg:#cc0000 #ffffff", - chroma.GenericDeleted: "border:#c5060b bg:#ffcccc", - chroma.GenericEmph: "italic", - chroma.GenericError: "#ff0000", - chroma.GenericHeading: "bold #2c5dcd", - chroma.GenericInserted: "border:#00cc00 bg:#ccffcc", - chroma.GenericOutput: "#aaaaaa", - chroma.GenericPrompt: "bold #2c5dcd", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "bold #2c5dcd", - chroma.GenericTraceback: "#c5060b", - chroma.GenericUnderline: "underline", - chroma.Keyword: "bold #2c5dcd", - chroma.KeywordPseudo: "nobold", - chroma.KeywordType: "#5918bb", - chroma.NameAttribute: "italic #2c5dcd", - chroma.NameBuiltin: "bold #5918bb", - chroma.NameClass: "underline", - chroma.NameConstant: "#318495", - chroma.NameDecorator: "bold #ff8000", - chroma.NameEntity: "bold #5918bb", - chroma.NameException: "bold #5918bb", - chroma.NameFunction: "bold #ff8000", - chroma.NameTag: "bold #2c5dcd", - chroma.LiteralNumber: "bold #5918bb", - chroma.Operator: "#2c5dcd", - chroma.OperatorWord: "bold", - chroma.LiteralString: "#00cc66", - chroma.LiteralStringDoc: "italic", - chroma.LiteralStringEscape: "bold #c5060b", - chroma.LiteralStringOther: "#318495", - chroma.LiteralStringSymbol: "bold #c5060b", - chroma.Text: "#4d4d4d", - chroma.TextWhitespace: "#cbcbcb", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/rrt.go b/vendor/github.com/alecthomas/chroma/styles/rrt.go deleted file mode 100644 index 2ccf2cadfcc3..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/rrt.go +++ /dev/null @@ -1,20 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Rrt style. 
-var Rrt = Register(chroma.MustNewStyle("rrt", chroma.StyleEntries{ - chroma.CommentPreproc: "#e5e5e5", - chroma.Comment: "#00ff00", - chroma.KeywordType: "#ee82ee", - chroma.Keyword: "#ff0000", - chroma.LiteralNumber: "#ff6600", - chroma.LiteralStringSymbol: "#ff6600", - chroma.LiteralString: "#87ceeb", - chroma.NameFunction: "#ffff00", - chroma.NameConstant: "#7fffd4", - chroma.NameVariable: "#eedd82", - chroma.Background: "#f8f8f2 bg:#000000", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go b/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go deleted file mode 100644 index 2724df24e23c..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go +++ /dev/null @@ -1,46 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// SolarizedDark style. -var SolarizedDark = Register(chroma.MustNewStyle("solarized-dark", chroma.StyleEntries{ - chroma.Keyword: "#719e07", - chroma.KeywordConstant: "#CB4B16", - chroma.KeywordDeclaration: "#268BD2", - chroma.KeywordReserved: "#268BD2", - chroma.KeywordType: "#DC322F", - chroma.NameAttribute: "#93A1A1", - chroma.NameBuiltin: "#B58900", - chroma.NameBuiltinPseudo: "#268BD2", - chroma.NameClass: "#268BD2", - chroma.NameConstant: "#CB4B16", - chroma.NameDecorator: "#268BD2", - chroma.NameEntity: "#CB4B16", - chroma.NameException: "#CB4B16", - chroma.NameFunction: "#268BD2", - chroma.NameTag: "#268BD2", - chroma.NameVariable: "#268BD2", - chroma.LiteralString: "#2AA198", - chroma.LiteralStringBacktick: "#586E75", - chroma.LiteralStringChar: "#2AA198", - chroma.LiteralStringDoc: "#93A1A1", - chroma.LiteralStringEscape: "#CB4B16", - chroma.LiteralStringHeredoc: "#93A1A1", - chroma.LiteralStringRegex: "#DC322F", - chroma.LiteralNumber: "#2AA198", - chroma.Operator: "#719e07", - chroma.Comment: "#586E75", - chroma.CommentPreproc: "#719e07", - chroma.CommentSpecial: "#719e07", - chroma.GenericDeleted: "#DC322F", - chroma.GenericEmph: "italic", - 
chroma.GenericError: "#DC322F bold", - chroma.GenericHeading: "#CB4B16", - chroma.GenericInserted: "#719e07", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "#268BD2", - chroma.Background: "#93A1A1 bg:#002B36", - chroma.Other: "#CB4B16", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go b/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go deleted file mode 100644 index a24ddc153926..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go +++ /dev/null @@ -1,48 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// SolarizedDark256 style. -var SolarizedDark256 = Register(chroma.MustNewStyle("solarized-dark256", chroma.StyleEntries{ - chroma.Keyword: "#5f8700", - chroma.KeywordConstant: "#d75f00", - chroma.KeywordDeclaration: "#0087ff", - chroma.KeywordNamespace: "#d75f00", - chroma.KeywordReserved: "#0087ff", - chroma.KeywordType: "#af0000", - chroma.NameAttribute: "#8a8a8a", - chroma.NameBuiltin: "#0087ff", - chroma.NameBuiltinPseudo: "#0087ff", - chroma.NameClass: "#0087ff", - chroma.NameConstant: "#d75f00", - chroma.NameDecorator: "#0087ff", - chroma.NameEntity: "#d75f00", - chroma.NameException: "#af8700", - chroma.NameFunction: "#0087ff", - chroma.NameTag: "#0087ff", - chroma.NameVariable: "#0087ff", - chroma.LiteralString: "#00afaf", - chroma.LiteralStringBacktick: "#4e4e4e", - chroma.LiteralStringChar: "#00afaf", - chroma.LiteralStringDoc: "#00afaf", - chroma.LiteralStringEscape: "#af0000", - chroma.LiteralStringHeredoc: "#00afaf", - chroma.LiteralStringRegex: "#af0000", - chroma.LiteralNumber: "#00afaf", - chroma.Operator: "#8a8a8a", - chroma.OperatorWord: "#5f8700", - chroma.Comment: "#4e4e4e", - chroma.CommentPreproc: "#5f8700", - chroma.CommentSpecial: "#5f8700", - chroma.GenericDeleted: "#af0000", - chroma.GenericEmph: "italic", - chroma.GenericError: "#af0000 bold", - chroma.GenericHeading: "#d75f00", - chroma.GenericInserted: "#5f8700", - 
chroma.GenericStrong: "bold", - chroma.GenericSubheading: "#0087ff", - chroma.Background: "#8a8a8a bg:#1c1c1c", - chroma.Other: "#d75f00", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-light.go b/vendor/github.com/alecthomas/chroma/styles/solarized-light.go deleted file mode 100644 index b6d5234a787d..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/solarized-light.go +++ /dev/null @@ -1,24 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// SolarizedLight style. -var SolarizedLight = Register(chroma.MustNewStyle("solarized-light", chroma.StyleEntries{ - chroma.Text: "bg: #eee8d5 #586e75", - chroma.Keyword: "#859900", - chroma.KeywordConstant: "bold", - chroma.KeywordNamespace: "#dc322f bold", - chroma.KeywordType: "bold", - chroma.Name: "#268bd2", - chroma.NameBuiltin: "#cb4b16", - chroma.NameClass: "#cb4b16", - chroma.NameTag: "bold", - chroma.Literal: "#2aa198", - chroma.LiteralNumber: "bold", - chroma.OperatorWord: "#859900", - chroma.Comment: "#93a1a1 italic", - chroma.Generic: "#d33682", - chroma.Background: " bg:#eee8d5", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/swapoff.go b/vendor/github.com/alecthomas/chroma/styles/swapoff.go deleted file mode 100644 index e4daae61c16f..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/swapoff.go +++ /dev/null @@ -1,25 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// SwapOff theme. 
-var SwapOff = Register(chroma.MustNewStyle("swapoff", chroma.StyleEntries{ - chroma.Background: "#lightgray bg:#black", - chroma.Number: "bold #ansiyellow", - chroma.Comment: "#ansiteal", - chroma.CommentPreproc: "bold #ansigreen", - chroma.String: "bold #ansiturquoise", - chroma.Keyword: "bold #ansiwhite", - chroma.NameKeyword: "bold #ansiwhite", - chroma.NameBuiltin: "bold #ansiwhite", - chroma.GenericHeading: "bold", - chroma.GenericSubheading: "bold", - chroma.GenericStrong: "bold", - chroma.GenericUnderline: "underline", - chroma.NameTag: "bold", - chroma.NameAttribute: "#ansiteal", - chroma.Error: "#ansired", - chroma.LiteralDate: "bold #ansiyellow", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/tango.go b/vendor/github.com/alecthomas/chroma/styles/tango.go deleted file mode 100644 index ae1afe064318..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/tango.go +++ /dev/null @@ -1,79 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Tango style. 
-var Tango = Register(chroma.MustNewStyle("tango", chroma.StyleEntries{ - chroma.TextWhitespace: "underline #f8f8f8", - chroma.Error: "#a40000 border:#ef2929", - chroma.Other: "#000000", - chroma.Comment: "italic #8f5902", - chroma.CommentMultiline: "italic #8f5902", - chroma.CommentPreproc: "italic #8f5902", - chroma.CommentSingle: "italic #8f5902", - chroma.CommentSpecial: "italic #8f5902", - chroma.Keyword: "bold #204a87", - chroma.KeywordConstant: "bold #204a87", - chroma.KeywordDeclaration: "bold #204a87", - chroma.KeywordNamespace: "bold #204a87", - chroma.KeywordPseudo: "bold #204a87", - chroma.KeywordReserved: "bold #204a87", - chroma.KeywordType: "bold #204a87", - chroma.Operator: "bold #ce5c00", - chroma.OperatorWord: "bold #204a87", - chroma.Punctuation: "bold #000000", - chroma.Name: "#000000", - chroma.NameAttribute: "#c4a000", - chroma.NameBuiltin: "#204a87", - chroma.NameBuiltinPseudo: "#3465a4", - chroma.NameClass: "#000000", - chroma.NameConstant: "#000000", - chroma.NameDecorator: "bold #5c35cc", - chroma.NameEntity: "#ce5c00", - chroma.NameException: "bold #cc0000", - chroma.NameFunction: "#000000", - chroma.NameProperty: "#000000", - chroma.NameLabel: "#f57900", - chroma.NameNamespace: "#000000", - chroma.NameOther: "#000000", - chroma.NameTag: "bold #204a87", - chroma.NameVariable: "#000000", - chroma.NameVariableClass: "#000000", - chroma.NameVariableGlobal: "#000000", - chroma.NameVariableInstance: "#000000", - chroma.LiteralNumber: "bold #0000cf", - chroma.LiteralNumberFloat: "bold #0000cf", - chroma.LiteralNumberHex: "bold #0000cf", - chroma.LiteralNumberInteger: "bold #0000cf", - chroma.LiteralNumberIntegerLong: "bold #0000cf", - chroma.LiteralNumberOct: "bold #0000cf", - chroma.Literal: "#000000", - chroma.LiteralDate: "#000000", - chroma.LiteralString: "#4e9a06", - chroma.LiteralStringBacktick: "#4e9a06", - chroma.LiteralStringChar: "#4e9a06", - chroma.LiteralStringDoc: "italic #8f5902", - chroma.LiteralStringDouble: "#4e9a06", - 
chroma.LiteralStringEscape: "#4e9a06", - chroma.LiteralStringHeredoc: "#4e9a06", - chroma.LiteralStringInterpol: "#4e9a06", - chroma.LiteralStringOther: "#4e9a06", - chroma.LiteralStringRegex: "#4e9a06", - chroma.LiteralStringSingle: "#4e9a06", - chroma.LiteralStringSymbol: "#4e9a06", - chroma.Generic: "#000000", - chroma.GenericDeleted: "#a40000", - chroma.GenericEmph: "italic #000000", - chroma.GenericError: "#ef2929", - chroma.GenericHeading: "bold #000080", - chroma.GenericInserted: "#00A000", - chroma.GenericOutput: "italic #000000", - chroma.GenericPrompt: "#8f5902", - chroma.GenericStrong: "bold #000000", - chroma.GenericSubheading: "bold #800080", - chroma.GenericTraceback: "bold #a40000", - chroma.GenericUnderline: "underline", - chroma.Background: " bg:#f8f8f8", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/trac.go b/vendor/github.com/alecthomas/chroma/styles/trac.go deleted file mode 100644 index 3b09c44e83fb..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/trac.go +++ /dev/null @@ -1,42 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Trac style. 
-var Trac = Register(chroma.MustNewStyle("trac", chroma.StyleEntries{ - chroma.TextWhitespace: "#bbbbbb", - chroma.Comment: "italic #999988", - chroma.CommentPreproc: "bold noitalic #999999", - chroma.CommentSpecial: "bold #999999", - chroma.Operator: "bold", - chroma.LiteralString: "#bb8844", - chroma.LiteralStringRegex: "#808000", - chroma.LiteralNumber: "#009999", - chroma.Keyword: "bold", - chroma.KeywordType: "#445588", - chroma.NameBuiltin: "#999999", - chroma.NameFunction: "bold #990000", - chroma.NameClass: "bold #445588", - chroma.NameException: "bold #990000", - chroma.NameNamespace: "#555555", - chroma.NameVariable: "#008080", - chroma.NameConstant: "#008080", - chroma.NameTag: "#000080", - chroma.NameAttribute: "#008080", - chroma.NameEntity: "#800080", - chroma.GenericHeading: "#999999", - chroma.GenericSubheading: "#aaaaaa", - chroma.GenericDeleted: "bg:#ffdddd #000000", - chroma.GenericInserted: "bg:#ddffdd #000000", - chroma.GenericError: "#aa0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "#555555", - chroma.GenericOutput: "#888888", - chroma.GenericTraceback: "#aa0000", - chroma.GenericUnderline: "underline", - chroma.Error: "bg:#e3d2d2 #a61717", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vim.go b/vendor/github.com/alecthomas/chroma/styles/vim.go deleted file mode 100644 index 6296042c20d2..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/vim.go +++ /dev/null @@ -1,36 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Vim style. 
-var Vim = Register(chroma.MustNewStyle("vim", chroma.StyleEntries{ - chroma.Background: "#cccccc bg:#000000", - chroma.Comment: "#000080", - chroma.CommentSpecial: "bold #cd0000", - chroma.Keyword: "#cdcd00", - chroma.KeywordDeclaration: "#00cd00", - chroma.KeywordNamespace: "#cd00cd", - chroma.KeywordType: "#00cd00", - chroma.Operator: "#3399cc", - chroma.OperatorWord: "#cdcd00", - chroma.NameClass: "#00cdcd", - chroma.NameBuiltin: "#cd00cd", - chroma.NameException: "bold #666699", - chroma.NameVariable: "#00cdcd", - chroma.LiteralString: "#cd0000", - chroma.LiteralNumber: "#cd00cd", - chroma.GenericHeading: "bold #000080", - chroma.GenericSubheading: "bold #800080", - chroma.GenericDeleted: "#cd0000", - chroma.GenericInserted: "#00cd00", - chroma.GenericError: "#FF0000", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold #000080", - chroma.GenericOutput: "#888", - chroma.GenericTraceback: "#04D", - chroma.GenericUnderline: "underline", - chroma.Error: "border:#FF0000", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vs.go b/vendor/github.com/alecthomas/chroma/styles/vs.go deleted file mode 100644 index acbcb9153f8a..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/vs.go +++ /dev/null @@ -1,23 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// VisualStudio style. 
-var VisualStudio = Register(chroma.MustNewStyle("vs", chroma.StyleEntries{ - chroma.Comment: "#008000", - chroma.CommentPreproc: "#0000ff", - chroma.Keyword: "#0000ff", - chroma.OperatorWord: "#0000ff", - chroma.KeywordType: "#2b91af", - chroma.NameClass: "#2b91af", - chroma.LiteralString: "#a31515", - chroma.GenericHeading: "bold", - chroma.GenericSubheading: "bold", - chroma.GenericEmph: "italic", - chroma.GenericStrong: "bold", - chroma.GenericPrompt: "bold", - chroma.Error: "border:#FF0000", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vulcan.go b/vendor/github.com/alecthomas/chroma/styles/vulcan.go deleted file mode 100644 index 82ad1a14b7b1..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/vulcan.go +++ /dev/null @@ -1,95 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -var ( - // inspired by Doom Emacs's One Doom Theme - black = "#282C34" - grey = "#3E4460" - grey2 = "#43454f" - white = "#C9C9C9" - red = "#CF5967" - yellow = "#ECBE7B" - green = "#82CC6A" - cyan = "#56B6C2" - blue = "#7FBAF5" - blue2 = "#57C7FF" - purple = "#BC74C4" -) - -var Vulcan = Register(chroma.MustNewStyle("vulcan", chroma.StyleEntries{ - chroma.Comment: grey, - chroma.CommentHashbang: grey + " italic", - chroma.CommentMultiline: grey, - chroma.CommentPreproc: blue, - chroma.CommentSingle: grey, - chroma.CommentSpecial: purple + " italic", - chroma.Generic: white, - chroma.GenericDeleted: red, - chroma.GenericEmph: white + " underline", - chroma.GenericError: red + " bold", - chroma.GenericHeading: yellow + " bold", - chroma.GenericInserted: yellow, - chroma.GenericOutput: grey2, - chroma.GenericPrompt: white, - chroma.GenericStrong: red + " bold", - chroma.GenericSubheading: red + " italic", - chroma.GenericTraceback: white, - chroma.GenericUnderline: "underline", - chroma.Error: red, - chroma.Keyword: blue, - chroma.KeywordConstant: red + " bg:" + grey2, - chroma.KeywordDeclaration: blue, - 
chroma.KeywordNamespace: purple, - chroma.KeywordPseudo: purple, - chroma.KeywordReserved: blue, - chroma.KeywordType: blue2 + " bold", - chroma.Literal: white, - chroma.LiteralDate: blue2, - chroma.Name: white, - chroma.NameAttribute: purple, - chroma.NameBuiltin: blue, - chroma.NameBuiltinPseudo: blue, - chroma.NameClass: yellow, - chroma.NameConstant: yellow, - chroma.NameDecorator: yellow, - chroma.NameEntity: white, - chroma.NameException: red, - chroma.NameFunction: blue2, - chroma.NameLabel: red, - chroma.NameNamespace: white, - chroma.NameOther: white, - chroma.NameTag: purple, - chroma.NameVariable: purple + " italic", - chroma.NameVariableClass: blue2 + " bold", - chroma.NameVariableGlobal: yellow, - chroma.NameVariableInstance: blue2, - chroma.LiteralNumber: cyan, - chroma.LiteralNumberBin: blue2, - chroma.LiteralNumberFloat: cyan, - chroma.LiteralNumberHex: blue2, - chroma.LiteralNumberInteger: cyan, - chroma.LiteralNumberIntegerLong: cyan, - chroma.LiteralNumberOct: blue2, - chroma.Operator: purple, - chroma.OperatorWord: purple, - chroma.Other: white, - chroma.Punctuation: cyan, - chroma.LiteralString: green, - chroma.LiteralStringBacktick: blue2, - chroma.LiteralStringChar: blue2, - chroma.LiteralStringDoc: green, - chroma.LiteralStringDouble: green, - chroma.LiteralStringEscape: cyan, - chroma.LiteralStringHeredoc: cyan, - chroma.LiteralStringInterpol: green, - chroma.LiteralStringOther: green, - chroma.LiteralStringRegex: blue2, - chroma.LiteralStringSingle: green, - chroma.LiteralStringSymbol: green, - chroma.Text: white, - chroma.TextWhitespace: white, - chroma.Background: " bg: " + black, -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/witchhazel.go b/vendor/github.com/alecthomas/chroma/styles/witchhazel.go deleted file mode 100644 index 4aea2788f48e..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/witchhazel.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2018 Alethea Katherine Flowers -// -// Licensed under the 
Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// WitchHazel Style -var WitchHazel = Register(chroma.MustNewStyle("witchhazel", chroma.StyleEntries{ - chroma.Text: "#F8F8F2", - chroma.Whitespace: "#A8757B", - chroma.Error: "#960050 bg:#1e0010", - chroma.Comment: "#b0bec5", - chroma.Keyword: "#C2FFDF", - chroma.KeywordNamespace: "#FFB8D1", - chroma.Operator: "#FFB8D1", - chroma.Punctuation: "#F8F8F2", - chroma.Name: "#F8F8F2", - chroma.NameAttribute: "#ceb1ff", - chroma.NameBuiltinPseudo: "#80cbc4", - chroma.NameClass: "#ceb1ff", - chroma.NameConstant: "#C5A3FF", - chroma.NameDecorator: "#ceb1ff", - chroma.NameException: "#ceb1ff", - chroma.NameFunction: "#ceb1ff", - chroma.NameProperty: "#F8F8F2", - chroma.NameTag: "#FFB8D1", - chroma.NameVariable: "#F8F8F2", - chroma.Number: "#C5A3FF", - chroma.Literal: "#ae81ff", - chroma.LiteralDate: "#e6db74", - chroma.String: "#1bc5e0", - chroma.GenericDeleted: "#f92672", - chroma.GenericEmph: "italic", - chroma.GenericInserted: "#a6e22e", - chroma.GenericStrong: "bold", - chroma.GenericSubheading: "#75715e", - chroma.Background: " bg:#433e56", -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/xcode-dark.go b/vendor/github.com/alecthomas/chroma/styles/xcode-dark.go deleted file mode 100644 index 9a9d7579a406..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/xcode-dark.go +++ /dev/null @@ -1,62 +0,0 @@ -package styles - -import ( - 
"github.com/alecthomas/chroma" -) - -var ( - // Inspired by Apple's Xcode "Default (Dark)" Theme - background = "#1F1F24" - plainText = "#FFFFFF" - comments = "#6C7986" - strings = "#FC6A5D" - numbers = "#D0BF69" - keywords = "#FC5FA3" - preprocessorStatements = "#FD8F3F" - typeDeclarations = "#5DD8FF" - otherDeclarations = "#41A1C0" - otherFunctionAndMethodNames = "#A167E6" - otherTypeNames = "#D0A8FF" -) - -// Xcode dark style -var XcodeDark = Register(chroma.MustNewStyle("xcode-dark", chroma.StyleEntries{ - chroma.Background: plainText + " bg:" + background, - - chroma.Comment: comments, - chroma.CommentMultiline: comments, - chroma.CommentPreproc: preprocessorStatements, - chroma.CommentSingle: comments, - chroma.CommentSpecial: comments + " italic", - - chroma.Error: "#960050", - - chroma.Keyword: keywords, - chroma.KeywordConstant: keywords, - chroma.KeywordDeclaration: keywords, - chroma.KeywordReserved: keywords, - - chroma.LiteralNumber: numbers, - chroma.LiteralNumberBin: numbers, - chroma.LiteralNumberFloat: numbers, - chroma.LiteralNumberHex: numbers, - chroma.LiteralNumberInteger: numbers, - chroma.LiteralNumberOct: numbers, - - chroma.LiteralString: strings, - chroma.LiteralStringEscape: strings, - chroma.LiteralStringInterpol: plainText, - - chroma.Name: plainText, - chroma.NameBuiltin: otherTypeNames, - chroma.NameBuiltinPseudo: otherFunctionAndMethodNames, - chroma.NameClass: typeDeclarations, - chroma.NameFunction: otherDeclarations, - chroma.NameVariable: otherDeclarations, - - chroma.Operator: plainText, - - chroma.Punctuation: plainText, - - chroma.Text: plainText, -})) diff --git a/vendor/github.com/alecthomas/chroma/styles/xcode.go b/vendor/github.com/alecthomas/chroma/styles/xcode.go deleted file mode 100644 index 115cf71cd64f..000000000000 --- a/vendor/github.com/alecthomas/chroma/styles/xcode.go +++ /dev/null @@ -1,29 +0,0 @@ -package styles - -import ( - "github.com/alecthomas/chroma" -) - -// Xcode style. 
-var Xcode = Register(chroma.MustNewStyle("xcode", chroma.StyleEntries{ - chroma.Comment: "#177500", - chroma.CommentPreproc: "#633820", - chroma.LiteralString: "#C41A16", - chroma.LiteralStringChar: "#2300CE", - chroma.Operator: "#000000", - chroma.Keyword: "#A90D91", - chroma.Name: "#000000", - chroma.NameAttribute: "#836C28", - chroma.NameClass: "#3F6E75", - chroma.NameFunction: "#000000", - chroma.NameBuiltin: "#A90D91", - chroma.NameBuiltinPseudo: "#5B269A", - chroma.NameVariable: "#000000", - chroma.NameTag: "#000000", - chroma.NameDecorator: "#000000", - chroma.NameLabel: "#000000", - chroma.Literal: "#1C01CE", - chroma.LiteralNumber: "#1C01CE", - chroma.Error: "#000000", - chroma.Background: " bg:#ffffff", -})) diff --git a/vendor/github.com/alecthomas/chroma/tokentype_string.go b/vendor/github.com/alecthomas/chroma/tokentype_string.go deleted file mode 100644 index 9c302f9c9c17..000000000000 --- a/vendor/github.com/alecthomas/chroma/tokentype_string.go +++ /dev/null @@ -1,219 +0,0 @@ -// Code generated by "stringer -type TokenType"; DO NOT EDIT. - -package chroma - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[Background - -1] - _ = x[PreWrapper - -2] - _ = x[Line - -3] - _ = x[LineNumbers - -4] - _ = x[LineNumbersTable - -5] - _ = x[LineHighlight - -6] - _ = x[LineTable - -7] - _ = x[LineTableTD - -8] - _ = x[CodeLine - -9] - _ = x[Error - -10] - _ = x[Other - -11] - _ = x[None - -12] - _ = x[EOFType-0] - _ = x[Keyword-1000] - _ = x[KeywordConstant-1001] - _ = x[KeywordDeclaration-1002] - _ = x[KeywordNamespace-1003] - _ = x[KeywordPseudo-1004] - _ = x[KeywordReserved-1005] - _ = x[KeywordType-1006] - _ = x[Name-2000] - _ = x[NameAttribute-2001] - _ = x[NameBuiltin-2002] - _ = x[NameBuiltinPseudo-2003] - _ = x[NameClass-2004] - _ = x[NameConstant-2005] - _ = x[NameDecorator-2006] - _ = x[NameEntity-2007] - _ = x[NameException-2008] - _ = x[NameFunction-2009] - _ = x[NameFunctionMagic-2010] - _ = x[NameKeyword-2011] - _ = x[NameLabel-2012] - _ = x[NameNamespace-2013] - _ = x[NameOperator-2014] - _ = x[NameOther-2015] - _ = x[NamePseudo-2016] - _ = x[NameProperty-2017] - _ = x[NameTag-2018] - _ = x[NameVariable-2019] - _ = x[NameVariableAnonymous-2020] - _ = x[NameVariableClass-2021] - _ = x[NameVariableGlobal-2022] - _ = x[NameVariableInstance-2023] - _ = x[NameVariableMagic-2024] - _ = x[Literal-3000] - _ = x[LiteralDate-3001] - _ = x[LiteralOther-3002] - _ = x[LiteralString-3100] - _ = x[LiteralStringAffix-3101] - _ = x[LiteralStringAtom-3102] - _ = x[LiteralStringBacktick-3103] - _ = x[LiteralStringBoolean-3104] - _ = x[LiteralStringChar-3105] - _ = x[LiteralStringDelimiter-3106] - _ = x[LiteralStringDoc-3107] - _ = x[LiteralStringDouble-3108] - _ = x[LiteralStringEscape-3109] - _ = x[LiteralStringHeredoc-3110] - _ = x[LiteralStringInterpol-3111] - _ = x[LiteralStringName-3112] - _ = x[LiteralStringOther-3113] - _ = x[LiteralStringRegex-3114] - _ = x[LiteralStringSingle-3115] - _ = x[LiteralStringSymbol-3116] - _ = x[LiteralNumber-3200] - _ = x[LiteralNumberBin-3201] - _ = x[LiteralNumberFloat-3202] - _ = x[LiteralNumberHex-3203] - _ = 
x[LiteralNumberInteger-3204] - _ = x[LiteralNumberIntegerLong-3205] - _ = x[LiteralNumberOct-3206] - _ = x[Operator-4000] - _ = x[OperatorWord-4001] - _ = x[Punctuation-5000] - _ = x[Comment-6000] - _ = x[CommentHashbang-6001] - _ = x[CommentMultiline-6002] - _ = x[CommentSingle-6003] - _ = x[CommentSpecial-6004] - _ = x[CommentPreproc-6100] - _ = x[CommentPreprocFile-6101] - _ = x[Generic-7000] - _ = x[GenericDeleted-7001] - _ = x[GenericEmph-7002] - _ = x[GenericError-7003] - _ = x[GenericHeading-7004] - _ = x[GenericInserted-7005] - _ = x[GenericOutput-7006] - _ = x[GenericPrompt-7007] - _ = x[GenericStrong-7008] - _ = x[GenericSubheading-7009] - _ = x[GenericTraceback-7010] - _ = x[GenericUnderline-7011] - _ = x[Text-8000] - _ = x[TextWhitespace-8001] - _ = x[TextSymbol-8002] - _ = x[TextPunctuation-8003] -} - -const _TokenType_name = "NoneOtherErrorCodeLineLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersLinePreWrapperBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGener
icEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation" - -var _TokenType_map = map[TokenType]string{ - -12: _TokenType_name[0:4], - -11: _TokenType_name[4:9], - -10: _TokenType_name[9:14], - -9: _TokenType_name[14:22], - -8: _TokenType_name[22:33], - -7: _TokenType_name[33:42], - -6: _TokenType_name[42:55], - -5: _TokenType_name[55:71], - -4: _TokenType_name[71:82], - -3: _TokenType_name[82:86], - -2: _TokenType_name[86:96], - -1: _TokenType_name[96:106], - 0: _TokenType_name[106:113], - 1000: _TokenType_name[113:120], - 1001: _TokenType_name[120:135], - 1002: _TokenType_name[135:153], - 1003: _TokenType_name[153:169], - 1004: _TokenType_name[169:182], - 1005: _TokenType_name[182:197], - 1006: _TokenType_name[197:208], - 2000: _TokenType_name[208:212], - 2001: _TokenType_name[212:225], - 2002: _TokenType_name[225:236], - 2003: _TokenType_name[236:253], - 2004: _TokenType_name[253:262], - 2005: _TokenType_name[262:274], - 2006: _TokenType_name[274:287], - 2007: _TokenType_name[287:297], - 2008: _TokenType_name[297:310], - 2009: _TokenType_name[310:322], - 2010: _TokenType_name[322:339], - 2011: _TokenType_name[339:350], - 2012: _TokenType_name[350:359], - 2013: _TokenType_name[359:372], - 2014: _TokenType_name[372:384], - 2015: _TokenType_name[384:393], - 2016: _TokenType_name[393:403], - 2017: _TokenType_name[403:415], - 2018: _TokenType_name[415:422], - 2019: _TokenType_name[422:434], - 2020: _TokenType_name[434:455], - 2021: _TokenType_name[455:472], - 2022: _TokenType_name[472:490], - 2023: _TokenType_name[490:510], - 2024: _TokenType_name[510:527], - 3000: _TokenType_name[527:534], - 3001: _TokenType_name[534:545], - 3002: _TokenType_name[545:557], - 3100: _TokenType_name[557:570], - 3101: _TokenType_name[570:588], - 3102: _TokenType_name[588:605], - 3103: _TokenType_name[605:626], - 3104: _TokenType_name[626:646], - 3105: 
_TokenType_name[646:663], - 3106: _TokenType_name[663:685], - 3107: _TokenType_name[685:701], - 3108: _TokenType_name[701:720], - 3109: _TokenType_name[720:739], - 3110: _TokenType_name[739:759], - 3111: _TokenType_name[759:780], - 3112: _TokenType_name[780:797], - 3113: _TokenType_name[797:815], - 3114: _TokenType_name[815:833], - 3115: _TokenType_name[833:852], - 3116: _TokenType_name[852:871], - 3200: _TokenType_name[871:884], - 3201: _TokenType_name[884:900], - 3202: _TokenType_name[900:918], - 3203: _TokenType_name[918:934], - 3204: _TokenType_name[934:954], - 3205: _TokenType_name[954:978], - 3206: _TokenType_name[978:994], - 4000: _TokenType_name[994:1002], - 4001: _TokenType_name[1002:1014], - 5000: _TokenType_name[1014:1025], - 6000: _TokenType_name[1025:1032], - 6001: _TokenType_name[1032:1047], - 6002: _TokenType_name[1047:1063], - 6003: _TokenType_name[1063:1076], - 6004: _TokenType_name[1076:1090], - 6100: _TokenType_name[1090:1104], - 6101: _TokenType_name[1104:1122], - 7000: _TokenType_name[1122:1129], - 7001: _TokenType_name[1129:1143], - 7002: _TokenType_name[1143:1154], - 7003: _TokenType_name[1154:1166], - 7004: _TokenType_name[1166:1180], - 7005: _TokenType_name[1180:1195], - 7006: _TokenType_name[1195:1208], - 7007: _TokenType_name[1208:1221], - 7008: _TokenType_name[1221:1234], - 7009: _TokenType_name[1234:1251], - 7010: _TokenType_name[1251:1267], - 7011: _TokenType_name[1267:1283], - 8000: _TokenType_name[1283:1287], - 8001: _TokenType_name[1287:1301], - 8002: _TokenType_name[1301:1311], - 8003: _TokenType_name[1311:1326], -} - -func (i TokenType) String() string { - if str, ok := _TokenType_map[i]; ok { - return str - } - return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")" -} diff --git a/vendor/github.com/alecthomas/chroma/v2/.editorconfig b/vendor/github.com/alecthomas/chroma/v2/.editorconfig new file mode 100644 index 000000000000..cfb2c669e7c4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/.editorconfig @@ -0,0 +1,17 
@@ +root = true + +[*] +indent_style = tab +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.xml] +indent_style = space +indent_size = 2 +insert_final_newline = false + +[*.yml] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/alecthomas/chroma/.gitignore b/vendor/github.com/alecthomas/chroma/v2/.gitignore similarity index 88% rename from vendor/github.com/alecthomas/chroma/.gitignore rename to vendor/github.com/alecthomas/chroma/v2/.gitignore index ccacd12e9881..8cbdd75ea032 100644 --- a/vendor/github.com/alecthomas/chroma/.gitignore +++ b/vendor/github.com/alecthomas/chroma/v2/.gitignore @@ -1,4 +1,8 @@ # Binaries for programs and plugins +.git +.idea +.vscode +.hermit *.exe *.dll *.so @@ -17,3 +21,5 @@ _models/ _examples/ +*.min.* +build/ diff --git a/vendor/github.com/alecthomas/chroma/.golangci.yml b/vendor/github.com/alecthomas/chroma/v2/.golangci.yml similarity index 84% rename from vendor/github.com/alecthomas/chroma/.golangci.yml rename to vendor/github.com/alecthomas/chroma/v2/.golangci.yml index ba7557339e9d..668be374ab99 100644 --- a/vendor/github.com/alecthomas/chroma/.golangci.yml +++ b/vendor/github.com/alecthomas/chroma/v2/.golangci.yml @@ -36,6 +36,21 @@ linters: - ifshort - wrapcheck - stylecheck + - thelper + - nonamedreturns + - revive + - dupword + - exhaustruct + - varnamelen + - forcetypeassert + - ireturn + - maintidx + - govet + - nosnakecase + - testableexamples + - musttag + - depguard + - goconst linters-settings: govet: @@ -48,8 +63,8 @@ linters-settings: min-len: 8 min-occurrences: 3 forbidigo: - forbid: - - (Must)?NewLexer + #forbid: + # - (Must)?NewLexer$ exclude_godoc_examples: false @@ -74,3 +89,4 @@ issues: - 'methods on the same type should have the same receiver name' - '_TokenType_name should be _TokenTypeName' - '`_TokenType_map` should be `_TokenTypeMap`' + - 'rewrite if-else to switch statement' diff --git 
a/vendor/github.com/alecthomas/chroma/.goreleaser.yml b/vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml similarity index 100% rename from vendor/github.com/alecthomas/chroma/.goreleaser.yml rename to vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml diff --git a/vendor/github.com/alecthomas/chroma/v2/Bitfile b/vendor/github.com/alecthomas/chroma/v2/Bitfile new file mode 100644 index 000000000000..bf158633a1c7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/Bitfile @@ -0,0 +1,24 @@ +VERSION = %(git describe --tags --dirty --always)% +export CGOENABLED = 0 + +tokentype_enumer.go: types.go + build: go generate + +# Regenerate the list of lexers in the README +README.md: lexers/*.go lexers/*/*.xml table.py + build: ./table.py + -clean + +implicit %{1}%{2}.min.%{3}: **/*.{css,js} + build: esbuild --bundle %{IN} --minify --outfile=%{OUT} + +implicit build/%{1}: cmd/* + cd cmd/%{1} + inputs: cmd/%{1}/**/* **/*.go + build: go build -ldflags="-X 'main.version=%{VERSION}'" -o ../../build/%{1} . 
+ +#upload: chromad +# build: +# scp chromad root@swapoff.org: +# ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart' +# touch upload diff --git a/vendor/github.com/alecthomas/chroma/COPYING b/vendor/github.com/alecthomas/chroma/v2/COPYING similarity index 100% rename from vendor/github.com/alecthomas/chroma/COPYING rename to vendor/github.com/alecthomas/chroma/v2/COPYING diff --git a/vendor/github.com/alecthomas/chroma/v2/Makefile b/vendor/github.com/alecthomas/chroma/v2/Makefile new file mode 100644 index 000000000000..e2ff762e67d5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/Makefile @@ -0,0 +1,23 @@ +.PHONY: chromad upload all + +VERSION ?= $(shell git describe --tags --dirty --always) +export GOOS ?= linux +export GOARCH ?= amd64 + +all: README.md tokentype_string.go + +README.md: lexers/*/*.go + ./table.py + +tokentype_string.go: types.go + go generate + +chromad: + rm -rf build + esbuild --bundle cmd/chromad/static/index.js --minify --outfile=cmd/chromad/static/index.min.js + esbuild --bundle cmd/chromad/static/index.css --minify --outfile=cmd/chromad/static/index.min.css + (export CGOENABLED=0 ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../build/chromad .) 
+ +upload: build/chromad + scp build/chromad root@swapoff.org: && \ + ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart' diff --git a/vendor/github.com/alecthomas/chroma/v2/README.md b/vendor/github.com/alecthomas/chroma/v2/README.md new file mode 100644 index 000000000000..775d3af2f840 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/README.md @@ -0,0 +1,297 @@ +# Chroma — A general purpose syntax highlighter in pure Go + +[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/) + +Chroma takes source code and other structured text and converts it into syntax +highlighted HTML, ANSI-coloured text, etc. + +Chroma is based heavily on [Pygments](http://pygments.org/), and includes +translators for Pygments lexers and styles. + +## Table of Contents + + + +1. [Supported languages](#supported-languages) +2. [Try it](#try-it) +3. [Using the library](#using-the-library) + 1. [Quick start](#quick-start) + 2. [Identifying the language](#identifying-the-language) + 3. [Formatting the output](#formatting-the-output) + 4. [The HTML formatter](#the-html-formatter) +4. [More detail](#more-detail) + 1. [Lexers](#lexers) + 2. [Formatters](#formatters) + 3. [Styles](#styles) +5. [Command-line interface](#command-line-interface) +6. [Testing lexers](#testing-lexers) +7. 
[What's missing compared to Pygments?](#whats-missing-compared-to-pygments) + + + +## Supported languages + +| Prefix | Language | +| :----: | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk | +| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck | +| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython | +| D | D, Dart, Dax, Desktop Entry, Diff, Django/Jinja, dns, Docker, DTD, Dylan | +| E | EBNF, Elixir, Elm, EmacsLisp, Erlang | +| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp | +| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy | +| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy | +| I | Idris, Igor, INI, Io, ISCdhcpd | +| J | J, Java, JavaScript, JSON, Julia, Jungle | +| K | Kotlin | +| L | Lighttpd configuration file, LLVM, Lua | +| M | Makefile, Mako, markdown, Mason, Materialize SQL dialect, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL | +| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix | +| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode | +| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, Promela, 
properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 | +| Q | QBasic, QML | +| R | R, Racket, Ragel, Raku, react, ReasonML, reg, Rego, reStructuredText, Rexx, RPMSpec, Ruby, Rust | +| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog | +| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData | +| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue | +| W | WDTE, WebGPU Shading Language, Whiley | +| X | XML, Xorg | +| Y | YAML, YANG | +| Z | Z80 Assembly, Zed, Zig | + +_I will attempt to keep this section up to date, but an authoritative list can be +displayed with `chroma --list`._ + +## Try it + +Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/). + +## Using the library + +This is version 2 of Chroma, use the import path: + +```go +import "github.com/alecthomas/chroma/v2" +``` + +Chroma, like Pygments, has the concepts of +[lexers](https://github.com/alecthomas/chroma/tree/master/lexers), +[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and +[styles](https://github.com/alecthomas/chroma/tree/master/styles). + +Lexers convert source text into a stream of tokens, styles specify how token +types are mapped to colours, and formatters convert tokens and styles into +formatted output. + +A package exists for each of these, containing a global `Registry` variable +with all of the registered implementations. There are also helper functions +for using the registry in each package, such as looking up lexers by name or +matching filenames, etc. + +In all cases, if a lexer, formatter or style can not be determined, `nil` will +be returned. 
In this situation you may want to default to the `Fallback` +value in each respective package, which provides sane defaults. + +### Quick start + +A convenience function exists that can be used to simply format some source +text, without any effort: + +```go +err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai") +``` + +### Identifying the language + +To highlight code, you'll first have to identify what language the code is +written in. There are three primary ways to do that: + +1. Detect the language from its filename. + + ```go + lexer := lexers.Match("foo.go") + ``` + +2. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`). + + ```go + lexer := lexers.Get("go") + ``` + +3. Detect the language from its content. + + ```go + lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n") + ``` + +In all cases, `nil` will be returned if the language can not be identified. + +```go +if lexer == nil { + lexer = lexers.Fallback +} +``` + +At this point, it should be noted that some lexers can be extremely chatty. To +mitigate this, you can use the coalescing lexer to coalesce runs of identical +token types into a single token: + +```go +lexer = chroma.Coalesce(lexer) +``` + +### Formatting the output + +Once a language is identified you will need to pick a formatter and a style (theme). + +```go +style := styles.Get("swapoff") +if style == nil { + style = styles.Fallback +} +formatter := formatters.Get("html") +if formatter == nil { + formatter = formatters.Fallback +} +``` + +Then obtain an iterator over the tokens: + +```go +contents, err := ioutil.ReadAll(r) +iterator, err := lexer.Tokenise(nil, string(contents)) +``` + +And finally, format the tokens from the iterator: + +```go +err := formatter.Format(w, style, iterator) +``` + +### The HTML formatter + +By default the `html` registered formatter generates standalone HTML with +embedded CSS. 
More flexibility is available through the `formatters/html` package. + +Firstly, the output generated by the formatter can be customised with the +following constructor options: + +- `Standalone()` - generate standalone HTML with embedded CSS. +- `WithClasses()` - use classes rather than inlined style attributes. +- `ClassPrefix(prefix)` - prefix each generated CSS class. +- `TabWidth(width)` - Set the rendered tab width, in characters. +- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). +- `WithLinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves. +- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). +- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. + +If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with: + +```go +formatter := html.New(html.WithClasses(true)) +err := formatter.WriteCSS(w, style) +``` + +## More detail + +### Lexers + +See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/) +for details on implementing lexers. Most concepts apply directly to Chroma, +but see existing lexer implementations for real examples. + +In many cases lexers can be automatically converted directly from Pygments by +using the included Python 3 script `pygments2chroma_xml.py`. I use something like +the following: + +```sh +python3 _tools/pygments2chroma_xml.py \ + pygments.lexers.jvm.KotlinLexer \ + > lexers/embedded/kotlin.xml +``` + +See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) +for a list of lexers, and notes on some of the issues importing them. + +### Formatters + +Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour. + +A `noop` formatter is included that outputs the token text only, and a `tokens` +formatter outputs raw tokens. The latter is useful for debugging lexers. 
+ +### Styles + +Chroma styles are defined in XML. The style entries use the +[same syntax](http://pygments.org/docs/styles/) as Pygments. + +All Pygments styles have been converted to Chroma using the `_tools/style.py` +script. + +When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), +know that the `Background` token type provides the default style for tokens. It does so +by defining a foreground color and background color. + +For example, this gives each token name not defined in the style a default color +of `#f8f8f8` and uses `#000000` for the highlighted code block's background: + +```xml + +``` + +Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color. + +For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/). + +## Command-line interface + +A command-line interface to Chroma is included. + +Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases). + +The CLI can be used as a preprocessor to colorise output of `less(1)`, +see documentation for the `LESSOPEN` environment variable. + +The `--fail` flag can be used to suppress output and return with exit status +1 to facilitate falling back to some other preprocessor in case chroma +does not resolve a specific lexer to use for the given file. For example: + +```shell +export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"' +``` + +Replace `cat` with your favourite fallback preprocessor. 
+ +When invoked as `.lessfilter`, the `--fail` flag is automatically turned +on under the hood for easy integration with [lesspipe shipping with +Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS); +for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`. + +## Testing lexers + +If you edit some lexers and want to try it, open a shell in `cmd/chromad` and run: + +```shell +go run . +``` + +A Link will be printed. Open it in your Browser. Now you can test on the Playground with your local changes. + +If you want to run the tests and the lexers, open a shell in the root directory and run: + +```shell +go test ./lexers +``` + +When updating or adding a lexer, please add tests. See [lexers/README.md](lexers/README.md) for more. + +## What's missing compared to Pygments? + +- Quite a few lexers, for various reasons (pull-requests welcome): + - Pygments lexers for complex languages often include custom code to + handle certain aspects, such as Raku's ability to nest code inside + regular expressions. These require time and effort to convert. + - I mostly only converted languages I had heard of, to reduce the porting cost. +- Some more esoteric features of Pygments are omitted for simplicity. +- Though the Chroma API supports content detection, very few languages support them. + I have plans to implement a statistical analyser at some point, but not enough time. 
diff --git a/vendor/github.com/alecthomas/chroma/coalesce.go b/vendor/github.com/alecthomas/chroma/v2/coalesce.go similarity index 100% rename from vendor/github.com/alecthomas/chroma/coalesce.go rename to vendor/github.com/alecthomas/chroma/v2/coalesce.go diff --git a/vendor/github.com/alecthomas/chroma/colour.go b/vendor/github.com/alecthomas/chroma/v2/colour.go similarity index 81% rename from vendor/github.com/alecthomas/chroma/colour.go rename to vendor/github.com/alecthomas/chroma/v2/colour.go index 15d794ce2a72..b7fd6e0e340f 100644 --- a/vendor/github.com/alecthomas/chroma/colour.go +++ b/vendor/github.com/alecthomas/chroma/v2/colour.go @@ -92,7 +92,7 @@ func (c Colour) Brighten(factor float64) Colour { return NewColour(uint8(r), uint8(g), uint8(b)) } -// BrightenOrDarken brightens a colour if it is < 0.5 brighteness or darkens if > 0.5 brightness. +// BrightenOrDarken brightens a colour if it is < 0.5 brightness or darkens if > 0.5 brightness. func (c Colour) BrightenOrDarken(factor float64) Colour { if c.Brightness() < 0.5 { return c.Brighten(factor) @@ -100,7 +100,35 @@ func (c Colour) BrightenOrDarken(factor float64) Colour { return c.Brighten(-factor) } -// Brightness of the colour (roughly) in the range 0.0 to 1.0 +// ClampBrightness returns a copy of this colour with its brightness adjusted such that +// it falls within the range [min, max] (or very close to it due to rounding errors). +// The supplied values use the same [0.0, 1.0] range as Brightness. 
+func (c Colour) ClampBrightness(min, max float64) Colour { + if !c.IsSet() { + return c + } + + min = math.Max(min, 0) + max = math.Min(max, 1) + current := c.Brightness() + target := math.Min(math.Max(current, min), max) + if current == target { + return c + } + + r := float64(c.Red()) + g := float64(c.Green()) + b := float64(c.Blue()) + rgb := r + g + b + if target > current { + // Solve for x: target == ((255-r)*x + r + (255-g)*x + g + (255-b)*x + b) / 255 / 3 + return c.Brighten((target*255*3 - rgb) / (255*3 - rgb)) + } + // Solve for x: target == (r*(x+1) + g*(x+1) + b*(x+1)) / 255 / 3 + return c.Brighten((target*255*3)/rgb - 1) +} + +// Brightness of the colour (roughly) in the range 0.0 to 1.0. func (c Colour) Brightness() float64 { return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0 } diff --git a/vendor/github.com/alecthomas/chroma/delegate.go b/vendor/github.com/alecthomas/chroma/v2/delegate.go similarity index 90% rename from vendor/github.com/alecthomas/chroma/delegate.go rename to vendor/github.com/alecthomas/chroma/v2/delegate.go index 5cef01bd78df..f848194f682f 100644 --- a/vendor/github.com/alecthomas/chroma/delegate.go +++ b/vendor/github.com/alecthomas/chroma/v2/delegate.go @@ -24,6 +24,21 @@ func DelegatingLexer(root Lexer, language Lexer) Lexer { } } +func (d *delegatingLexer) AnalyseText(text string) float32 { + return d.root.AnalyseText(text) +} + +func (d *delegatingLexer) SetAnalyser(analyser func(text string) float32) Lexer { + d.root.SetAnalyser(analyser) + return d +} + +func (d *delegatingLexer) SetRegistry(r *LexerRegistry) Lexer { + d.root.SetRegistry(r) + d.language.SetRegistry(r) + return d +} + func (d *delegatingLexer) Config() *Config { return d.language.Config() } diff --git a/vendor/github.com/alecthomas/chroma/doc.go b/vendor/github.com/alecthomas/chroma/v2/doc.go similarity index 100% rename from vendor/github.com/alecthomas/chroma/doc.go rename to vendor/github.com/alecthomas/chroma/v2/doc.go 
diff --git a/vendor/github.com/alecthomas/chroma/v2/emitters.go b/vendor/github.com/alecthomas/chroma/v2/emitters.go new file mode 100644 index 000000000000..0788b5b21f55 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/emitters.go @@ -0,0 +1,218 @@ +package chroma + +import ( + "fmt" +) + +// An Emitter takes group matches and returns tokens. +type Emitter interface { + // Emit tokens for the given regex groups. + Emit(groups []string, state *LexerState) Iterator +} + +// SerialisableEmitter is an Emitter that can be serialised and deserialised to/from JSON. +type SerialisableEmitter interface { + Emitter + EmitterKind() string +} + +// EmitterFunc is a function that is an Emitter. +type EmitterFunc func(groups []string, state *LexerState) Iterator + +// Emit tokens for groups. +func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator { + return e(groups, state) +} + +type Emitters []Emitter + +type byGroupsEmitter struct { + Emitters +} + +// ByGroups emits a token for each matching group in the rule's regex. +func ByGroups(emitters ...Emitter) Emitter { + return &byGroupsEmitter{Emitters: emitters} +} + +func (b *byGroupsEmitter) EmitterKind() string { return "bygroups" } + +func (b *byGroupsEmitter) Emit(groups []string, state *LexerState) Iterator { + iterators := make([]Iterator, 0, len(groups)-1) + if len(b.Emitters) != len(groups)-1 { + iterators = append(iterators, Error.Emit(groups, state)) + // panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters)) + } else { + for i, group := range groups[1:] { + if b.Emitters[i] != nil { + iterators = append(iterators, b.Emitters[i].Emit([]string{group}, state)) + } + } + } + return Concaterator(iterators...) +} + +// ByGroupNames emits a token for each named matching group in the rule's regex. 
+func ByGroupNames(emitters map[string]Emitter) Emitter { + return EmitterFunc(func(groups []string, state *LexerState) Iterator { + iterators := make([]Iterator, 0, len(state.NamedGroups)-1) + if len(state.NamedGroups)-1 == 0 { + if emitter, ok := emitters[`0`]; ok { + iterators = append(iterators, emitter.Emit(groups, state)) + } else { + iterators = append(iterators, Error.Emit(groups, state)) + } + } else { + ruleRegex := state.Rules[state.State][state.Rule].Regexp + for i := 1; i < len(state.NamedGroups); i++ { + groupName := ruleRegex.GroupNameFromNumber(i) + group := state.NamedGroups[groupName] + if emitter, ok := emitters[groupName]; ok { + if emitter != nil { + iterators = append(iterators, emitter.Emit([]string{group}, state)) + } + } else { + iterators = append(iterators, Error.Emit([]string{group}, state)) + } + } + } + return Concaterator(iterators...) + }) +} + +// UsingByGroup emits tokens for the matched groups in the regex using a +// sublexer. Used when lexing code blocks where the name of a sublexer is +// contained within the block, for example on a Markdown text block or SQL +// language block. +// +// An attempt to load the sublexer will be made using the captured value from +// the text of the matched sublexerNameGroup. If a sublexer matching the +// sublexerNameGroup is available, then tokens for the matched codeGroup will +// be emitted using the sublexer. Otherwise, if no sublexer is available, then +// tokens will be emitted from the passed emitter. +// +// Example: +// +// var Markdown = internal.Register(MustNewLexer( +// &Config{ +// Name: "markdown", +// Aliases: []string{"md", "mkd"}, +// Filenames: []string{"*.md", "*.mkd", "*.markdown"}, +// MimeTypes: []string{"text/x-markdown"}, +// }, +// Rules{ +// "root": { +// {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", +// UsingByGroup( +// 2, 4, +// String, String, String, Text, String, +// ), +// nil, +// }, +// }, +// }, +// )) +// +// See the lexers/markdown.go for the complete example. 
+// +// Note: panic's if the number of emitters does not equal the number of matched +// groups in the regex. +func UsingByGroup(sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter { + return &usingByGroup{ + SublexerNameGroup: sublexerNameGroup, + CodeGroup: codeGroup, + Emitters: emitters, + } +} + +type usingByGroup struct { + SublexerNameGroup int `xml:"sublexer_name_group"` + CodeGroup int `xml:"code_group"` + Emitters Emitters `xml:"emitters"` +} + +func (u *usingByGroup) EmitterKind() string { return "usingbygroup" } +func (u *usingByGroup) Emit(groups []string, state *LexerState) Iterator { + // bounds check + if len(u.Emitters) != len(groups)-1 { + panic("UsingByGroup expects number of emitters to be the same as len(groups)-1") + } + + // grab sublexer + sublexer := state.Registry.Get(groups[u.SublexerNameGroup]) + + // build iterators + iterators := make([]Iterator, len(groups)-1) + for i, group := range groups[1:] { + if i == u.CodeGroup-1 && sublexer != nil { + var err error + iterators[i], err = sublexer.Tokenise(nil, groups[u.CodeGroup]) + if err != nil { + panic(err) + } + } else if u.Emitters[i] != nil { + iterators[i] = u.Emitters[i].Emit([]string{group}, state) + } + } + return Concaterator(iterators...) +} + +// UsingLexer returns an Emitter that uses a given Lexer for parsing and emitting. +// +// This Emitter is not serialisable. 
+func UsingLexer(lexer Lexer) Emitter { + return EmitterFunc(func(groups []string, _ *LexerState) Iterator { + it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it + }) +} + +type usingEmitter struct { + Lexer string `xml:"lexer,attr"` +} + +func (u *usingEmitter) EmitterKind() string { return "using" } + +func (u *usingEmitter) Emit(groups []string, state *LexerState) Iterator { + if state.Registry == nil { + panic(fmt.Sprintf("no LexerRegistry available for Using(%q)", u.Lexer)) + } + lexer := state.Registry.Get(u.Lexer) + if lexer == nil { + panic(fmt.Sprintf("no such lexer %q", u.Lexer)) + } + it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it +} + +// Using returns an Emitter that uses a given Lexer reference for parsing and emitting. +// +// The referenced lexer must be stored in the same LexerRegistry. +func Using(lexer string) Emitter { + return &usingEmitter{Lexer: lexer} +} + +type usingSelfEmitter struct { + State string `xml:"state,attr"` +} + +func (u *usingSelfEmitter) EmitterKind() string { return "usingself" } + +func (u *usingSelfEmitter) Emit(groups []string, state *LexerState) Iterator { + it, err := state.Lexer.Tokenise(&TokeniseOptions{State: u.State, Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it +} + +// UsingSelf is like Using, but uses the current Lexer. 
+func UsingSelf(stateName string) Emitter { + return &usingSelfEmitter{stateName} +} diff --git a/vendor/github.com/alecthomas/chroma/formatter.go b/vendor/github.com/alecthomas/chroma/v2/formatter.go similarity index 100% rename from vendor/github.com/alecthomas/chroma/formatter.go rename to vendor/github.com/alecthomas/chroma/v2/formatter.go diff --git a/vendor/github.com/alecthomas/chroma/formatters/api.go b/vendor/github.com/alecthomas/chroma/v2/formatters/api.go similarity index 90% rename from vendor/github.com/alecthomas/chroma/formatters/api.go rename to vendor/github.com/alecthomas/chroma/v2/formatters/api.go index 6da0a24cc1d8..9ca0d01ddc10 100644 --- a/vendor/github.com/alecthomas/chroma/formatters/api.go +++ b/vendor/github.com/alecthomas/chroma/v2/formatters/api.go @@ -4,9 +4,9 @@ import ( "io" "sort" - "github.com/alecthomas/chroma" - "github.com/alecthomas/chroma/formatters/html" - "github.com/alecthomas/chroma/formatters/svg" + "github.com/alecthomas/chroma/v2" + "github.com/alecthomas/chroma/v2/formatters/html" + "github.com/alecthomas/chroma/v2/formatters/svg" ) var ( diff --git a/vendor/github.com/alecthomas/chroma/formatters/html/html.go b/vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go similarity index 68% rename from vendor/github.com/alecthomas/chroma/formatters/html/html.go rename to vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go index 164c842d2415..92d784c2454b 100644 --- a/vendor/github.com/alecthomas/chroma/formatters/html/html.go +++ b/vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go @@ -5,9 +5,11 @@ import ( "html" "io" "sort" + "strconv" "strings" + "sync" - "github.com/alecthomas/chroma" + "github.com/alecthomas/chroma/v2" ) // Option sets an option of the HTML formatter. @@ -25,12 +27,21 @@ func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } } // WithAllClasses disables an optimisation that omits redundant CSS classes. 
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } } +// WithCustomCSS sets user's custom CSS styles. +func WithCustomCSS(css map[chroma.TokenType]string) Option { + return func(f *Formatter) { + f.customCSS = css + } +} + // TabWidth sets the number of characters for a tab. Defaults to 8. func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } } // PreventSurroundingPre prevents the surrounding pre tags around the generated code. func PreventSurroundingPre(b bool) Option { return func(f *Formatter) { + f.preventSurroundingPre = b + if b { f.preWrapper = nopPreWrapper } else { @@ -39,6 +50,29 @@ func PreventSurroundingPre(b bool) Option { } } +// InlineCode creates inline code wrapped in a code tag. +func InlineCode(b bool) Option { + return func(f *Formatter) { + f.inlineCode = b + f.preWrapper = preWrapper{ + start: func(code bool, styleAttr string) string { + if code { + return fmt.Sprintf(``, styleAttr) + } + + return `` + }, + end: func(code bool) string { + if code { + return `` + } + + return `` + }, + } + } +} + // WithPreWrapper allows control of the surrounding pre tags. func WithPreWrapper(wrapper PreWrapper) Option { return func(f *Formatter) { @@ -68,9 +102,9 @@ func LineNumbersInTable(b bool) Option { } } -// LinkableLineNumbers decorates the line numbers HTML elements with an "id" +// WithLinkableLineNumbers decorates the line numbers HTML elements with an "id" // attribute so they can be linked. 
-func LinkableLineNumbers(b bool, prefix string) Option { +func WithLinkableLineNumbers(b bool, prefix string) Option { return func(f *Formatter) { f.linkableLineNumbers = b f.lineNumbersIDPrefix = prefix @@ -100,6 +134,7 @@ func New(options ...Option) *Formatter { baseLineNumber: 1, preWrapper: defaultPreWrapper, } + f.styleCache = newStyleCache(f) for _, option := range options { option(f) } @@ -139,10 +174,10 @@ var ( defaultPreWrapper = preWrapper{ start: func(code bool, styleAttr string) string { if code { - return fmt.Sprintf(`
`, styleAttr)
+				return fmt.Sprintf(``, styleAttr)
 			}
 
-			return fmt.Sprintf(`
`, styleAttr)
+			return fmt.Sprintf(``, styleAttr)
 		},
 		end: func(code bool) string {
 			if code {
@@ -156,19 +191,23 @@ var (
 
 // Formatter that generates HTML.
 type Formatter struct {
-	standalone          bool
-	prefix              string
-	Classes             bool // Exported field to detect when classes are being used
-	allClasses          bool
-	preWrapper          PreWrapper
-	tabWidth            int
-	wrapLongLines       bool
-	lineNumbers         bool
-	lineNumbersInTable  bool
-	linkableLineNumbers bool
-	lineNumbersIDPrefix string
-	highlightRanges     highlightRanges
-	baseLineNumber      int
+	styleCache            *styleCache
+	standalone            bool
+	prefix                string
+	Classes               bool // Exported field to detect when classes are being used
+	allClasses            bool
+	customCSS             map[chroma.TokenType]string
+	preWrapper            PreWrapper
+	inlineCode            bool
+	preventSurroundingPre bool
+	tabWidth              int
+	wrapLongLines         bool
+	lineNumbers           bool
+	lineNumbersInTable    bool
+	linkableLineNumbers   bool
+	lineNumbersIDPrefix   string
+	highlightRanges       highlightRanges
+	baseLineNumber        int
 }
 
 type highlightRanges [][2]int
@@ -185,12 +224,7 @@ func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Ite
 //
 // OTOH we need to be super careful about correct escaping...
 func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
-	css := f.styleToCSS(style)
-	if !f.Classes {
-		for t, style := range css {
-			css[t] = compressStyle(style)
-		}
-	}
+	css := f.styleCache.get(style, true)
 	if f.standalone {
 		fmt.Fprint(w, "\n")
 		if f.Classes {
@@ -208,7 +242,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 	wrapInTable := f.lineNumbers && f.lineNumbersInTable
 
 	lines := chroma.SplitTokensIntoLines(tokens)
-	lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
+	lineDigits := len(strconv.Itoa(f.baseLineNumber + len(lines) - 1))
 	highlightIndex := 0
 
 	if wrapInTable {
@@ -216,7 +250,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 		fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.PreWrapper))
 		fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineTable))
 		fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD))
-		fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
+		fmt.Fprintf(w, "%s", f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
 		for index := range lines {
 			line := f.baseLineNumber + index
 			highlight, next := f.shouldHighlight(highlightIndex, line)
@@ -227,7 +261,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 				fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineHighlight))
 			}
 
-			fmt.Fprintf(w, "%s\n", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
+			fmt.Fprintf(w, "%s\n", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(css, lineDigits, line))
 
 			if highlight {
 				fmt.Fprintf(w, "")
@@ -238,7 +272,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 		fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
 	}
 
-	fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
+	fmt.Fprintf(w, "%s", f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
 
 	highlightIndex = 0
 	for index, tokens := range lines {
@@ -249,26 +283,29 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 			highlightIndex++
 		}
 
-		// Start of Line
-		fmt.Fprint(w, ``)
 			} else {
-				fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight])
+				fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
 			}
-			fmt.Fprint(w, `>`)
-		} else {
-			fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
-		}
 
-		// Line number
-		if f.lineNumbers && !wrapInTable {
-			fmt.Fprintf(w, "%s", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
-		}
+			// Line number
+			if f.lineNumbers && !wrapInTable {
+				fmt.Fprintf(w, "%s", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(css, lineDigits, line))
+			}
 
-		fmt.Fprintf(w, ``, f.styleAttr(css, chroma.CodeLine))
+			fmt.Fprintf(w, ``, f.styleAttr(css, chroma.CodeLine))
+		}
 
 		for _, token := range tokens {
 			html := html.EscapeString(token.String())
@@ -279,12 +316,13 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 			fmt.Fprint(w, html)
 		}
 
-		fmt.Fprint(w, ``) // End of CodeLine
+		if !(f.preventSurroundingPre || f.inlineCode) {
+			fmt.Fprint(w, ``) // End of CodeLine
 
-		fmt.Fprint(w, ``) // End of Line
+			fmt.Fprint(w, ``) // End of Line
+		}
 	}
-
-	fmt.Fprintf(w, f.preWrapper.End(true))
+	fmt.Fprintf(w, "%s", f.preWrapper.End(true))
 
 	if wrapInTable {
 		fmt.Fprint(w, "\n")
@@ -306,12 +344,12 @@ func (f *Formatter) lineIDAttribute(line int) string {
 	return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
 }
 
-func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
+func (f *Formatter) lineTitleWithLinkIfNeeded(css map[chroma.TokenType]string, lineDigits, line int) string {
 	title := fmt.Sprintf("%*d", lineDigits, line)
 	if !f.linkableLineNumbers {
 		return title
 	}
-	return fmt.Sprintf("%s", f.lineID(line), title)
+	return fmt.Sprintf("%s", f.styleAttr(css, chroma.LineLink), f.lineID(line), title)
 }
 
 func (f *Formatter) lineID(line int) string {
@@ -373,14 +411,14 @@ func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.Toke
 
 func (f *Formatter) tabWidthStyle() string {
 	if f.tabWidth != 0 && f.tabWidth != 8 {
-		return fmt.Sprintf("; -moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d", f.tabWidth)
+		return fmt.Sprintf("-moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d;", f.tabWidth)
 	}
 	return ""
 }
 
 // WriteCSS writes CSS style definitions (without any surrounding HTML).
 func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
-	css := f.styleToCSS(style)
+	css := f.styleCache.get(style, false)
 	// Special-case background as it is mapped to the outer ".chroma" class.
 	if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
 		return err
@@ -435,28 +473,53 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
 		if t != chroma.Background {
 			entry = entry.Sub(bg)
 		}
-		if !f.allClasses && entry.IsZero() {
+
+		// Inherit from custom CSS provided by user
+		tokenCategory := t.Category()
+		tokenSubCategory := t.SubCategory()
+		if t != tokenCategory {
+			if css, ok := f.customCSS[tokenCategory]; ok {
+				classes[t] = css
+			}
+		}
+		if tokenCategory != tokenSubCategory {
+			if css, ok := f.customCSS[tokenSubCategory]; ok {
+				classes[t] += css
+			}
+		}
+		// Add custom CSS provided by user
+		if css, ok := f.customCSS[t]; ok {
+			classes[t] += css
+		}
+
+		if !f.allClasses && entry.IsZero() && classes[t] == `` {
 			continue
 		}
-		classes[t] = StyleEntryToCSS(entry)
+
+		styleEntryCSS := StyleEntryToCSS(entry)
+		if styleEntryCSS != `` && classes[t] != `` {
+			styleEntryCSS += `;`
+		}
+		classes[t] = styleEntryCSS + classes[t]
 	}
-	classes[chroma.Background] += f.tabWidthStyle()
-	classes[chroma.PreWrapper] += classes[chroma.Background] + `;`
+	classes[chroma.Background] += `;` + f.tabWidthStyle()
+	classes[chroma.PreWrapper] += classes[chroma.Background]
 	// Make PreWrapper a grid to show highlight style with full width.
-	if len(f.highlightRanges) > 0 {
+	if len(f.highlightRanges) > 0 && f.customCSS[chroma.PreWrapper] == `` {
 		classes[chroma.PreWrapper] += `display: grid;`
 	}
 	// Make PreWrapper wrap long lines.
 	if f.wrapLongLines {
 		classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
 	}
-	lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
+	lineNumbersStyle := `white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
 	// All rules begin with default rules followed by user provided rules
 	classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
 	classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
 	classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
 	classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
 	classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
+	classes[chroma.LineLink] = "outline: none; text-decoration: none; color: inherit" + classes[chroma.LineLink]
 	return classes
 }
 
@@ -498,3 +561,63 @@ func compressStyle(s string) string {
 	}
 	return strings.Join(out, ";")
 }
+
+const styleCacheLimit = 32
+
+type styleCacheEntry struct {
+	style      *chroma.Style
+	compressed bool
+	cache      map[chroma.TokenType]string
+}
+
+type styleCache struct {
+	mu sync.Mutex
+	// LRU cache of compiled (and possibly compressed) styles. This is a slice
+	// because the cache size is small, and a slice is sufficiently fast for
+	// small N.
+	cache []styleCacheEntry
+	f     *Formatter
+}
+
+func newStyleCache(f *Formatter) *styleCache {
+	return &styleCache{f: f}
+}
+
+func (l *styleCache) get(style *chroma.Style, compress bool) map[chroma.TokenType]string {
+	l.mu.Lock()
+	defer l.mu.Unlock()
+
+	// Look for an existing entry.
+	for i := len(l.cache) - 1; i >= 0; i-- {
+		entry := l.cache[i]
+		if entry.style == style && entry.compressed == compress {
+			// Top of the cache, no need to adjust the order.
+			if i == len(l.cache)-1 {
+				return entry.cache
+			}
+			// Move this entry to the end of the LRU
+			copy(l.cache[i:], l.cache[i+1:])
+			l.cache[len(l.cache)-1] = entry
+			return entry.cache
+		}
+	}
+
+	// No entry, create one.
+	cached := l.f.styleToCSS(style)
+	if !l.f.Classes {
+		for t, style := range cached {
+			cached[t] = compressStyle(style)
+		}
+	}
+	if compress {
+		for t, style := range cached {
+			cached[t] = compressStyle(style)
+		}
+	}
+	// Evict the oldest entry.
+	if len(l.cache) >= styleCacheLimit {
+		l.cache = l.cache[0:copy(l.cache, l.cache[1:])]
+	}
+	l.cache = append(l.cache, styleCacheEntry{style: style, cache: cached, compressed: compress})
+	return cached
+}
diff --git a/vendor/github.com/alecthomas/chroma/formatters/json.go b/vendor/github.com/alecthomas/chroma/v2/formatters/json.go
similarity index 94%
rename from vendor/github.com/alecthomas/chroma/formatters/json.go
rename to vendor/github.com/alecthomas/chroma/v2/formatters/json.go
index 95df4bb676e6..f167bc062cd7 100644
--- a/vendor/github.com/alecthomas/chroma/formatters/json.go
+++ b/vendor/github.com/alecthomas/chroma/v2/formatters/json.go
@@ -5,7 +5,7 @@ import (
 	"fmt"
 	"io"
 
-	"github.com/alecthomas/chroma"
+	"github.com/alecthomas/chroma/v2"
 )
 
 // JSON formatter outputs the raw token structures as JSON.
diff --git a/vendor/github.com/alecthomas/chroma/formatters/svg/font_liberation_mono.go b/vendor/github.com/alecthomas/chroma/v2/formatters/svg/font_liberation_mono.go
similarity index 100%
rename from vendor/github.com/alecthomas/chroma/formatters/svg/font_liberation_mono.go
rename to vendor/github.com/alecthomas/chroma/v2/formatters/svg/font_liberation_mono.go
diff --git a/vendor/github.com/alecthomas/chroma/formatters/svg/svg.go b/vendor/github.com/alecthomas/chroma/v2/formatters/svg/svg.go
similarity index 98%
rename from vendor/github.com/alecthomas/chroma/formatters/svg/svg.go
rename to vendor/github.com/alecthomas/chroma/v2/formatters/svg/svg.go
index 5cfd950840f2..6d457f90a5bd 100644
--- a/vendor/github.com/alecthomas/chroma/formatters/svg/svg.go
+++ b/vendor/github.com/alecthomas/chroma/v2/formatters/svg/svg.go
@@ -6,11 +6,11 @@ import (
 	"errors"
 	"fmt"
 	"io"
-	"io/ioutil"
+	"os"
 	"path"
 	"strings"
 
-	"github.com/alecthomas/chroma"
+	"github.com/alecthomas/chroma/v2"
 )
 
 // Option sets an option of the SVG formatter.
@@ -34,7 +34,7 @@ func EmbedFontFile(fontFamily string, fileName string) (option Option, err error
 	}
 
 	var content []byte
-	if content, err = ioutil.ReadFile(fileName); err == nil {
+	if content, err = os.ReadFile(fileName); err == nil {
 		option = EmbedFont(fontFamily, base64.StdEncoding.EncodeToString(content), format)
 	}
 	return
diff --git a/vendor/github.com/alecthomas/chroma/formatters/tokens.go b/vendor/github.com/alecthomas/chroma/v2/formatters/tokens.go
similarity index 91%
rename from vendor/github.com/alecthomas/chroma/formatters/tokens.go
rename to vendor/github.com/alecthomas/chroma/v2/formatters/tokens.go
index 91d80d146c1e..3bdd57ccfcba 100644
--- a/vendor/github.com/alecthomas/chroma/formatters/tokens.go
+++ b/vendor/github.com/alecthomas/chroma/v2/formatters/tokens.go
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"io"
 
-	"github.com/alecthomas/chroma"
+	"github.com/alecthomas/chroma/v2"
 )
 
 // Tokens formatter outputs the raw token structures.
diff --git a/vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go b/vendor/github.com/alecthomas/chroma/v2/formatters/tty_indexed.go
similarity index 98%
rename from vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go
rename to vendor/github.com/alecthomas/chroma/v2/formatters/tty_indexed.go
index 47fbb1a119da..d383d9850b8c 100644
--- a/vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go
+++ b/vendor/github.com/alecthomas/chroma/v2/formatters/tty_indexed.go
@@ -5,7 +5,7 @@ import (
 	"io"
 	"math"
 
-	"github.com/alecthomas/chroma"
+	"github.com/alecthomas/chroma/v2"
 )
 
 type ttyTable struct {
@@ -242,12 +242,21 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
 	theme := styleToEscapeSequence(c.table, style)
 	for token := it(); token != chroma.EOF; token = it() {
 		clr, ok := theme[token.Type]
+
+		// This search mimics how styles.Get() is used in tty_truecolour.go.
 		if !ok {
 			clr, ok = theme[token.Type.SubCategory()]
 			if !ok {
-				clr = theme[token.Type.Category()]
+				clr, ok = theme[token.Type.Category()]
+				if !ok {
+					clr, ok = theme[chroma.Text]
+					if !ok {
+						clr = theme[chroma.Background]
+					}
+				}
 			}
 		}
+
 		if clr != "" {
 			fmt.Fprint(w, clr)
 		}
diff --git a/vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go b/vendor/github.com/alecthomas/chroma/v2/formatters/tty_truecolour.go
similarity index 96%
rename from vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go
rename to vendor/github.com/alecthomas/chroma/v2/formatters/tty_truecolour.go
index b02e6360e42c..7b43c6eb1b60 100644
--- a/vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go
+++ b/vendor/github.com/alecthomas/chroma/v2/formatters/tty_truecolour.go
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"io"
 
-	"github.com/alecthomas/chroma"
+	"github.com/alecthomas/chroma/v2"
 )
 
 // TTY16m is a true-colour terminal formatter.
diff --git a/vendor/github.com/alecthomas/chroma/iterator.go b/vendor/github.com/alecthomas/chroma/v2/iterator.go
similarity index 100%
rename from vendor/github.com/alecthomas/chroma/iterator.go
rename to vendor/github.com/alecthomas/chroma/v2/iterator.go
diff --git a/vendor/github.com/alecthomas/chroma/lexer.go b/vendor/github.com/alecthomas/chroma/v2/lexer.go
similarity index 59%
rename from vendor/github.com/alecthomas/chroma/lexer.go
rename to vendor/github.com/alecthomas/chroma/v2/lexer.go
index fe625198e6d6..eb027bf69877 100644
--- a/vendor/github.com/alecthomas/chroma/lexer.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexer.go
@@ -15,30 +15,30 @@ var (
 // Config for a lexer.
 type Config struct {
 	// Name of the lexer.
-	Name string
+	Name string `xml:"name,omitempty"`
 
 	// Shortcuts for the lexer
-	Aliases []string
+	Aliases []string `xml:"alias,omitempty"`
 
 	// File name globs
-	Filenames []string
+	Filenames []string `xml:"filename,omitempty"`
 
 	// Secondary file name globs
-	AliasFilenames []string
+	AliasFilenames []string `xml:"alias_filename,omitempty"`
 
 	// MIME types
-	MimeTypes []string
+	MimeTypes []string `xml:"mime_type,omitempty"`
 
 	// Regex matching is case-insensitive.
-	CaseInsensitive bool
+	CaseInsensitive bool `xml:"case_insensitive,omitempty"`
 
 	// Regex matches all characters.
-	DotAll bool
+	DotAll bool `xml:"dot_all,omitempty"`
 
 	// Regex does not match across lines ($ matches EOL).
 	//
 	// Defaults to multiline.
-	NotMultiline bool
+	NotMultiline bool `xml:"not_multiline,omitempty"`
 
 	// Don't strip leading and trailing newlines from the input.
 	// DontStripNL bool
@@ -48,7 +48,7 @@ type Config struct {
 
 	// Make sure that the input ends with a newline. This
 	// is required for some lexers that consume input linewise.
-	EnsureNL bool
+	EnsureNL bool `xml:"ensure_nl,omitempty"`
 
 	// If given and greater than 0, expand tabs in the input.
 	// TabSize int
@@ -56,7 +56,27 @@ type Config struct {
 	// Priority of lexer.
 	//
 	// If this is 0 it will be treated as a default of 1.
-	Priority float32
+	Priority float32 `xml:"priority,omitempty"`
+
+	// Analyse is a list of regexes to match against the input.
+	//
+	// If a match is found, the score is returned if single attribute is set to true,
+	// otherwise the sum of all the score of matching patterns will be
+	// used as the final score.
+	Analyse *AnalyseConfig `xml:"analyse,omitempty"`
+}
+
+// AnalyseConfig defines the list of regexes analysers.
+type AnalyseConfig struct {
+	Regexes []RegexConfig `xml:"regex,omitempty"`
+	// If true, the first matching score is returned.
+	First bool `xml:"first,attr"`
+}
+
+// RegexConfig defines a single regex pattern and its score in case of match.
+type RegexConfig struct {
+	Pattern string  `xml:"pattern,attr"`
+	Score   float32 `xml:"score,attr"`
 }
 
 // Token output to formatter.
@@ -94,6 +114,20 @@ type Lexer interface {
 	Config() *Config
 	// Tokenise returns an Iterator over tokens in text.
 	Tokenise(options *TokeniseOptions, text string) (Iterator, error)
+	// SetRegistry sets the registry this Lexer is associated with.
+	//
+	// The registry should be used by the Lexer if it needs to look up other
+	// lexers.
+	SetRegistry(registry *LexerRegistry) Lexer
+	// SetAnalyser sets a function the Lexer should use for scoring how
+	// likely a fragment of text is to match this lexer, between 0.0 and 1.0.
+	// A value of 1 indicates high confidence.
+	//
+	// Lexers may ignore this if they implement their own analysers.
+	SetAnalyser(analyser func(text string) float32) Lexer
+	// AnalyseText scores how likely a fragment of text is to match
+	// this lexer, between 0.0 and 1.0. A value of 1 indicates high confidence.
+	AnalyseText(text string) float32
 }
 
 // Lexers is a slice of lexers sortable by name.
diff --git a/vendor/github.com/alecthomas/chroma/lexers/README.md b/vendor/github.com/alecthomas/chroma/v2/lexers/README.md
similarity index 85%
rename from vendor/github.com/alecthomas/chroma/lexers/README.md
rename to vendor/github.com/alecthomas/chroma/v2/lexers/README.md
index b4ed292b1e9a..60a0055ad832 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/README.md
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/README.md
@@ -1,19 +1,24 @@
-# Lexer tests
+# Chroma lexers
+
+All lexers in Chroma should now be defined in XML unless they require custom code.
+
+## Lexer tests
 
 The tests in this directory feed a known input `testdata/.actual` into the parser for `` and check
-that its output matches `.exported`.
+that its output matches `.expected`.
 
 It is also possible to perform several tests on a same parser ``, by placing know inputs `*.actual` into a
 directory `testdata//`.
 
-## Running the tests
+### Running the tests
 
 Run the tests as normal:
 ```go
 go test ./lexers
 ```
 
-## Update existing tests
+### Update existing tests
+
 When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer.
 
 To regenerate all tests, type in your terminal:
@@ -26,7 +31,8 @@ This first sets the `RECORD` environment variable to `true`. Then it runs `go te
 
 (That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.)
 
-### Windows users
+#### Windows users
+
 Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell.
 
 Instead we have to perform both steps separately:
@@ -35,6 +41,6 @@ Instead we have to perform both steps separately:
 	+ In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more.
 	+ In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more.
 	+ You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how.
-- When the environment variable is set, run `go tests ./lexers`.
+- When the environment variable is set, run `go test ./lexers`.
 
 Chroma will now regenerate the test files and print its results to the console window.
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go b/vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go
new file mode 100644
index 000000000000..82a7efa48787
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go
@@ -0,0 +1,275 @@
+package lexers
+
+import (
+	. "github.com/alecthomas/chroma/v2" // nolint
+)
+
+// Matcher token stub for docs, or
+// Named matcher: @name, or
+// Path matcher: /foo, or
+// Wildcard path matcher: *
+// nolint: gosec
+var caddyfileMatcherTokenRegexp = `(\[\\]|@[^\s]+|/[^\s]+|\*)`
+
+// Comment at start of line, or
+// Comment preceded by whitespace
+var caddyfileCommentRegexp = `(^|\s+)#.*\n`
+
+// caddyfileCommon are the rules common to both of the lexer variants
+func caddyfileCommonRules() Rules {
+	return Rules{
+		"site_block_common": {
+			Include("site_body"),
+			// Any other directive
+			{`[^\s#]+`, Keyword, Push("directive")},
+			Include("base"),
+		},
+		"site_body": {
+			// Import keyword
+			{`\b(import|invoke)\b( [^\s#]+)`, ByGroups(Keyword, Text), Push("subdirective")},
+			// Matcher definition
+			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
+			// Matcher token stub for docs
+			{`\[\\]`, NameDecorator, Push("matcher")},
+			// These cannot have matchers but may have things that look like
+			// matchers in their arguments, so we just parse as a subdirective.
+			{`\b(try_files|tls|log|bind)\b`, Keyword, Push("subdirective")},
+			// These are special, they can nest more directives
+			{`\b(handle_errors|handle_path|handle_response|replace_status|handle|route)\b`, Keyword, Push("nested_directive")},
+			// uri directive has special syntax
+			{`\b(uri)\b`, Keyword, Push("uri_directive")},
+		},
+		"matcher": {
+			{`\{`, Punctuation, Push("block")},
+			// Not can be one-liner
+			{`not`, Keyword, Push("deep_not_matcher")},
+			// Heredoc for CEL expression
+			Include("heredoc"),
+			// Backtick for CEL expression
+			{"`", StringBacktick, Push("backticks")},
+			// Any other same-line matcher
+			{`[^\s#]+`, Keyword, Push("arguments")},
+			// Terminators
+			{`\s*\n`, Text, Pop(1)},
+			{`\}`, Punctuation, Pop(1)},
+			Include("base"),
+		},
+		"block": {
+			{`\}`, Punctuation, Pop(2)},
+			// Using double quotes doesn't stop at spaces
+			{`"`, StringDouble, Push("double_quotes")},
+			// Using backticks doesn't stop at spaces
+			{"`", StringBacktick, Push("backticks")},
+			// Not can be one-liner
+			{`not`, Keyword, Push("not_matcher")},
+			// Directives & matcher definitions
+			Include("site_body"),
+			// Any directive
+			{`[^\s#]+`, Keyword, Push("subdirective")},
+			Include("base"),
+		},
+		"nested_block": {
+			{`\}`, Punctuation, Pop(2)},
+			// Using double quotes doesn't stop at spaces
+			{`"`, StringDouble, Push("double_quotes")},
+			// Using backticks doesn't stop at spaces
+			{"`", StringBacktick, Push("backticks")},
+			// Not can be one-liner
+			{`not`, Keyword, Push("not_matcher")},
+			// Directives & matcher definitions
+			Include("site_body"),
+			// Any other subdirective
+			{`[^\s#]+`, Keyword, Push("directive")},
+			Include("base"),
+		},
+		"not_matcher": {
+			{`\}`, Punctuation, Pop(2)},
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{`[^\s#]+`, Keyword, Push("arguments")},
+			{`\s+`, Text, nil},
+		},
+		"deep_not_matcher": {
+			{`\}`, Punctuation, Pop(2)},
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{`[^\s#]+`, Keyword, Push("deep_subdirective")},
+			{`\s+`, Text, nil},
+		},
+		"directive": {
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{caddyfileMatcherTokenRegexp, NameDecorator, Push("arguments")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
+			Include("base"),
+		},
+		"nested_directive": {
+			{`\{(?=\s)`, Punctuation, Push("nested_block")},
+			{caddyfileMatcherTokenRegexp, NameDecorator, Push("nested_arguments")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
+			Include("base"),
+		},
+		"subdirective": {
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
+			Include("base"),
+		},
+		"arguments": {
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(2)},
+			{`\\\n`, Text, nil}, // Skip escaped newlines
+			{`\s*\n`, Text, Pop(2)},
+			Include("base"),
+		},
+		"nested_arguments": {
+			{`\{(?=\s)`, Punctuation, Push("nested_block")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(2)},
+			{`\\\n`, Text, nil}, // Skip escaped newlines
+			{`\s*\n`, Text, Pop(2)},
+			Include("base"),
+		},
+		"deep_subdirective": {
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(3)},
+			{`\s*\n`, Text, Pop(3)},
+			Include("base"),
+		},
+		"uri_directive": {
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{caddyfileMatcherTokenRegexp, NameDecorator, nil},
+			{`(strip_prefix|strip_suffix|replace|path_regexp)`, NameConstant, Push("arguments")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
+			Include("base"),
+		},
+		"double_quotes": {
+			Include("placeholder"),
+			{`\\"`, StringDouble, nil},
+			{`[^"]`, StringDouble, nil},
+			{`"`, StringDouble, Pop(1)},
+		},
+		"backticks": {
+			Include("placeholder"),
+			{"\\\\`", StringBacktick, nil},
+			{"[^`]", StringBacktick, nil},
+			{"`", StringBacktick, Pop(1)},
+		},
+		"optional": {
+			// Docs syntax for showing optional parts with [ ]
+			{`\[`, Punctuation, Push("optional")},
+			Include("name_constants"),
+			{`\|`, Punctuation, nil},
+			{`[^\[\]\|]+`, String, nil},
+			{`\]`, Punctuation, Pop(1)},
+		},
+		"heredoc": {
+			{`(<<([a-zA-Z0-9_-]+))(\n(.*|\n)*)(\s*)(\2)`, ByGroups(StringHeredoc, nil, String, String, String, StringHeredoc), nil},
+		},
+		"name_constants": {
+			{`\b(most_recently_modified|largest_size|smallest_size|first_exist|internal|disable_redirects|ignore_loaded_certs|disable_certs|private_ranges|first|last|before|after|on|off)\b(\||(?=\]|\s|$))`, ByGroups(NameConstant, Punctuation), nil},
+		},
+		"placeholder": {
+			// Placeholder with dots, colon for default value, brackets for args[0:]
+			{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, nil},
+			// Handle opening brackets with no matching closing one
+			{`\{[^\}\s]*\b`, String, nil},
+		},
+		"base": {
+			{caddyfileCommentRegexp, CommentSingle, nil},
+			{`\[\\]`, NameDecorator, nil},
+			Include("name_constants"),
+			Include("heredoc"),
+			{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)([^\s]*)`, ByGroups(Name, Name, Punctuation, NumberInteger, Name), nil},
+			{`\[`, Punctuation, Push("optional")},
+			{"`", StringBacktick, Push("backticks")},
+			{`"`, StringDouble, Push("double_quotes")},
+			Include("placeholder"),
+			{`[a-z-]+/[a-z-+]+`, String, nil},
+			{`[0-9]+([smhdk]|ns|us|µs|ms)?\b`, NumberInteger, nil},
+			{`[^\s\n#\{]+`, String, nil},
+			{`/[^\s#]*`, Name, nil},
+			{`\s+`, Text, nil},
+		},
+	}
+}
+
+// Caddyfile lexer.
+var Caddyfile = Register(MustNewLexer(
+	&Config{
+		Name:      "Caddyfile",
+		Aliases:   []string{"caddyfile", "caddy"},
+		Filenames: []string{"Caddyfile*"},
+		MimeTypes: []string{},
+	},
+	caddyfileRules,
+))
+
+func caddyfileRules() Rules {
+	return Rules{
+		"root": {
+			{caddyfileCommentRegexp, CommentSingle, nil},
+			// Global options block
+			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
+			// Top level import
+			{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
+			// Snippets
+			{`(&?\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
+			// Site label
+			{`[^#{(\s,]+`, GenericHeading, Push("label")},
+			// Site label with placeholder
+			{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, Push("label")},
+			{`\s+`, Text, nil},
+		},
+		"globals": {
+			{`\}`, Punctuation, Pop(1)},
+			// Global options are parsed as subdirectives (no matcher)
+			{`[^\s#]+`, Keyword, Push("subdirective")},
+			Include("base"),
+		},
+		"snippet": {
+			{`\}`, Punctuation, Pop(1)},
+			Include("site_body"),
+			// Any other directive
+			{`[^\s#]+`, Keyword, Push("directive")},
+			Include("base"),
+		},
+		"label": {
+			// Allow multiple labels, comma separated, newlines after
+			// a comma means another label is coming
+			{`,\s*\n?`, Text, nil},
+			{` `, Text, nil},
+			// Site label with placeholder
+			Include("placeholder"),
+			// Site label
+			{`[^#{(\s,]+`, GenericHeading, nil},
+			// Comment after non-block label (hack because comments end in \n)
+			{`#.*\n`, CommentSingle, Push("site_block")},
+			// Note: if \n, we'll never pop out of the site_block, it's valid
+			{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
+		},
+		"site_block": {
+			{`\}`, Punctuation, Pop(2)},
+			Include("site_block_common"),
+		},
+	}.Merge(caddyfileCommonRules())
+}
+
+// Caddyfile directive-only lexer.
+var CaddyfileDirectives = Register(MustNewLexer(
+	&Config{
+		Name:      "Caddyfile Directives",
+		Aliases:   []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
+		Filenames: []string{},
+		MimeTypes: []string{},
+	},
+	caddyfileDirectivesRules,
+))
+
+func caddyfileDirectivesRules() Rules {
+	return Rules{
+		// Same as "site_block" in Caddyfile
+		"root": {
+			Include("site_block_common"),
+		},
+	}.Merge(caddyfileCommonRules())
+}
diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cl.go b/vendor/github.com/alecthomas/chroma/v2/lexers/cl.go
similarity index 80%
rename from vendor/github.com/alecthomas/chroma/lexers/c/cl.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/cl.go
index bb162734ccae..3eb0c23070d8 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/c/cl.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/cl.go
@@ -1,8 +1,7 @@
-package c
+package lexers
 
 import (
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/internal"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
 var (
@@ -230,15 +229,9 @@ var (
 )
 
 // Common Lisp lexer.
-var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
-	&Config{
-		Name:            "Common Lisp",
-		Aliases:         []string{"common-lisp", "cl", "lisp"},
-		Filenames:       []string{"*.cl", "*.lisp"},
-		MimeTypes:       []string{"text/x-common-lisp"},
-		CaseInsensitive: true,
-	},
-	commonLispRules,
+var CommonLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
+	embedded,
+	"embedded/common_lisp.xml",
 ), TypeMapping{
 	{NameVariable, NameFunction, clBuiltinFunctions},
 	{NameVariable, Keyword, clSpecialForms},
@@ -248,63 +241,3 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
 	{NameVariable, KeywordType, clBuiltinTypes},
 	{NameVariable, NameClass, clBuiltinClasses},
 }))
-
-func commonLispRules() Rules {
-	return Rules{
-		"root": {
-			Default(Push("body")),
-		},
-		"multiline-comment": {
-			{`#\|`, CommentMultiline, Push()},
-			{`\|#`, CommentMultiline, Pop(1)},
-			{`[^|#]+`, CommentMultiline, nil},
-			{`[|#]`, CommentMultiline, nil},
-		},
-		"commented-form": {
-			{`\(`, CommentPreproc, Push()},
-			{`\)`, CommentPreproc, Pop(1)},
-			{`[^()]+`, CommentPreproc, nil},
-		},
-		"body": {
-			{`\s+`, Text, nil},
-			{`;.*$`, CommentSingle, nil},
-			{`#\|`, CommentMultiline, Push("multiline-comment")},
-			{`#\d*Y.*$`, CommentSpecial, nil},
-			{`"(\\.|\\\n|[^"\\])*"`, LiteralString, nil},
-			{`:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`::(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`:#(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`'(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`'`, Operator, nil},
-			{"`", Operator, nil},
-			{"[-+]?\\d+\\.?(?=[ \"()\\'\\n,;`])", LiteralNumberInteger, nil},
-			{"[-+]?\\d+/\\d+(?=[ \"()\\'\\n,;`])", LiteralNumber, nil},
-			{"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\'\\n,;`])", LiteralNumberFloat, nil},
-			{"#\\\\.(?=[ \"()\\'\\n,;`])", LiteralStringChar, nil},
-			{`#\\(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringChar, nil},
-			{`#\(`, Operator, Push("body")},
-			{`#\d*\*[01]*`, LiteralOther, nil},
-			{`#:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`#[.,]`, Operator, nil},
-			{`#\'`, NameFunction, nil},
-			{`#b[+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil},
-			{`#o[+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil},
-			{`#x[+-]?[0-9a-f]+(/[0-9a-f]+)?`, LiteralNumberHex, nil},
-			{`#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?`, LiteralNumber, nil},
-			{`(#c)(\()`, ByGroups(LiteralNumber, Punctuation), Push("body")},
-			{`(#\d+a)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")},
-			{`(#s)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")},
-			{`#p?"(\\.|[^"])*"`, LiteralOther, nil},
-			{`#\d+=`, Operator, nil},
-			{`#\d+#`, Operator, nil},
-			{"#+nil(?=[ \"()\\'\\n,;`])\\s*\\(", CommentPreproc, Push("commented-form")},
-			{`#[+-]`, Operator, nil},
-			{`(,@|,|\.)`, Operator, nil},
-			{"(t|nil)(?=[ \"()\\'\\n,;`])", NameConstant, nil},
-			{`\*(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)\*`, NameVariableGlobal, nil},
-			{`(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, NameVariable, nil},
-			{`\(`, Punctuation, Push("body")},
-			{`\)`, Punctuation, Pop(1)},
-		},
-	}
-}
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/dns.go b/vendor/github.com/alecthomas/chroma/v2/lexers/dns.go
new file mode 100644
index 000000000000..7e699622ace7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/dns.go
@@ -0,0 +1,17 @@
+package lexers
+
+import (
+	"regexp"
+)
+
+// TODO(moorereason): can this be factored away?
+var zoneAnalyserRe = regexp.MustCompile(`(?m)^@\s+IN\s+SOA\s+`)
+
+func init() { // nolint: gochecknoinits
+	Get("dns").SetAnalyser(func(text string) float32 {
+		if zoneAnalyserRe.FindString(text) != "" {
+			return 1.0
+		}
+		return 0.0
+	})
+}
diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go b/vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go
similarity index 92%
rename from vendor/github.com/alecthomas/chroma/lexers/e/emacs.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go
index 51c49101dc40..869b0f3f43df 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go
@@ -1,8 +1,7 @@
-package e
+package lexers
 
 import (
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/internal"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
 var (
@@ -522,14 +521,9 @@ var (
 )
 
 // EmacsLisp lexer.
-var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
-	&Config{
-		Name:      "EmacsLisp",
-		Aliases:   []string{"emacs", "elisp", "emacs-lisp"},
-		Filenames: []string{"*.el"},
-		MimeTypes: []string{"text/x-elisp", "application/x-elisp"},
-	},
-	emacsLispRules,
+var EmacsLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
+	embedded,
+	"embedded/emacslisp.xml",
 ), TypeMapping{
 	{NameVariable, NameFunction, emacsBuiltinFunction},
 	{NameVariable, NameBuiltin, emacsSpecialForms},
@@ -537,50 +531,3 @@ var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
 	{NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)},
 	{NameVariable, KeywordPseudo, emacsLambdaListKeywords},
 }))
-
-func emacsLispRules() Rules {
-	return Rules{
-		"root": {
-			Default(Push("body")),
-		},
-		"body": {
-			{`\s+`, Text, nil},
-			{`;.*$`, CommentSingle, nil},
-			{`"`, LiteralString, Push("string")},
-			{`\?([^\\]|\\.)`, LiteralStringChar, nil},
-			{`:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, NameBuiltin, nil},
-			{`::((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`'((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`'`, Operator, nil},
-			{"`", Operator, nil},
-			{"[-+]?\\d+\\.?(?=[ \"()\\]\\'\\n,;`])", LiteralNumberInteger, nil},
-			{"[-+]?\\d+/\\d+(?=[ \"()\\]\\'\\n,;`])", LiteralNumber, nil},
-			{"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\]\\'\\n,;`])", LiteralNumberFloat, nil},
-			{`\[|\]`, Punctuation, nil},
-			{`#:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil},
-			{`#\^\^?`, Operator, nil},
-			{`#\'`, NameFunction, nil},
-			{`#[bB][+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil},
-			{`#[oO][+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil},
-			{`#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?`, LiteralNumberHex, nil},
-			{`#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?`, LiteralNumber, nil},
-			{`#\d+=`, Operator, nil},
-			{`#\d+#`, Operator, nil},
-			{`(,@|,|\.|:)`, Operator, nil},
-			{"(t|nil)(?=[ \"()\\]\\'\\n,;`])", NameConstant, nil},
-			{`\*((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\*`, NameVariableGlobal, nil},
-			{`((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, NameVariable, nil},
-			{`#\(`, Operator, Push("body")},
-			{`\(`, Punctuation, Push("body")},
-			{`\)`, Punctuation, Pop(1)},
-		},
-		"string": {
-			{"[^\"\\\\`]+", LiteralString, nil},
-			{"`((?:\\\\.|[\\w!$%&*+-/<=>?@^{}~|])(?:\\\\.|[\\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\\'", LiteralStringSymbol, nil},
-			{"`", LiteralString, nil},
-			{`\\.`, LiteralString, nil},
-			{`\\\n`, LiteralString, nil},
-			{`"`, LiteralString, Pop(1)},
-		},
-	}
-}
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml
new file mode 100644
index 000000000000..e8140b738ca8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml
@@ -0,0 +1,154 @@
+
+  
+    ABAP
+    abap
+    *.abap
+    *.ABAP
+    text/x-abap
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml
new file mode 100644
index 000000000000..3ffd51c6c6b5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml
@@ -0,0 +1,66 @@
+
+  
+    ABNF
+    abnf
+    *.abnf
+    text/x-abnf
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml
new file mode 100644
index 000000000000..d6727a103baf
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml
@@ -0,0 +1,68 @@
+
+  
+    ActionScript
+    as
+    actionscript
+    *.as
+    application/x-actionscript
+    text/x-actionscript
+    text/actionscript
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml
new file mode 100644
index 000000000000..e5f653848aa8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml
@@ -0,0 +1,163 @@
+
+  
+    ActionScript 3
+    as3
+    actionscript3
+    *.as
+    application/x-actionscript3
+    text/x-actionscript3
+    text/actionscript3
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml
new file mode 100644
index 000000000000..5854a20e9cb7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml
@@ -0,0 +1,321 @@
+
+  
+    Ada
+    ada
+    ada95
+    ada2005
+    *.adb
+    *.ads
+    *.ada
+    text/x-ada
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml
new file mode 100644
index 000000000000..6f2b2d50882c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml
@@ -0,0 +1,66 @@
+
+  
+    Agda
+    agda
+    *.agda
+    text/x-agda
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml
new file mode 100644
index 000000000000..30bad5ae9962
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml
@@ -0,0 +1,75 @@
+
+  
+    AL
+    al
+    *.al
+    *.dal
+    text/x-al
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml
new file mode 100644
index 000000000000..1de9ea6cc391
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml
@@ -0,0 +1,58 @@
+
+
+  
+    Alloy
+    alloy
+    *.als
+    text/x-alloy
+    true
+  
+  
+    
+      
+      
+      
+      
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml
new file mode 100644
index 000000000000..84fe20b344d0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml
@@ -0,0 +1,108 @@
+
+  
+    Angular2
+    ng2
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml
new file mode 100644
index 000000000000..e57edd4047d0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml
@@ -0,0 +1,317 @@
+
+  
+    ANTLR
+    antlr
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml
new file mode 100644
index 000000000000..7643541c1bcb
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml
@@ -0,0 +1,74 @@
+
+  
+    ApacheConf
+    apacheconf
+    aconf
+    apache
+    .htaccess
+    apache.conf
+    apache2.conf
+    text/x-apacheconf
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml
new file mode 100644
index 000000000000..959448ca4038
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml
@@ -0,0 +1,59 @@
+
+  
+    APL
+    apl
+    *.apl
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml
new file mode 100644
index 000000000000..1de6c67ee102
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml
@@ -0,0 +1,130 @@
+
+  
+    AppleScript
+    applescript
+    *.applescript
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml
new file mode 100644
index 000000000000..e7119737e1c3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml
@@ -0,0 +1,174 @@
+
+  
+    ArangoDB AQL
+    aql
+    *.aql
+    text/x-aql
+    true
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml
new file mode 100644
index 000000000000..00399c2c8ed8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml
@@ -0,0 +1,309 @@
+
+  
+    Arduino
+    arduino
+    *.ino
+    text/x-arduino
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml
new file mode 100644
index 000000000000..e5966cf62d27
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml
@@ -0,0 +1,126 @@
+
+  
+    ArmAsm
+    armasm
+    *.s
+    *.S
+    text/x-armasm
+    text/x-asm
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml
new file mode 100644
index 000000000000..6ec94edeb8ff
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml
@@ -0,0 +1,78 @@
+
+
+  
+    AutoHotkey
+    autohotkey
+    ahk
+    *.ahk
+    *.ahkl
+    text/x-autohotkey
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml
new file mode 100644
index 000000000000..1f7e15df3879
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml
@@ -0,0 +1,70 @@
+
+
+  
+    AutoIt
+    autoit
+    *.au3
+    text/x-autoit
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+      
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml
new file mode 100644
index 000000000000..07476ff745ad
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml
@@ -0,0 +1,95 @@
+
+  
+    Awk
+    awk
+    gawk
+    mawk
+    nawk
+    *.awk
+    application/x-awk
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml
new file mode 100644
index 000000000000..d13c12319181
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml
@@ -0,0 +1,97 @@
+
+  
+    Ballerina
+    ballerina
+    *.bal
+    text/x-ballerina
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml
new file mode 100644
index 000000000000..d704a8ffaa40
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml
@@ -0,0 +1,220 @@
+
+  
+    Bash
+    bash
+    sh
+    ksh
+    zsh
+    shell
+    *.sh
+    *.ksh
+    *.bash
+    *.ebuild
+    *.eclass
+    .env
+    *.env
+    *.exheres-0
+    *.exlib
+    *.zsh
+    *.zshrc
+    .bashrc
+    bashrc
+    .bash_*
+    bash_*
+    zshrc
+    .zshrc
+    PKGBUILD
+    application/x-sh
+    application/x-shellscript
+    
+      
+    
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml
new file mode 100644
index 000000000000..82c5fd6d0ff4
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml
@@ -0,0 +1,25 @@
+
+  
+    Bash Session
+    bash-session
+    console
+    shell-session
+    *.sh-session
+    text/x-sh
+    true
+  
+  
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml
new file mode 100644
index 000000000000..d3e0627280e9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml
@@ -0,0 +1,660 @@
+
+  
+    Batchfile
+    bat
+    batch
+    dosbatch
+    winbatch
+    *.bat
+    *.cmd
+    application/x-dos-batch
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml
new file mode 100644
index 000000000000..8fde161b36ce
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml
@@ -0,0 +1,152 @@
+
+  
+    BibTeX
+    bib
+    bibtex
+    *.bib
+    text/x-bibtex
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml
new file mode 100644
index 000000000000..db90f31b30a9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml
@@ -0,0 +1,84 @@
+
+  
+    Bicep
+    bicep
+    *.bicep
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml
new file mode 100644
index 000000000000..591b1ad0f4a2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml
@@ -0,0 +1,141 @@
+
+  
+    BlitzBasic
+    blitzbasic
+    b3d
+    bplus
+    *.bb
+    *.decls
+    text/x-bb
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml
new file mode 100644
index 000000000000..5c9842477b27
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml
@@ -0,0 +1,28 @@
+
+  
+    BNF
+    bnf
+    *.bnf
+    text/x-bnf
+  
+  
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml
new file mode 100644
index 000000000000..c1090ea5a3ef
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml
@@ -0,0 +1,83 @@
+
+  
+    BQN
+    bqn
+    *.bqn
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml
new file mode 100644
index 000000000000..4c84c33082f1
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml
@@ -0,0 +1,51 @@
+
+  
+    Brainfuck
+    brainfuck
+    bf
+    *.bf
+    *.b
+    application/x-brainfuck
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml
new file mode 100644
index 000000000000..f1e21db03f05
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml
@@ -0,0 +1,121 @@
+
+  
+    C#
+    csharp
+    c#
+    *.cs
+    text/x-csharp
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml
new file mode 100644
index 000000000000..680a19afba52
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml
@@ -0,0 +1,331 @@
+
+  
+    C++
+    cpp
+    c++
+    *.cpp
+    *.hpp
+    *.c++
+    *.h++
+    *.cc
+    *.hh
+    *.cxx
+    *.hxx
+    *.C
+    *.H
+    *.cp
+    *.CPP
+    *.tpp
+    text/x-c++hdr
+    text/x-c++src
+    true
+    
+      
+      
+    
+  
+  
+    
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml
new file mode 100644
index 000000000000..35ee32dce68a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml
@@ -0,0 +1,260 @@
+
+  
+    C
+    c
+    *.c
+    *.h
+    *.idc
+    *.x[bp]m
+    text/x-chdr
+    text/x-csrc
+    image/x-xbitmap
+    image/x-xpixmap
+    true
+    
+      
+      
+    
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+          
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml
new file mode 100644
index 000000000000..3e7d1470bf75
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml
@@ -0,0 +1,122 @@
+
+  
+    Cap'n Proto
+    capnp
+    *.capnp
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml
new file mode 100644
index 000000000000..1a78f99a143d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml
@@ -0,0 +1,137 @@
+
+  
+    Cassandra CQL
+    cassandra
+    cql
+    *.cql
+    text/x-cql
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          1
+          6
+          
+            
+            
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml
new file mode 100644
index 000000000000..4c4121835b20
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml
@@ -0,0 +1,151 @@
+
+  
+    Ceylon
+    ceylon
+    *.ceylon
+    text/x-ceylon
+    true
+  
+  
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml
new file mode 100644
index 000000000000..49503050506e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml
@@ -0,0 +1,197 @@
+
+  
+    CFEngine3
+    cfengine3
+    cf3
+    *.cf
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml
new file mode 100644
index 000000000000..46a84cf6ab73
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml
@@ -0,0 +1,92 @@
+
+  
+    cfstatement
+    cfs
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml
new file mode 100644
index 000000000000..860439aa8a91
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml
@@ -0,0 +1,134 @@
+
+  
+    ChaiScript
+    chai
+    chaiscript
+    *.chai
+    text/x-chaiscript
+    application/x-chaiscript
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml
new file mode 100644
index 000000000000..c89cafc6abfd
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml
@@ -0,0 +1,143 @@
+
+  
+    Chapel
+    chapel
+    chpl
+    *.chpl
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml
new file mode 100644
index 000000000000..284457c6c668
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml
@@ -0,0 +1,55 @@
+
+  
+    Cheetah
+    cheetah
+    spitfire
+    *.tmpl
+    *.spt
+    application/x-cheetah
+    application/x-spitfire
+  
+  
+    
+      
+        
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml
new file mode 100644
index 000000000000..967ba399cb62
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml
@@ -0,0 +1,71 @@
+
+  
+    Clojure
+    clojure
+    clj
+    edn
+    *.clj
+    *.edn
+    text/x-clojure
+    application/x-clojure
+    application/edn
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml
new file mode 100644
index 000000000000..b041cfd016e2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml
@@ -0,0 +1,90 @@
+
+  
+    CMake
+    cmake
+    *.cmake
+    CMakeLists.txt
+    text/x-cmake
+  
+  
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml
new file mode 100644
index 000000000000..a8a80291d43b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml
@@ -0,0 +1,90 @@
+
+  
+    COBOL
+    cobol
+    *.cob
+    *.COB
+    *.cpy
+    *.CPY
+    text/x-cobol
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml
new file mode 100644
index 000000000000..e29722fb83c6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml
@@ -0,0 +1,210 @@
+
+  
+    CoffeeScript
+    coffee-script
+    coffeescript
+    coffee
+    *.coffee
+    text/coffeescript
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml
new file mode 100644
index 000000000000..0fb9a7a4b337
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml
@@ -0,0 +1,184 @@
+
+  
+    Common Lisp
+    common-lisp
+    cl
+    lisp
+    *.cl
+    *.lisp
+    text/x-common-lisp
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml
new file mode 100644
index 000000000000..62f64ff9925e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml
@@ -0,0 +1,136 @@
+
+  
+    Coq
+    coq
+    *.v
+    text/x-coq
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml
new file mode 100644
index 000000000000..94853db33ff7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml
@@ -0,0 +1,762 @@
+
+  
+    Crystal
+    cr
+    crystal
+    *.cr
+    text/x-crystal
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml
new file mode 100644
index 000000000000..6e370c763ed9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml
@@ -0,0 +1,323 @@
+
+  
+    CSS
+    css
+    *.css
+    text/css
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml
new file mode 100644
index 000000000000..2a12f3953977
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml
@@ -0,0 +1,85 @@
+
+  
+    CUE
+    cue
+    *.cue
+    text/x-cue
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml
new file mode 100644
index 000000000000..15dfe4d4f2d8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml
@@ -0,0 +1,372 @@
+
+  
+    Cython
+    cython
+    pyx
+    pyrex
+    *.pyx
+    *.pxd
+    *.pxi
+    text/x-cython
+    application/x-cython
+  
+  
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml
new file mode 100644
index 000000000000..3c030e2272c5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml
@@ -0,0 +1,133 @@
+
+  
+    D
+    d
+    *.d
+    *.di
+    text/x-d
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml
new file mode 100644
index 000000000000..f1b454fafc94
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml
@@ -0,0 +1,213 @@
+
+  
+    Dart
+    dart
+    *.dart
+    text/x-dart
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml
new file mode 100644
index 000000000000..2bb3a1aa7f5b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml
@@ -0,0 +1,39 @@
+
+  
+    Dax
+    dax
+    *.dax
+  
+  
+    
+      
+      
+      
+      
+      
+        
+      
+      
+        
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml
new file mode 100644
index 000000000000..ad71ad471513
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml
@@ -0,0 +1,17 @@
+
+  
+    Desktop file
+    desktop
+    desktop_entry
+    *.desktop
+    application/x-desktop
+  
+  
+    
+      
+      
+      
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml
new file mode 100644
index 000000000000..dc0beb7fdfd2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml
@@ -0,0 +1,52 @@
+
+  
+    Diff
+    diff
+    udiff
+    *.diff
+    *.patch
+    text/x-diff
+    text/x-patch
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml
new file mode 100644
index 000000000000..3c97c222ea7d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml
@@ -0,0 +1,153 @@
+
+  
+    Django/Jinja
+    django
+    jinja
+    application/x-django-templating
+    application/x-jinja
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml
new file mode 100644
index 000000000000..ef8f663f968c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml
@@ -0,0 +1,44 @@
+
+
+  
+    dns
+    zone
+    bind
+    *.zone
+    text/dns
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml
new file mode 100644
index 000000000000..a73c52cd972e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml
@@ -0,0 +1,57 @@
+
+  
+    Docker
+    docker
+    dockerfile
+    Dockerfile
+    Dockerfile.*
+    *.Dockerfile
+    *.docker
+    text/x-dockerfile-config
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml
new file mode 100644
index 000000000000..0edbbdeacebb
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml
@@ -0,0 +1,168 @@
+
+  
+    DTD
+    dtd
+    *.dtd
+    application/xml-dtd
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml
new file mode 100644
index 000000000000..3660d1440428
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml
@@ -0,0 +1,176 @@
+
+  
+    Dylan
+    dylan
+    *.dylan
+    *.dyl
+    *.intr
+    text/x-dylan
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml
new file mode 100644
index 000000000000..df5d62ffa6d2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml
@@ -0,0 +1,90 @@
+
+  
+    EBNF
+    ebnf
+    *.ebnf
+    text/x-ebnf
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml
new file mode 100644
index 000000000000..286f53a24150
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml
@@ -0,0 +1,744 @@
+
+  
+    Elixir
+    elixir
+    ex
+    exs
+    *.ex
+    *.eex
+    *.exs
+    text/x-elixir
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml
new file mode 100644
index 000000000000..ed65efc638d8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml
@@ -0,0 +1,119 @@
+
+  
+    Elm
+    elm
+    *.elm
+    text/x-elm
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml
new file mode 100644
index 000000000000..668bc621eae6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml
@@ -0,0 +1,132 @@
+
+  
+    EmacsLisp
+    emacs
+    elisp
+    emacs-lisp
+    *.el
+    text/x-elisp
+    application/x-elisp
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml
new file mode 100644
index 000000000000..b18658868248
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml
@@ -0,0 +1,166 @@
+
+  
+    Erlang
+    erlang
+    *.erl
+    *.hrl
+    *.es
+    *.escript
+    text/x-erlang
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml
new file mode 100644
index 000000000000..4743b9a2e8d0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml
@@ -0,0 +1,412 @@
+
+  
+    Factor
+    factor
+    *.factor
+    text/x-factor
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml
new file mode 100644
index 000000000000..b9b6d59506e9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml
@@ -0,0 +1,68 @@
+
+  
+    Fennel
+    fennel
+    fnl
+    *.fennel
+    text/x-fennel
+    application/x-fennel
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml
new file mode 100644
index 000000000000..deb78145e32a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml
@@ -0,0 +1,159 @@
+
+  
+    Fish
+    fish
+    fishshell
+    *.fish
+    *.load
+    application/x-fish
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml
new file mode 100644
index 000000000000..31096a225d15
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml
@@ -0,0 +1,78 @@
+
+  
+    Forth
+    forth
+    *.frt
+    *.fth
+    *.fs
+    application/x-forth
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml
new file mode 100644
index 000000000000..6140e704146d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml
@@ -0,0 +1,102 @@
+
+  
+    Fortran
+    fortran
+    f90
+    *.f03
+    *.f90
+    *.f95
+    *.F03
+    *.F90
+    *.F95
+    text/x-fortran
+    true
+  
+  
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml
new file mode 100644
index 000000000000..11343c0e7382
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml
@@ -0,0 +1,71 @@
+
+  
+    FortranFixed
+    fortranfixed
+    *.f
+    *.F
+    text/x-fortran
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml
new file mode 100644
index 000000000000..e1c19ffb4580
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml
@@ -0,0 +1,245 @@
+
+  
+    FSharp
+    fsharp
+    *.fs
+    *.fsi
+    text/x-fsharp
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml
new file mode 100644
index 000000000000..7557bce0f22f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml
@@ -0,0 +1,150 @@
+
+  
+    GAS
+    gas
+    asm
+    *.s
+    *.S
+    text/x-gas
+    0.1
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml
new file mode 100644
index 000000000000..811f38d02836
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml
@@ -0,0 +1,259 @@
+
+  
+    GDScript
+    gdscript
+    gd
+    *.gd
+    text/x-gdscript
+    application/x-gdscript
+    0.1
+    
+      
+    
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml
new file mode 100644
index 000000000000..b50c9dd7b6c5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml
@@ -0,0 +1,270 @@
+
+  
+    GDScript3
+    gdscript3
+    gd3
+    *.gd
+    text/x-gdscript
+    application/x-gdscript
+    
+      
+      
+      
+    
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml
new file mode 100644
index 000000000000..c53a2cbb5b84
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml
@@ -0,0 +1,263 @@
+
+  
+    Gherkin
+    cucumber
+    Cucumber
+    gherkin
+    Gherkin
+    *.feature
+    *.FEATURE
+    text/x-gherkin
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gleam.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gleam.xml
new file mode 100644
index 000000000000..c706e9620280
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gleam.xml
@@ -0,0 +1,117 @@
+
+  
+    Gleam
+    gleam>
+    *.gleam
+    text/x-gleam
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml
new file mode 100644
index 000000000000..ca0b696de615
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml
@@ -0,0 +1,65 @@
+
+  
+    GLSL
+    glsl
+    *.vert
+    *.frag
+    *.geo
+    text/x-glslsrc
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml
new file mode 100644
index 000000000000..ee6a245f38ae
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml
@@ -0,0 +1,289 @@
+
+  
+    Gnuplot
+    gnuplot
+    *.plot
+    *.plt
+    text/x-gnuplot
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml
new file mode 100644
index 000000000000..36f737b93f92
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml
@@ -0,0 +1,114 @@
+
+  
+    Go Template
+    go-template
+    *.gotmpl
+    *.go.tmpl
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml
new file mode 100644
index 000000000000..b06227357df8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml
@@ -0,0 +1,88 @@
+
+  
+    GraphQL
+    graphql
+    graphqls
+    gql
+    *.graphql
+    *.graphqls
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml
new file mode 100644
index 000000000000..3af0a43e7af7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml
@@ -0,0 +1,90 @@
+
+  
+    Groff
+    groff
+    nroff
+    man
+    *.[1-9]
+    *.1p
+    *.3pm
+    *.man
+    application/x-troff
+    text/troff
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml
new file mode 100644
index 000000000000..3cca2e9a5bc9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml
@@ -0,0 +1,135 @@
+
+  
+    Groovy
+    groovy
+    *.groovy
+    *.gradle
+    text/x-groovy
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml
new file mode 100644
index 000000000000..7cf2a648a9ba
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml
@@ -0,0 +1,147 @@
+
+  
+    Handlebars
+    handlebars
+    hbs
+    *.handlebars
+    *.hbs
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml
new file mode 100644
index 000000000000..ea63642284c3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml
@@ -0,0 +1,98 @@
+
+  
+    Hare
+    hare
+    *.ha
+    text/x-hare
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml
new file mode 100644
index 000000000000..5f805d6da0cb
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml
@@ -0,0 +1,272 @@
+
+  
+    Haskell
+    haskell
+    hs
+    *.hs
+    text/x-haskell
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml
new file mode 100644
index 000000000000..d3ed208af965
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml
@@ -0,0 +1,143 @@
+
+  
+    HCL
+    hcl
+    *.hcl
+    application/x-hcl
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml
new file mode 100644
index 000000000000..a6f28eab1ae2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml
@@ -0,0 +1,189 @@
+
+  
+    Hexdump
+    hexdump
+  
+  
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml
new file mode 100644
index 000000000000..64e667dd7c25
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml
@@ -0,0 +1,149 @@
+
+  
+    HLB
+    hlb
+    *.hlb
+  
+  
+    
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+        
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+        
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml
new file mode 100644
index 000000000000..41ab32395bc0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml
@@ -0,0 +1,110 @@
+
+  
+    HLSL
+    hlsl
+    *.hlsl
+    *.hlsli
+    *.cginc
+    *.fx
+    *.fxh
+    text/x-hlsl
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml
new file mode 100644
index 000000000000..cd2d9d16b2af
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml
@@ -0,0 +1,252 @@
+
+  
+    HolyC
+    holyc
+    *.HC
+    *.hc
+    *.HH
+    *.hh
+    *.hc.z
+    *.HC.Z
+    text/x-chdr
+    text/x-csrc
+    image/x-xbitmap
+    image/x-xpixmap
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+          
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml
new file mode 100644
index 000000000000..2f1a8a9790fe
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml
@@ -0,0 +1,159 @@
+
+  
+    HTML
+    html
+    *.html
+    *.htm
+    *.xhtml
+    *.xslt
+    text/html
+    application/xhtml+xml
+    true
+    true
+    true
+  
+  
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml
new file mode 100644
index 000000000000..a0dae46aec1f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml
@@ -0,0 +1,104 @@
+
+  
+    Hy
+    hylang
+    *.hy
+    text/x-hy
+    application/x-hy
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml
new file mode 100644
index 000000000000..9592d88228f2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml
@@ -0,0 +1,216 @@
+
+  
+    Idris
+    idris
+    idr
+    *.idr
+    text/x-idris
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml
new file mode 100644
index 000000000000..1cc020570583
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml
@@ -0,0 +1,47 @@
+
+  
+    Igor
+    igor
+    igorpro
+    *.ipf
+    text/ipf
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml
new file mode 100644
index 000000000000..08f3870be284
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml
@@ -0,0 +1,45 @@
+
+  
+    INI
+    ini
+    cfg
+    dosini
+    *.ini
+    *.cfg
+    *.inf
+    *.service
+    *.socket
+    .gitconfig
+    .editorconfig
+    pylintrc
+    .pylintrc
+    text/x-ini
+    text/inf
+    0.1 
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml
new file mode 100644
index 000000000000..9ad94fa52a25
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml
@@ -0,0 +1,71 @@
+
+  
+    Io
+    io
+    *.io
+    text/x-iosrc
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml
new file mode 100644
index 000000000000..645cb05d5098
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml
@@ -0,0 +1,96 @@
+
+  
+    ISCdhcpd
+    iscdhcpd
+    dhcpd.conf
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+         
+      
+      
+         
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml
new file mode 100644
index 000000000000..872d08122bbc
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml
@@ -0,0 +1,157 @@
+
+  
+    J
+    j
+    *.ijs
+    text/x-j
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml
new file mode 100644
index 000000000000..3ce33ff6dc61
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml
@@ -0,0 +1,193 @@
+
+  
+    Java
+    java
+    *.java
+    text/x-java
+    true
+  
+  
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml
new file mode 100644
index 000000000000..efe80ed37583
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml
@@ -0,0 +1,160 @@
+
+  
+    JavaScript
+    js
+    javascript
+    *.js
+    *.jsm
+    *.mjs
+    *.cjs
+    application/javascript
+    application/x-javascript
+    text/x-javascript
+    text/javascript
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml
new file mode 100644
index 000000000000..3473cfd14f49
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml
@@ -0,0 +1,111 @@
+
+  
+    JSON
+    json
+    *.json
+    *.avsc
+    application/json
+    true
+    true
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml
new file mode 100644
index 000000000000..776dcdbcbb27
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml
@@ -0,0 +1,400 @@
+
+  
+    Julia
+    julia
+    jl
+    *.jl
+    text/x-julia
+    application/x-julia
+  
+  
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml
new file mode 100644
index 000000000000..92c785d0c31e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml
@@ -0,0 +1,98 @@
+
+  
+    Jungle
+    jungle
+    *.jungle
+    text/x-jungle
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml
new file mode 100644
index 000000000000..09c638a94292
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml
@@ -0,0 +1,223 @@
+
+  
+    Kotlin
+    kotlin
+    *.kt
+    text/x-kotlin
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml
new file mode 100644
index 000000000000..1319e5c838c2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml
@@ -0,0 +1,42 @@
+
+  
+    Lighttpd configuration file
+    lighty
+    lighttpd
+    text/x-lighttpd-conf
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml
new file mode 100644
index 000000000000..f24f1522b2a8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml
@@ -0,0 +1,73 @@
+
+  
+    LLVM
+    llvm
+    *.ll
+    text/x-llvm
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml
new file mode 100644
index 000000000000..903d4581feb8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml
@@ -0,0 +1,158 @@
+
+  
+    Lua
+    lua
+    *.lua
+    *.wlua
+    text/x-lua
+    application/x-lua
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml
new file mode 100644
index 000000000000..a82a7f8d5656
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml
@@ -0,0 +1,131 @@
+
+  
+    Makefile
+    make
+    makefile
+    mf
+    bsdmake
+    *.mak
+    *.mk
+    Makefile
+    makefile
+    Makefile.*
+    GNUmakefile
+    BSDmakefile
+    Justfile
+    justfile
+    .justfile
+    text/x-makefile
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml
new file mode 100644
index 000000000000..782414087b24
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml
@@ -0,0 +1,120 @@
+
+  
+    Mako
+    mako
+    *.mao
+    application/x-mako
+  
+  
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml
new file mode 100644
index 000000000000..5873f2afc53f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml
@@ -0,0 +1,89 @@
+
+  
+    Mason
+    mason
+    *.m
+    *.mhtml
+    *.mc
+    *.mi
+    autohandler
+    dhandler
+    application/x-mason
+    0.1
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml
new file mode 100644
index 000000000000..7b22a46c609c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml
@@ -0,0 +1,155 @@
+
+  
+    Materialize SQL dialect
+    materialize
+    mzsql
+    text/x-materializesql
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          6
+          12
+          
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          12
+          4
+          
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml
new file mode 100644
index 000000000000..0b8dfb617ff3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml
@@ -0,0 +1,60 @@
+
+  
+    Mathematica
+    mathematica
+    mma
+    nb
+    *.cdf
+    *.m
+    *.ma
+    *.mt
+    *.mx
+    *.nb
+    *.nbp
+    *.wl
+    application/mathematica
+    application/vnd.wolfram.mathematica
+    application/vnd.wolfram.mathematica.package
+    application/vnd.wolfram.cdf
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml
new file mode 100644
index 000000000000..ebb4e2c33188
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml
@@ -0,0 +1,114 @@
+
+  
+    Matlab
+    matlab
+    *.m
+    text/matlab
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml
new file mode 100644
index 000000000000..331052036d14
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml
@@ -0,0 +1,182 @@
+
+  
+    mcfunction
+    mcfunction
+    *.mcfunction
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml
new file mode 100644
index 000000000000..130047df6ade
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml
@@ -0,0 +1,85 @@
+
+  
+    Meson
+    meson
+    meson.build
+    meson.build
+    meson_options.txt
+    text/x-meson
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml
new file mode 100644
index 000000000000..62d04ba842d3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml
@@ -0,0 +1,270 @@
+
+  
+    Metal
+    metal
+    *.metal
+    text/x-metal
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml
new file mode 100644
index 000000000000..1ad6860f4dfc
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml
@@ -0,0 +1,82 @@
+
+  
+    MiniZinc
+    minizinc
+    MZN
+    mzn
+    *.mzn
+    *.dzn
+    *.fzn
+    text/minizinc
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml
new file mode 100644
index 000000000000..025c3dc56b4c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml
@@ -0,0 +1,73 @@
+
+  
+    MLIR
+    mlir
+    *.mlir
+    text/x-mlir
+  
+  
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml
new file mode 100644
index 000000000000..0bf37bcc3c4a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml
@@ -0,0 +1,245 @@
+
+  
+    Modula-2
+    modula2
+    m2
+    *.def
+    *.mod
+    text/x-modula2
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml
new file mode 100644
index 000000000000..7445a639d3cf
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml
@@ -0,0 +1,153 @@
+
+  
+    MonkeyC
+    monkeyc
+    *.mc
+    text/x-monkeyc
+  
+  
+    
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml
new file mode 100644
index 000000000000..724a19fc62e4
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml
@@ -0,0 +1,90 @@
+
+  
+    MorrowindScript
+    morrowind
+    mwscript
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml
new file mode 100644
index 000000000000..6d03917e5287
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml
@@ -0,0 +1,77 @@
+
+  
+    Myghty
+    myghty
+    *.myt
+    autodelegate
+    application/x-myghty
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml
new file mode 100644
index 000000000000..b6c2046d5e64
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml
@@ -0,0 +1,121 @@
+
+  
+    MySQL
+    mysql
+    mariadb
+    *.sql
+    text/x-mysql
+    text/x-mariadb
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml
new file mode 100644
index 000000000000..defe65b3c415
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml
@@ -0,0 +1,126 @@
+
+  
+    NASM
+    nasm
+    *.asm
+    *.ASM
+    *.nasm
+    text/x-nasm
+    true
+    1.0 
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml
new file mode 100644
index 000000000000..707252b4c188
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml
@@ -0,0 +1,143 @@
+
+  
+    Natural
+    natural
+    *.NSN
+    *.NSP
+    *.NSS
+    *.NSH
+    *.NSG
+    *.NSL
+    *.NSA
+    *.NSM
+    *.NSC
+    *.NS7
+    text/x-natural
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml
new file mode 100644
index 000000000000..74d443b64978
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml
@@ -0,0 +1,123 @@
+
+  
+    NDISASM
+    ndisasm
+    text/x-disasm
+    true
+    0.5 
+  
+  
+    
+        
+            
+            
+        
+    
+    
+        
+            
+            
+        
+        
+            
+        
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml
new file mode 100644
index 000000000000..b93265706b7f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml
@@ -0,0 +1,121 @@
+
+  
+    Newspeak
+    newspeak
+    *.ns2
+    text/x-newspeak
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml
new file mode 100644
index 000000000000..46bdf573dbf1
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml
@@ -0,0 +1,98 @@
+
+  
+    Nginx configuration file
+    nginx
+    nginx.conf
+    text/x-nginx-conf
+  
+  
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml
new file mode 100644
index 000000000000..bfdd6156ab84
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml
@@ -0,0 +1,211 @@
+
+  
+    Nim
+    nim
+    nimrod
+    *.nim
+    *.nimrod
+    text/x-nim
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml
new file mode 100644
index 000000000000..0ed040ca63e3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml
@@ -0,0 +1,258 @@
+
+  
+    Nix
+    nixos
+    nix
+    *.nix
+    text/x-nix
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml
new file mode 100644
index 000000000000..0dc932857c93
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml
@@ -0,0 +1,510 @@
+
+  
+    Objective-C
+    objective-c
+    objectivec
+    obj-c
+    objc
+    *.m
+    *.h
+    text/x-objective-c
+  
+  
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml
new file mode 100644
index 000000000000..12af64b9df70
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml
@@ -0,0 +1,145 @@
+
+  
+    ObjectPascal
+    objectpascal
+    *.pas
+    *.pp
+    *.inc
+    *.dpr
+    *.dpk
+    *.lpr
+    *.lpk
+    text/x-pascal
+  
+  
+    
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+          
+          
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+            
+      
+      
+        
+      
+      
+      
+        
+            
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+      
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml
new file mode 100644
index 000000000000..77f67ac9c990
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml
@@ -0,0 +1,145 @@
+
+  
+    OCaml
+    ocaml
+    *.ml
+    *.mli
+    *.mll
+    *.mly
+    text/x-ocaml
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml
new file mode 100644
index 000000000000..0515d2898669
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml
@@ -0,0 +1,101 @@
+
+  
+    Octave
+    octave
+    *.m
+    text/octave
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml
new file mode 100644
index 000000000000..b9842638535e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml
@@ -0,0 +1,113 @@
+
+  
+    Odin
+    odin
+    *.odin
+    text/odin
+  
+  
+    
+        
+            
+            
+        
+        
+            
+            
+        
+        
+            
+        
+    
+    
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+        
+            
+        
+      
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml
new file mode 100644
index 000000000000..530bad70e75c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml
@@ -0,0 +1,92 @@
+
+  
+    OnesEnterprise
+    ones
+    onesenterprise
+    1S
+    1S:Enterprise
+    *.EPF
+    *.epf
+    *.ERF
+    *.erf
+    application/octet-stream
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml
new file mode 100644
index 000000000000..04a80f3ce275
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml
@@ -0,0 +1,101 @@
+
+  
+    OpenEdge ABL
+    openedge
+    abl
+    progress
+    openedgeabl
+    *.p
+    *.cls
+    *.w
+    *.i
+    text/x-openedge
+    application/x-openedge
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml
new file mode 100644
index 000000000000..84d0fe135ff6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml
@@ -0,0 +1,96 @@
+
+  
+    OpenSCAD
+    openscad
+    *.scad
+    text/x-scad
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml
new file mode 100644
index 000000000000..259e54ef59e7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml
@@ -0,0 +1,329 @@
+
+  
+    Org Mode
+    org
+    orgmode
+    *.org
+    text/org
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          2
+          4
+          
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+          2
+          4
+          
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml
new file mode 100644
index 000000000000..caf7236757e6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml
@@ -0,0 +1,37 @@
+
+  
+    PacmanConf
+    pacmanconf
+    pacman.conf
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml
new file mode 100644
index 000000000000..8ac02ab40e68
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml
@@ -0,0 +1,400 @@
+
+  
+    Perl
+    perl
+    pl
+    *.pl
+    *.pm
+    *.t
+    text/x-perl
+    application/x-perl
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml
new file mode 100644
index 000000000000..c9e22ea5778a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml
@@ -0,0 +1,212 @@
+
+  
+    PHP
+    php
+    php3
+    php4
+    php5
+    *.php
+    *.php[345]
+    *.inc
+    text/x-php
+    true
+    true
+    true
+    3
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml
new file mode 100644
index 000000000000..5acd773991d1
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml
@@ -0,0 +1,105 @@
+
+  
+    Pig
+    pig
+    *.pig
+    text/x-pig
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml
new file mode 100644
index 000000000000..875dcba6ae2e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml
@@ -0,0 +1,73 @@
+
+  
+    PkgConfig
+    pkgconfig
+    *.pc
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml
new file mode 100644
index 000000000000..e3e813ad5b95
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml
@@ -0,0 +1,119 @@
+
+  
+    PL/pgSQL
+    plpgsql
+    text/x-plpgsql
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml
new file mode 100644
index 000000000000..d5e3243ee99b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml
@@ -0,0 +1,21 @@
+
+  
+    plaintext
+    text
+    plain
+    no-highlight
+    *.txt
+    text/plain
+    -1
+  
+  
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml
new file mode 100644
index 000000000000..4ff5a970393b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml
@@ -0,0 +1,105 @@
+
+  
+    Plutus Core
+    plutus-core
+    plc
+    *.plc
+    text/x-plutus-core
+    application/x-plutus-core
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml
new file mode 100644
index 000000000000..4efa9db50836
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml
@@ -0,0 +1,135 @@
+
+  
+    Pony
+    pony
+    *.pony
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml
new file mode 100644
index 000000000000..e901c1855a5d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml
@@ -0,0 +1,155 @@
+
+  
+    PostgreSQL SQL dialect
+    postgresql
+    postgres
+    text/x-postgresql
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          6
+          12
+          
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          12
+          4
+          
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+            
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml
new file mode 100644
index 000000000000..15a3422d0e94
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml
@@ -0,0 +1,89 @@
+
+  
+    PostScript
+    postscript
+    postscr
+    *.ps
+    *.eps
+    application/postscript
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml
new file mode 100644
index 000000000000..f37dab908892
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml
@@ -0,0 +1,58 @@
+
+  
+    POVRay
+    pov
+    *.pov
+    *.inc
+    text/x-povray
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml
new file mode 100644
index 000000000000..0ff1e35574ae
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml
@@ -0,0 +1,51 @@
+
+  
+    PowerQuery
+    powerquery
+    pq
+    *.pq
+    text/x-powerquery
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml
new file mode 100644
index 000000000000..b63a15081df0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml
@@ -0,0 +1,230 @@
+
+  
+    PowerShell
+    powershell
+    posh
+    ps1
+    psm1
+    psd1
+    pwsh
+    *.ps1
+    *.psm1
+    *.psd1
+    text/x-powershell
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml
new file mode 100644
index 000000000000..391bae36b21b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml
@@ -0,0 +1,115 @@
+
+  
+    Prolog
+    prolog
+    *.ecl
+    *.prolog
+    *.pro
+    *.pl
+    text/x-prolog
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml
new file mode 100644
index 000000000000..84558c3b65d5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml
@@ -0,0 +1,119 @@
+
+
+  
+    Promela
+    promela
+    *.pml
+    *.prom
+    *.prm
+    *.promela
+    *.pr
+    *.pm
+    text/x-promela
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml
new file mode 100644
index 000000000000..e95e333d526f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml
@@ -0,0 +1,123 @@
+
+  
+    PromQL
+    promql
+    *.promql
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml
new file mode 100644
index 000000000000..d5ae0a283e18
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml
@@ -0,0 +1,45 @@
+
+  
+    properties
+    java-properties
+    *.properties
+    text/x-java-properties
+  
+  
+    
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+       
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml
new file mode 100644
index 000000000000..157d321f8b22
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml
@@ -0,0 +1,118 @@
+
+  
+    Protocol Buffer
+    protobuf
+    proto
+    *.proto
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml
new file mode 100644
index 000000000000..21f21c65c17c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml
@@ -0,0 +1,161 @@
+
+  
+    PRQL
+    prql
+    *.prql
+    application/prql
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+    
+    
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml
new file mode 100644
index 000000000000..ab375dae4755
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml
@@ -0,0 +1,213 @@
+
+  
+    PSL
+    psl
+    *.psl
+    *.BATCH
+    *.TRIG
+    *.PROC
+    text/x-psl
+  
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml
new file mode 100644
index 000000000000..fbb587cf5ae8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml
@@ -0,0 +1,100 @@
+
+  
+    Puppet
+    puppet
+    *.pp
+  
+  
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml
new file mode 100644
index 000000000000..a58686ff1091
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml
@@ -0,0 +1,593 @@
+
+  
+    Python
+    python
+    py
+    sage
+    python3
+    py3
+    *.py
+    *.pyi
+    *.pyw
+    *.jy
+    *.sage
+    *.sc
+    SConstruct
+    SConscript
+    *.bzl
+    BUCK
+    BUILD
+    BUILD.bazel
+    WORKSPACE
+    WORKSPACE.bzlmod
+    WORKSPACE.bazel
+    MODULE.bazel
+    REPO.bazel
+    *.tac
+    text/x-python
+    application/x-python
+    text/x-python3
+    application/x-python3
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml
new file mode 100644
index 000000000000..3297a2260e2e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml
@@ -0,0 +1,356 @@
+
+  
+    Python 2
+    python2
+    py2
+    text/x-python2
+    application/x-python2
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml
new file mode 100644
index 000000000000..193fe18074e6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml
@@ -0,0 +1,173 @@
+
+  
+    QBasic
+    qbasic
+    basic
+    *.BAS
+    *.bas
+    text/basic
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml
new file mode 100644
index 000000000000..43eb3eb3db5d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml
@@ -0,0 +1,113 @@
+
+  
+    QML
+    qml
+    qbs
+    *.qml
+    *.qbs
+    application/x-qml
+    application/x-qt.qbs+qml
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml
new file mode 100644
index 000000000000..c1fba4e5ef9a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml
@@ -0,0 +1,128 @@
+
+  
+    R
+    splus
+    s
+    r
+    *.S
+    *.R
+    *.r
+    .Rhistory
+    .Rprofile
+    .Renviron
+    text/S-plus
+    text/S
+    text/x-r-source
+    text/x-r
+    text/x-R
+    text/x-r-history
+    text/x-r-profile
+    0.1 
+  
+  
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml
new file mode 100644
index 000000000000..6cdd3031292f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml
@@ -0,0 +1,260 @@
+
+  
+    Racket
+    racket
+    rkt
+    *.rkt
+    *.rktd
+    *.rktl
+    text/x-racket
+    application/x-racket
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml
new file mode 100644
index 000000000000..69638d2ecd3c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml
@@ -0,0 +1,149 @@
+
+  
+    Ragel
+    ragel
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml
new file mode 100644
index 000000000000..a4109b0941da
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml
@@ -0,0 +1,236 @@
+
+  
+    react
+    jsx
+    react
+    *.jsx
+    *.react
+    text/jsx
+    text/typescript-jsx
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml
new file mode 100644
index 000000000000..8b7bcc50640d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml
@@ -0,0 +1,147 @@
+
+  
+    ReasonML
+    reason
+    reasonml
+    *.re
+    *.rei
+    text/x-reasonml
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml
new file mode 100644
index 000000000000..501d38033824
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml
@@ -0,0 +1,68 @@
+
+  
+    reg
+    registry
+    *.reg
+    text/x-windows-registry
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml
new file mode 100644
index 000000000000..517b7133b06b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml
@@ -0,0 +1,94 @@
+
+  
+    Rego
+    rego
+    *.rego
+  
+  
+    
+      
+        
+      
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml
new file mode 100644
index 000000000000..e682500cbfa1
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml
@@ -0,0 +1,127 @@
+
+  
+    Rexx
+    rexx
+    arexx
+    *.rexx
+    *.rex
+    *.rx
+    *.arexx
+    text/x-rexx
+    true
+    true
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml
new file mode 100644
index 000000000000..8362772a6fbe
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml
@@ -0,0 +1,58 @@
+
+
+  
+    RPMSpec
+    spec
+    *.spec
+    text/x-rpm-spec
+  
+  
+    
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml
new file mode 100644
index 000000000000..baa7e435946a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml
@@ -0,0 +1,724 @@
+
+  
+    Ruby
+    rb
+    ruby
+    duby
+    *.rb
+    *.rbw
+    Rakefile
+    *.rake
+    *.gemspec
+    *.rbx
+    *.duby
+    Gemfile
+    Vagrantfile
+    text/x-ruby
+    application/x-ruby
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml
new file mode 100644
index 000000000000..083b96ffe0b7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml
@@ -0,0 +1,375 @@
+
+  
+    Rust
+    rust
+    rs
+    *.rs
+    *.rs.in
+    text/rust
+    text/x-rust
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml
new file mode 100644
index 000000000000..af1107bf9b30
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml
@@ -0,0 +1,191 @@
+
+  
+    SAS
+    sas
+    *.SAS
+    *.sas
+    text/x-sas
+    text/sas
+    application/x-sas
+    true
+  
+  
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml
new file mode 100644
index 000000000000..f80159491f38
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml
@@ -0,0 +1,362 @@
+
+  
+    Sass
+    sass
+    *.sass
+    text/x-sass
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml
new file mode 100644
index 000000000000..2f8ddd49ecac
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml
@@ -0,0 +1,274 @@
+
+  
+    Scala
+    scala
+    *.scala
+    text/x-scala
+    true
+  
+  
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml
new file mode 100644
index 000000000000..0198bd7226fe
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml
@@ -0,0 +1,106 @@
+
+  
+    Scheme
+    scheme
+    scm
+    *.scm
+    *.ss
+    text/x-scheme
+    application/x-scheme
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml
new file mode 100644
index 000000000000..9e109495fd17
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml
@@ -0,0 +1,98 @@
+
+  
+    Scilab
+    scilab
+    *.sci
+    *.sce
+    *.tst
+    text/scilab
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml
new file mode 100644
index 000000000000..ee060fc2b65b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml
@@ -0,0 +1,373 @@
+
+  
+    SCSS
+    scss
+    *.scss
+    text/x-scss
+    true
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml
new file mode 100644
index 000000000000..2209aa77f51d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml
@@ -0,0 +1,28 @@
+
+  
+    Sed
+    sed
+    gsed
+    ssed
+    *.sed
+    *.[gs]sed
+    text/x-sed
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      None
+      
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml
new file mode 100644
index 000000000000..fc605638a605
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml
@@ -0,0 +1,61 @@
+
+  
+    Sieve
+    sieve
+    *.siv
+    *.sieve
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml
new file mode 100644
index 000000000000..e468766d1e6d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml
@@ -0,0 +1,73 @@
+
+
+  
+    Smali
+    smali
+    *.smali
+    text/smali
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+    
+    
+      
+    
+    
+      
+      
+    
+    
+      
+    
+    
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml
new file mode 100644
index 000000000000..00271118f938
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml
@@ -0,0 +1,294 @@
+
+  
+    Smalltalk
+    smalltalk
+    squeak
+    st
+    *.st
+    text/x-smalltalk
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml
new file mode 100644
index 000000000000..dd7752c586bc
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml
@@ -0,0 +1,79 @@
+
+  
+    Smarty
+    smarty
+    *.tpl
+    application/x-smarty
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml
new file mode 100644
index 000000000000..f53dbcba124a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml
@@ -0,0 +1,95 @@
+
+  
+    Snobol
+    snobol
+    *.snobol
+    text/x-snobol
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml
new file mode 100644
index 000000000000..04403c84053b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml
@@ -0,0 +1,279 @@
+
+  
+    Solidity
+    sol
+    solidity
+    *.sol
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml
new file mode 100644
index 000000000000..caca401e3e3f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml
@@ -0,0 +1,59 @@
+
+  
+    SourcePawn
+    sp
+    *.sp
+    *.inc
+    text/x-sourcepawn
+  
+  
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml
new file mode 100644
index 000000000000..7dc65af71f59
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml
@@ -0,0 +1,160 @@
+
+  
+    SPARQL
+    sparql
+    *.rq
+    *.sparql
+    application/sparql-query
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml
new file mode 100644
index 000000000000..b542b65fe9cd
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml
@@ -0,0 +1,90 @@
+
+  
+    SQL
+    sql
+    *.sql
+    text/x-sql
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml
new file mode 100644
index 000000000000..cbd8dbce3d9f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml
@@ -0,0 +1,63 @@
+
+  
+    SquidConf
+    squidconf
+    squid.conf
+    squid
+    squid.conf
+    text/x-squidconf
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml
new file mode 100644
index 000000000000..39cf4f2af2b9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml
@@ -0,0 +1,548 @@
+
+  
+    Standard ML
+    sml
+    *.sml
+    *.sig
+    *.fun
+    text/x-standardml
+    application/x-standardml
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml
new file mode 100644
index 000000000000..56b4f9238279
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml
@@ -0,0 +1,85 @@
+
+  
+    stas
+    *.stas
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml
new file mode 100644
index 000000000000..c2d88073acfa
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml
@@ -0,0 +1,132 @@
+
+  
+    Stylus
+    stylus
+    *.styl
+    text/x-styl
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml
new file mode 100644
index 000000000000..416bf90c54a5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml
@@ -0,0 +1,207 @@
+
+  
+    Swift
+    swift
+    *.swift
+    text/x-swift
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml
new file mode 100644
index 000000000000..e31bfc29f93f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml
@@ -0,0 +1,63 @@
+
+  
+    SYSTEMD
+    systemd
+    *.automount
+    *.device
+    *.dnssd
+    *.link
+    *.mount
+    *.netdev
+    *.network
+    *.path
+    *.scope
+    *.service
+    *.slice
+    *.socket
+    *.swap
+    *.target
+    *.timer
+    text/plain
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+    
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml
new file mode 100644
index 000000000000..fac3da2356af
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml
@@ -0,0 +1,181 @@
+
+  
+    systemverilog
+    systemverilog
+    sv
+    *.sv
+    *.svh
+    text/x-systemverilog
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml
new file mode 100644
index 000000000000..a020ce804b2b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml
@@ -0,0 +1,69 @@
+
+  
+    TableGen
+    tablegen
+    *.td
+    text/x-tablegen
+  
+  
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml
new file mode 100644
index 000000000000..a071d4c1a988
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml
@@ -0,0 +1,43 @@
+
+
+  
+    Tal
+    tal
+    uxntal
+    *.tal
+    text/x-uxntal
+  
+  
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+  
+
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml
new file mode 100644
index 000000000000..1347f53968fe
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml
@@ -0,0 +1,135 @@
+
+  
+    TASM
+    tasm
+    *.asm
+    *.ASM
+    *.tasm
+    text/x-tasm
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml
new file mode 100644
index 000000000000..7ed69bc26ffb
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml
@@ -0,0 +1,272 @@
+
+  
+    Tcl
+    tcl
+    *.tcl
+    *.rvt
+    text/x-tcl
+    text/x-script.tcl
+    application/x-tcl
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml
new file mode 100644
index 000000000000..9895643c4b75
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml
@@ -0,0 +1,121 @@
+
+  
+    Tcsh
+    tcsh
+    csh
+    *.tcsh
+    *.csh
+    application/x-csh
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml
new file mode 100644
index 000000000000..e863bbd058a0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml
@@ -0,0 +1,75 @@
+
+  
+    Termcap
+    termcap
+    termcap
+    termcap.src
+  
+  
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml
new file mode 100644
index 000000000000..9e8f56ec422b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml
@@ -0,0 +1,84 @@
+
+  
+    Terminfo
+    terminfo
+    terminfo
+    terminfo.src
+  
+  
+    
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml
new file mode 100644
index 000000000000..452f211db3ef
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml
@@ -0,0 +1,140 @@
+
+  
+    Terraform
+    terraform
+    tf
+    *.tf
+    application/x-tf
+    application/x-terraform
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml
new file mode 100644
index 000000000000..809bb9a2974f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml
@@ -0,0 +1,113 @@
+
+  
+    TeX
+    tex
+    latex
+    *.tex
+    *.aux
+    *.toc
+    text/x-tex
+    text/x-latex
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml
new file mode 100644
index 000000000000..f14257d4fb83
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml
@@ -0,0 +1,154 @@
+
+  
+    Thrift
+    thrift
+    *.thrift
+    application/x-thrift
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml
new file mode 100644
index 000000000000..9c98ba534c76
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml
@@ -0,0 +1,44 @@
+
+  
+    TOML
+    toml
+    *.toml
+    Pipfile
+    poetry.lock
+    text/x-toml
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml
new file mode 100644
index 000000000000..3671f61edc10
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml
@@ -0,0 +1,81 @@
+
+  
+    TradingView
+    tradingview
+    tv
+    *.tv
+    text/x-tradingview
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml
new file mode 100644
index 000000000000..b0490aa7c75c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml
@@ -0,0 +1,137 @@
+
+  
+    Transact-SQL
+    tsql
+    t-sql
+    text/x-tsql
+    true
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml
new file mode 100644
index 000000000000..4eab69b731e7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml
@@ -0,0 +1,82 @@
+
+  
+    Turing
+    turing
+    *.turing
+    *.tu
+    text/x-turing
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml
new file mode 100644
index 000000000000..7c572f9ca1b6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml
@@ -0,0 +1,170 @@
+
+  
+    Turtle
+    turtle
+    *.ttl
+    text/turtle
+    application/x-turtle
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml
new file mode 100644
index 000000000000..de95c5f9131d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml
@@ -0,0 +1,155 @@
+
+  
+    Twig
+    twig
+    *.twig
+    application/x-twig
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml
new file mode 100644
index 000000000000..9828e92f5e14
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml
@@ -0,0 +1,279 @@
+
+  
+    TypeScript
+    ts
+    tsx
+    typescript
+    *.ts
+    *.tsx
+    *.mts
+    *.cts
+    text/x-typescript
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+    
+   
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml
new file mode 100644
index 000000000000..bc416d4726d1
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml
@@ -0,0 +1,178 @@
+
+  
+    TypoScript
+    typoscript
+    *.ts
+    text/x-typoscript
+    true
+    0.1
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml
new file mode 100644
index 000000000000..62c42c1537b8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml
@@ -0,0 +1,52 @@
+
+  
+    TypoScriptCssData
+    typoscriptcssdata
+  
+  
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml
new file mode 100644
index 000000000000..1b0af3a4e486
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml
@@ -0,0 +1,52 @@
+
+  
+    TypoScriptHtmlData
+    typoscripthtmldata
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml
new file mode 100644
index 000000000000..054fa8913c8b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml
@@ -0,0 +1,147 @@
+
+  
+    ucode
+    *.uc
+    application/x.ucode
+    text/x.ucode
+    true
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml
new file mode 100644
index 000000000000..e1af3d1cd10e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml
@@ -0,0 +1,355 @@
+
+  
+    V
+    v
+    vlang
+    *.v
+    *.vv
+    v.mod
+    text/x-v
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml
new file mode 100644
index 000000000000..34ce61065051
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml
@@ -0,0 +1,365 @@
+
+  
+    V shell
+    vsh
+    vshell
+    *.vsh
+    text/x-vsh
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+        
+      
+    
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml
new file mode 100644
index 000000000000..17c1acf487f3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml
@@ -0,0 +1,72 @@
+
+
+  
+    Vala
+    vala
+    vapi
+    *.vala
+    *.vapi
+    text/x-vala
+  
+  
+    
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+      
+    
+    
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+      
+    
+    
+      
+      
+      
+      
+    
+    
+      
+    
+    
+      
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml
new file mode 100644
index 000000000000..9f85afd0e51c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml
@@ -0,0 +1,162 @@
+
+  
+    VB.net
+    vb.net
+    vbnet
+    *.vb
+    *.bas
+    text/x-vbnet
+    text/x-vba
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml
new file mode 100644
index 000000000000..cd4b9ff09f2e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml
@@ -0,0 +1,158 @@
+
+  
+    verilog
+    verilog
+    v
+    *.v
+    text/x-verilog
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml
new file mode 100644
index 000000000000..aa420448680d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml
@@ -0,0 +1,171 @@
+
+  
+    VHDL
+    vhdl
+    *.vhdl
+    *.vhd
+    text/x-vhdl
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml
new file mode 100644
index 000000000000..ee84d12924ea
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml
@@ -0,0 +1,48 @@
+
+  
+    VHS
+	vhs
+	tape
+	cassette
+	*.tape
+  
+  
+    
+      
+	    
+	      
+	      
+	      
+	      
+		
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+	
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml
new file mode 100644
index 000000000000..43e6bfa7443c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml
@@ -0,0 +1,85 @@
+
+  
+    VimL
+    vim
+    *.vim
+    .vimrc
+    .exrc
+    .gvimrc
+    _vimrc
+    _exrc
+    _gvimrc
+    vimrc
+    gvimrc
+    text/x-vim
+  
+  
+    
+      
+        
+          
+          
+          
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml
new file mode 100644
index 000000000000..ec3508359cc6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml
@@ -0,0 +1,307 @@
+
+  
+    vue
+    vue
+    vuejs
+    *.vue
+    text/x-vue
+    application/x-vue
+    true
+  
+  
+    
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+        
+      
+    
+    
+      
+        
+          
+          
+        
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+    
+    
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml
new file mode 100644
index 000000000000..c663ee2fe795
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml
@@ -0,0 +1,43 @@
+
+  
+    WDTE
+    *.wdte
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml
new file mode 100644
index 000000000000..ea2b6e1ef35e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml
@@ -0,0 +1,142 @@
+
+  
+    WebGPU Shading Language
+    wgsl
+    *.wgsl
+    text/wgsl
+  
+  
+    
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml
new file mode 100644
index 000000000000..1762c966efb1
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml
@@ -0,0 +1,57 @@
+
+  
+    Whiley
+    whiley
+    *.whiley
+    text/x-whiley
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+            
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+            
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+            
+      
+        
+            
+    
+  
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml
new file mode 100644
index 000000000000..2c6a4d990411
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml
@@ -0,0 +1,95 @@
+
+  
+    XML
+    xml
+    *.xml
+    *.xsl
+    *.rss
+    *.xslt
+    *.xsd
+    *.wsdl
+    *.wsf
+    *.svg
+    *.csproj
+    *.vcxproj
+    *.fsproj
+    text/xml
+    application/xml
+    image/svg+xml
+    application/rss+xml
+    application/atom+xml
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml
new file mode 100644
index 000000000000..53bf43286685
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml
@@ -0,0 +1,35 @@
+
+  
+    Xorg
+    xorg.conf
+    xorg.conf
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml
new file mode 100644
index 000000000000..97a0b6ed9bdd
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml
@@ -0,0 +1,122 @@
+
+  
+    YAML
+    yaml
+    *.yaml
+    *.yml
+    text/x-yaml
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+          
+          
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+          
+          
+          
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml
new file mode 100644
index 000000000000..f3da7ceb3778
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml
@@ -0,0 +1,99 @@
+
+  
+    YANG
+    yang
+    *.yang
+    application/yang
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+          
+          
+          
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+    
+      
+        
+      
+      
+        
+        
+      
+      
+        
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml
new file mode 100644
index 000000000000..5bb77a9adc44
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml
@@ -0,0 +1,74 @@
+
+  
+    Z80 Assembly
+    z80
+    *.z80
+    *.asm
+    true
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml
new file mode 100644
index 000000000000..929f4953206d
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml
@@ -0,0 +1,51 @@
+
+  
+    Zed
+    zed
+    *.zed
+    text/zed
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml
new file mode 100644
index 000000000000..fb51cc1abf77
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml
@@ -0,0 +1,112 @@
+
+  
+    Zig
+    zig
+    *.zig
+    text/zig
+  
+  
+    
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+    
+    
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+      
+      
+        
+        
+      
+      
+        
+      
+      
+        
+      
+    
+  
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/genshi.go b/vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go
similarity index 75%
rename from vendor/github.com/alecthomas/chroma/lexers/g/genshi.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go
index dc4d4b17ded7..7f396f4b8615 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/g/genshi.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go
@@ -1,13 +1,11 @@
-package g
+package lexers
 
 import (
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/internal"
-	. "github.com/alecthomas/chroma/lexers/p"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
 // Genshi Text lexer.
-var GenshiText = internal.Register(MustNewLazyLexer(
+var GenshiText = Register(MustNewLexer(
 	&Config{
 		Name:      "Genshi Text",
 		Aliases:   []string{"genshitext"},
@@ -28,20 +26,20 @@ func genshiTextRules() Rules {
 		},
 		"directive": {
 			{`\n`, Text, Pop(1)},
-			{`(?:def|for|if)\s+.*`, Using(Python), Pop(1)},
-			{`(choose|when|with)([^\S\n]+)(.*)`, ByGroups(Keyword, Text, Using(Python)), Pop(1)},
+			{`(?:def|for|if)\s+.*`, Using("Python"), Pop(1)},
+			{`(choose|when|with)([^\S\n]+)(.*)`, ByGroups(Keyword, Text, Using("Python")), Pop(1)},
 			{`(choose|otherwise)\b`, Keyword, Pop(1)},
 			{`(end\w*)([^\S\n]*)(.*)`, ByGroups(Keyword, Text, Comment), Pop(1)},
 		},
 		"variable": {
-			{`(?)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
+			{`(<\?python)(.*?)(\?>)`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil},
 			{`<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>`, Other, nil},
 			{`<\s*py:[a-zA-Z0-9]+`, NameTag, Push("pytag")},
 			{`<\s*[a-zA-Z0-9:.]+`, NameTag, Push("tag")},
@@ -83,8 +81,8 @@ func genshiMarkupRules() Rules {
 			{`/?\s*>`, NameTag, Pop(1)},
 		},
 		"pyattr": {
-			{`(")(.*?)(")`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)},
-			{`(')(.*?)(')`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)},
+			{`(")(.*?)(")`, ByGroups(LiteralString, Using("Python"), LiteralString), Pop(1)},
+			{`(')(.*?)(')`, ByGroups(LiteralString, Using("Python"), LiteralString), Pop(1)},
 			{`[^\s>]+`, LiteralString, Pop(1)},
 		},
 		"tag": {
@@ -113,7 +111,7 @@ func genshiMarkupRules() Rules {
 			Include("variable"),
 		},
 		"variable": {
-			{`(?>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
 			{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
-			{`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil},
+			{`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil},
 			{`[^\W\d]\w*`, NameOther, nil},
 		},
 	}
 }
 
-func goTemplateRules() Rules {
-	return Rules{
-		"root": {
-			{`{{(- )?/\*(.|\n)*?\*/( -)?}}`, CommentMultiline, nil},
-			{`{{[-]?`, CommentPreproc, Push("template")},
-			{`[^{]+`, Other, nil},
-			{`{`, Other, nil},
-		},
-		"template": {
-			{`[-]?}}`, CommentPreproc, Pop(1)},
-			{`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline
-			{`\(`, Operator, Push("subexpression")},
-			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
-			Include("expression"),
-		},
-		"subexpression": {
-			{`\)`, Operator, Pop(1)},
-			Include("expression"),
-		},
-		"expression": {
-			{`\s+`, Whitespace, nil},
-			{`\(`, Operator, Push("subexpression")},
-			{`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil},
-			{`\||:?=|,`, Operator, nil},
-			{`[$]?[^\W\d]\w*`, NameOther, nil},
-			{`\$|[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil},
-			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
-			{`-?\d+i`, LiteralNumber, nil},
-			{`-?\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
-			{`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
-			{`-?\d+[Ee][-+]\d+i`, LiteralNumber, nil},
-			{`-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
-			{`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
-			{`-?0[0-7]+`, LiteralNumberOct, nil},
-			{`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
-			{`-?0b[01_]+`, LiteralNumberBin, nil},
-			{`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
-			{`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
-			{"`[^`]*`", LiteralString, nil},
-		},
-	}
-}
-
-var GoHTMLTemplate = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
+var GoHTMLTemplate = Register(DelegatingLexer(HTML, MustNewXMLLexer(
+	embedded,
+	"embedded/go_template.xml",
+).SetConfig(
 	&Config{
 		Name:    "Go HTML Template",
 		Aliases: []string{"go-html-template"},
 	},
-	goTemplateRules,
 )))
 
-var GoTextTemplate = internal.Register(MustNewLazyLexer(
+var GoTextTemplate = Register(MustNewXMLLexer(
+	embedded,
+	"embedded/go_template.xml",
+).SetConfig(
 	&Config{
 		Name:    "Go Text Template",
 		Aliases: []string{"go-text-template"},
 	},
-	goTemplateRules,
 ))
diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go b/vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go
similarity index 99%
rename from vendor/github.com/alecthomas/chroma/lexers/h/haxe.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go
index cc8c693efedc..9a72de865bf7 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go
@@ -1,12 +1,11 @@
-package h
+package lexers
 
 import (
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/internal"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
 // Haxe lexer.
-var Haxe = internal.Register(MustNewLazyLexer(
+var Haxe = Register(MustNewLexer(
 	&Config{
 		Name:      "Haxe",
 		Aliases:   []string{"hx", "haxe", "hxsl"},
@@ -631,7 +630,9 @@ func haxePreProcMutator(state *LexerState) error {
 			state.Stack = stack[len(stack)-1]
 		}
 	case "end":
-		stack = stack[:len(stack)-1]
+		if len(stack) > 0 {
+			stack = stack[:len(stack)-1]
+		}
 	}
 
 	if proc == "if" || proc == "elseif" {
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/html.go b/vendor/github.com/alecthomas/chroma/v2/lexers/html.go
new file mode 100644
index 000000000000..c858042bf9d2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/html.go
@@ -0,0 +1,8 @@
+package lexers
+
+import (
+	"github.com/alecthomas/chroma/v2"
+)
+
+// HTML lexer.
+var HTML = chroma.MustNewXMLLexer(embedded, "embedded/html.xml")
diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/http.go b/vendor/github.com/alecthomas/chroma/v2/lexers/http.go
similarity index 81%
rename from vendor/github.com/alecthomas/chroma/lexers/h/http.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/http.go
index c515ed4c1b93..b57cb1b84e29 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/h/http.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/http.go
@@ -1,14 +1,13 @@
-package h
+package lexers
 
 import (
 	"strings"
 
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/internal"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
 // HTTP lexer.
-var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLazyLexer(
+var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
 	&Config{
 		Name:         "HTTP",
 		Aliases:      []string{"http"},
@@ -23,8 +22,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLazyLexer(
 func httpRules() Rules {
 	return Rules{
 		"root": {
-			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
-			{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
+			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([123](?:\.[01])?)(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
+			{`(HTTP)(/)([123](?:\.[01])?)( +)(\d{3})( *)([^\r\n]*)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
 		},
 		"headers": {
 			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
@@ -105,7 +104,7 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
 			}
 		case token.Type == Generic && contentType != "":
 			{
-				lexer := internal.MatchMimeType(contentType)
+				lexer := MatchMimeType(contentType)
 
 				// application/calendar+xml can be treated as application/xml
 				// if there's not a better match.
@@ -113,7 +112,7 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
 					slashPos := strings.Index(contentType, "/")
 					plusPos := strings.LastIndex(contentType, "+")
 					contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
-					lexer = internal.MatchMimeType(contentType)
+					lexer = MatchMimeType(contentType)
 				}
 
 				if lexer == nil {
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go b/vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go
new file mode 100644
index 000000000000..4fa35ad22223
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go
@@ -0,0 +1,79 @@
+package lexers
+
+import (
+	"embed"
+	"io/fs"
+
+	"github.com/alecthomas/chroma/v2"
+)
+
+//go:embed embedded
+var embedded embed.FS
+
+// GlobalLexerRegistry is the global LexerRegistry of Lexers.
+var GlobalLexerRegistry = func() *chroma.LexerRegistry {
+	reg := chroma.NewLexerRegistry()
+	// index(reg)
+	paths, err := fs.Glob(embedded, "embedded/*.xml")
+	if err != nil {
+		panic(err)
+	}
+	for _, path := range paths {
+		reg.Register(chroma.MustNewXMLLexer(embedded, path))
+	}
+	return reg
+}()
+
+// Names of all lexers, optionally including aliases.
+func Names(withAliases bool) []string {
+	return GlobalLexerRegistry.Names(withAliases)
+}
+
+// Get a Lexer by name, alias or file extension.
+//
+// Note that this if there isn't an exact match on name or alias, this will
+// call Match(), so it is not efficient.
+func Get(name string) chroma.Lexer {
+	return GlobalLexerRegistry.Get(name)
+}
+
+// MatchMimeType attempts to find a lexer for the given MIME type.
+func MatchMimeType(mimeType string) chroma.Lexer {
+	return GlobalLexerRegistry.MatchMimeType(mimeType)
+}
+
+// Match returns the first lexer matching filename.
+//
+// Note that this iterates over all file patterns in all lexers, so it's not
+// particularly efficient.
+func Match(filename string) chroma.Lexer {
+	return GlobalLexerRegistry.Match(filename)
+}
+
+// Register a Lexer with the global registry.
+func Register(lexer chroma.Lexer) chroma.Lexer {
+	return GlobalLexerRegistry.Register(lexer)
+}
+
+// Analyse text content and return the "best" lexer..
+func Analyse(text string) chroma.Lexer {
+	return GlobalLexerRegistry.Analyse(text)
+}
+
+// PlaintextRules is used for the fallback lexer as well as the explicit
+// plaintext lexer.
+func PlaintextRules() chroma.Rules {
+	return chroma.Rules{
+		"root": []chroma.Rule{
+			{`.+`, chroma.Text, nil},
+			{`\n`, chroma.Text, nil},
+		},
+	}
+}
+
+// Fallback lexer if no other is found.
+var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{
+	Name:      "fallback",
+	Filenames: []string{"*"},
+	Priority:  -1,
+}, PlaintextRules)
diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go b/vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go
similarity index 82%
rename from vendor/github.com/alecthomas/chroma/lexers/m/markdown.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go
index e50e4702e777..1fb9f5bf380a 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go
@@ -1,13 +1,11 @@
-package m
+package lexers
 
 import (
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/h"
-	"github.com/alecthomas/chroma/lexers/internal"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
 // Markdown lexer.
-var Markdown = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
+var Markdown = Register(DelegatingLexer(HTML, MustNewLexer(
 	&Config{
 		Name:      "markdown",
 		Aliases:   []string{"md", "mkd"},
@@ -29,11 +27,7 @@ func markdownRules() Rules {
 			{"^(```\\n)([\\w\\W]*?)(^```$)", ByGroups(String, Text, String), nil},
 			{
 				"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
-				UsingByGroup(
-					internal.Get,
-					2, 4,
-					String, String, String, Text, String,
-				),
+				UsingByGroup(2, 4, String, String, String, Text, String),
 				nil,
 			},
 			Include("inline"),
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go b/vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go
new file mode 100644
index 000000000000..32e94c2f2ef2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go
@@ -0,0 +1,33 @@
+package lexers
+
+import (
+	"regexp"
+)
+
+var (
+	mysqlAnalyserNameBetweenBacktickRe = regexp.MustCompile("`[a-zA-Z_]\\w*`")
+	mysqlAnalyserNameBetweenBracketRe  = regexp.MustCompile(`\[[a-zA-Z_]\w*\]`)
+)
+
+func init() { // nolint: gochecknoinits
+	Get("mysql").
+		SetAnalyser(func(text string) float32 {
+			nameBetweenBacktickCount := len(mysqlAnalyserNameBetweenBacktickRe.FindAllString(text, -1))
+			nameBetweenBracketCount := len(mysqlAnalyserNameBetweenBracketRe.FindAllString(text, -1))
+
+			var result float32
+
+			// Same logic as above in the TSQL analysis.
+			dialectNameCount := nameBetweenBacktickCount + nameBetweenBracketCount
+			if dialectNameCount >= 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) {
+				// Found at least twice as many `name` as [name].
+				result += 0.5
+			} else if nameBetweenBacktickCount > nameBetweenBracketCount {
+				result += 0.2
+			} else if nameBetweenBacktickCount > 0 {
+				result += 0.1
+			}
+
+			return result
+		})
+}
diff --git a/vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go b/vendor/github.com/alecthomas/chroma/v2/lexers/php.go
similarity index 55%
rename from vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go
rename to vendor/github.com/alecthomas/chroma/v2/lexers/php.go
index fc2e2eaf94fc..ff82f6eafa96 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/php.go
@@ -1,15 +1,13 @@
-package circular
+package lexers
 
 import (
 	"strings"
 
-	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/h"
-	"github.com/alecthomas/chroma/lexers/internal"
+	. "github.com/alecthomas/chroma/v2" // nolint
 )
 
-// PHTML lexer is PHP in HTML.
-var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
+// phtml lexer is PHP in HTML.
+var _ = Register(DelegatingLexer(HTML, MustNewLexer(
 	&Config{
 		Name:            "PHTML",
 		Aliases:         []string{"phtml"},
@@ -20,20 +18,20 @@ var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
 		EnsureNL:        true,
 		Priority:        2,
 	},
-	phtmlRules,
+	func() Rules {
+		return Get("PHP").(*RegexLexer).MustRules().
+			Rename("root", "php").
+			Merge(Rules{
+				"root": {
+					{`<\?(php)?`, CommentPreproc, Push("php")},
+					{`[^<]+`, Other, nil},
+					{`<`, Other, nil},
+				},
+			})
+	},
 ).SetAnalyser(func(text string) float32 {
 	if strings.Contains(text, ")` +
 					`(.+?)` +
 					`(<\s*/\s*(?:script|style)\s*>)`,
-				UsingByGroup(internal.Get, 2, 4, Other, Other, Other, Other, Other),
+				UsingByGroup(2, 4, Other, Other, Other, Other, Other),
 				nil,
 			},
 			{
@@ -53,21 +50,21 @@ func svelteRules() Rules {
 		"templates": {
 			{`}`, Punctuation, Pop(1)},
 			// Let TypeScript handle strings and the curly braces inside them
-			{`(?]*>`, Using(TypoScriptHTMLData), nil},
+			{`<\S[^\n>]*>`, Using("TypoScriptHTMLData"), nil},
 			{`&[^;\n]*;`, LiteralString, nil},
-			{`(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))`, ByGroups(NameClass, Text, LiteralStringSymbol, Using(TypoScriptCSSData)), nil},
+			{`(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))`, ByGroups(NameClass, Text, LiteralStringSymbol, Using("TypoScriptCSSData")), nil},
 		},
 		"literal": {
 			{`0x[0-9A-Fa-f]+t?`, LiteralNumberHex, nil},
@@ -84,55 +83,3 @@ func typoscriptRules() Rules {
 		},
 	}
 }
-
-// TypoScriptCSSData lexer.
-var TypoScriptCSSData = internal.Register(MustNewLazyLexer(
-	&Config{
-		Name:      "TypoScriptCssData",
-		Aliases:   []string{"typoscriptcssdata"},
-		Filenames: []string{},
-		MimeTypes: []string{},
-	},
-	typoScriptCSSDataRules,
-))
-
-func typoScriptCSSDataRules() Rules {
-	return Rules{
-		"root": {
-			{`(.*)(###\w+###)(.*)`, ByGroups(LiteralString, NameConstant, LiteralString), nil},
-			{`(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})`, ByGroups(LiteralStringSymbol, Operator, NameConstant, NameConstant, LiteralStringSymbol), nil},
-			{`(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)`, ByGroups(LiteralString, LiteralStringSymbol, NameConstant, Operator, NameConstant, LiteralStringSymbol, LiteralString), nil},
-			{`\s+`, Text, nil},
-			{`/\*(?:(?!\*/).)*\*/`, Comment, nil},
-			{`(?,:=.*%+|]`, LiteralString, nil},
-			{`[\w"\-!/&;(){}]+`, LiteralString, nil},
-		},
-	}
-}
-
-// TypoScriptHTMLData lexer.
-var TypoScriptHTMLData = internal.Register(MustNewLazyLexer(
-	&Config{
-		Name:      "TypoScriptHtmlData",
-		Aliases:   []string{"typoscripthtmldata"},
-		Filenames: []string{},
-		MimeTypes: []string{},
-	},
-	typoScriptHTMLDataRules,
-))
-
-func typoScriptHTMLDataRules() Rules {
-	return Rules{
-		"root": {
-			{`(INCLUDE_TYPOSCRIPT)`, NameClass, nil},
-			{`(EXT|FILE|LLL):[^}\n"]*`, LiteralString, nil},
-			{`(.*)(###\w+###)(.*)`, ByGroups(LiteralString, NameConstant, LiteralString), nil},
-			{`(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})`, ByGroups(LiteralStringSymbol, Operator, NameConstant, NameConstant, LiteralStringSymbol), nil},
-			{`(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)`, ByGroups(LiteralString, LiteralStringSymbol, NameConstant, Operator, NameConstant, LiteralStringSymbol, LiteralString), nil},
-			{`\s+`, Text, nil},
-			{`[<>,:=.*%+|]`, LiteralString, nil},
-			{`[\w"\-!/&;(){}#]+`, LiteralString, nil},
-		},
-	}
-}
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/zed.go b/vendor/github.com/alecthomas/chroma/v2/lexers/zed.go
new file mode 100644
index 000000000000..aadc80f06232
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/zed.go
@@ -0,0 +1,24 @@
+package lexers
+
+import (
+	"strings"
+)
+
+// Zed lexer.
+func init() { // nolint: gochecknoinits
+	Get("Zed").SetAnalyser(func(text string) float32 {
+		if strings.Contains(text, "definition ") && strings.Contains(text, "relation ") && strings.Contains(text, "permission ") {
+			return 0.9
+		}
+		if strings.Contains(text, "definition ") {
+			return 0.5
+		}
+		if strings.Contains(text, "relation ") {
+			return 0.5
+		}
+		if strings.Contains(text, "permission ") {
+			return 0.25
+		}
+		return 0.0
+	})
+}
diff --git a/vendor/github.com/alecthomas/chroma/v2/mutators.go b/vendor/github.com/alecthomas/chroma/v2/mutators.go
new file mode 100644
index 000000000000..e80ad97a52db
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/mutators.go
@@ -0,0 +1,201 @@
+package chroma
+
+import (
+	"encoding/xml"
+	"fmt"
+	"strings"
+)
+
+// A Mutator modifies the behaviour of the lexer.
+type Mutator interface {
+	// Mutate the lexer state machine as it is processing.
+	Mutate(state *LexerState) error
+}
+
+// SerialisableMutator is a Mutator that can be serialised and deserialised.
+type SerialisableMutator interface {
+	Mutator
+	MutatorKind() string
+}
+
+// A LexerMutator is an additional interface that a Mutator can implement
+// to modify the lexer when it is compiled.
+type LexerMutator interface {
+	// MutateLexer can be implemented to mutate the lexer itself.
+	//
+	// Rules are the lexer rules, state is the state key for the rule the mutator is associated with.
+	MutateLexer(rules CompiledRules, state string, rule int) error
+}
+
+// A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing.
+type MutatorFunc func(state *LexerState) error
+
+func (m MutatorFunc) Mutate(state *LexerState) error { return m(state) } // nolint
+
+type multiMutator struct {
+	Mutators []Mutator `xml:"mutator"`
+}
+
+func (m *multiMutator) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+	for {
+		token, err := d.Token()
+		if err != nil {
+			return err
+		}
+		switch token := token.(type) {
+		case xml.StartElement:
+			mutator, err := unmarshalMutator(d, token)
+			if err != nil {
+				return err
+			}
+			m.Mutators = append(m.Mutators, mutator)
+
+		case xml.EndElement:
+			return nil
+		}
+	}
+}
+
+func (m *multiMutator) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+	name := xml.Name{Local: "mutators"}
+	if err := e.EncodeToken(xml.StartElement{Name: name}); err != nil {
+		return err
+	}
+	for _, m := range m.Mutators {
+		if err := marshalMutator(e, m); err != nil {
+			return err
+		}
+	}
+	return e.EncodeToken(xml.EndElement{Name: name})
+}
+
+func (m *multiMutator) MutatorKind() string { return "mutators" }
+
+func (m *multiMutator) Mutate(state *LexerState) error {
+	for _, modifier := range m.Mutators {
+		if err := modifier.Mutate(state); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// Mutators applies a set of Mutators in order.
+func Mutators(modifiers ...Mutator) Mutator {
+	return &multiMutator{modifiers}
+}
+
+type includeMutator struct {
+	State string `xml:"state,attr"`
+}
+
+// Include the given state.
+func Include(state string) Rule {
+	return Rule{Mutator: &includeMutator{state}}
+}
+
+func (i *includeMutator) MutatorKind() string { return "include" }
+
+func (i *includeMutator) Mutate(s *LexerState) error {
+	return fmt.Errorf("should never reach here Include(%q)", i.State)
+}
+
+func (i *includeMutator) MutateLexer(rules CompiledRules, state string, rule int) error {
+	includedRules, ok := rules[i.State]
+	if !ok {
+		return fmt.Errorf("invalid include state %q", i.State)
+	}
+	rules[state] = append(rules[state][:rule], append(includedRules, rules[state][rule+1:]...)...)
+	return nil
+}
+
+type combinedMutator struct {
+	States []string `xml:"state,attr"`
+}
+
+func (c *combinedMutator) MutatorKind() string { return "combined" }
+
+// Combined creates a new anonymous state from the given states, and pushes that state.
+func Combined(states ...string) Mutator {
+	return &combinedMutator{states}
+}
+
+func (c *combinedMutator) Mutate(s *LexerState) error {
+	return fmt.Errorf("should never reach here Combined(%v)", c.States)
+}
+
+func (c *combinedMutator) MutateLexer(rules CompiledRules, state string, rule int) error {
+	name := "__combined_" + strings.Join(c.States, "__")
+	if _, ok := rules[name]; !ok {
+		combined := []*CompiledRule{}
+		for _, state := range c.States {
+			rules, ok := rules[state]
+			if !ok {
+				return fmt.Errorf("invalid combine state %q", state)
+			}
+			combined = append(combined, rules...)
+		}
+		rules[name] = combined
+	}
+	rules[state][rule].Mutator = Push(name)
+	return nil
+}
+
+type pushMutator struct {
+	States []string `xml:"state,attr"`
+}
+
+func (p *pushMutator) MutatorKind() string { return "push" }
+
+func (p *pushMutator) Mutate(s *LexerState) error {
+	if len(p.States) == 0 {
+		s.Stack = append(s.Stack, s.State)
+	} else {
+		for _, state := range p.States {
+			if state == "#pop" {
+				s.Stack = s.Stack[:len(s.Stack)-1]
+			} else {
+				s.Stack = append(s.Stack, state)
+			}
+		}
+	}
+	return nil
+}
+
+// Push states onto the stack.
+func Push(states ...string) Mutator {
+	return &pushMutator{states}
+}
+
+type popMutator struct {
+	Depth int `xml:"depth,attr"`
+}
+
+func (p *popMutator) MutatorKind() string { return "pop" }
+
+func (p *popMutator) Mutate(state *LexerState) error {
+	if len(state.Stack) == 0 {
+		return fmt.Errorf("nothing to pop")
+	}
+	state.Stack = state.Stack[:len(state.Stack)-p.Depth]
+	return nil
+}
+
+// Pop state from the stack when rule matches.
+func Pop(n int) Mutator {
+	return &popMutator{n}
+}
+
+// Default returns a Rule that applies a set of Mutators.
+func Default(mutators ...Mutator) Rule {
+	return Rule{Mutator: Mutators(mutators...)}
+}
+
+// Stringify returns the raw string for a set of tokens.
+func Stringify(tokens ...Token) string {
+	out := []string{}
+	for _, t := range tokens {
+		out = append(out, t.Value)
+	}
+	return strings.Join(out, "")
+}
diff --git a/vendor/github.com/alecthomas/chroma/pygments-lexers.txt b/vendor/github.com/alecthomas/chroma/v2/pygments-lexers.txt
similarity index 100%
rename from vendor/github.com/alecthomas/chroma/pygments-lexers.txt
rename to vendor/github.com/alecthomas/chroma/v2/pygments-lexers.txt
diff --git a/vendor/github.com/alecthomas/chroma/quick/quick.go b/vendor/github.com/alecthomas/chroma/v2/quick/quick.go
similarity index 81%
rename from vendor/github.com/alecthomas/chroma/quick/quick.go
rename to vendor/github.com/alecthomas/chroma/v2/quick/quick.go
index 1ed3db7e8ddc..af701d9cc353 100644
--- a/vendor/github.com/alecthomas/chroma/quick/quick.go
+++ b/vendor/github.com/alecthomas/chroma/v2/quick/quick.go
@@ -4,10 +4,10 @@ package quick
 import (
 	"io"
 
-	"github.com/alecthomas/chroma"
-	"github.com/alecthomas/chroma/formatters"
-	"github.com/alecthomas/chroma/lexers"
-	"github.com/alecthomas/chroma/styles"
+	"github.com/alecthomas/chroma/v2"
+	"github.com/alecthomas/chroma/v2/formatters"
+	"github.com/alecthomas/chroma/v2/lexers"
+	"github.com/alecthomas/chroma/v2/styles"
 )
 
 // Highlight some text.
diff --git a/vendor/github.com/alecthomas/chroma/regexp.go b/vendor/github.com/alecthomas/chroma/v2/regexp.go
similarity index 58%
rename from vendor/github.com/alecthomas/chroma/regexp.go
rename to vendor/github.com/alecthomas/chroma/v2/regexp.go
index 4096dfc46fde..0dcb077feb0a 100644
--- a/vendor/github.com/alecthomas/chroma/regexp.go
+++ b/vendor/github.com/alecthomas/chroma/v2/regexp.go
@@ -21,156 +21,6 @@ type Rule struct {
 	Mutator Mutator
 }
 
-// An Emitter takes group matches and returns tokens.
-type Emitter interface {
-	// Emit tokens for the given regex groups.
-	Emit(groups []string, state *LexerState) Iterator
-}
-
-// EmitterFunc is a function that is an Emitter.
-type EmitterFunc func(groups []string, state *LexerState) Iterator
-
-// Emit tokens for groups.
-func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator {
-	return e(groups, state)
-}
-
-// ByGroups emits a token for each matching group in the rule's regex.
-func ByGroups(emitters ...Emitter) Emitter {
-	return EmitterFunc(func(groups []string, state *LexerState) Iterator {
-		iterators := make([]Iterator, 0, len(groups)-1)
-		if len(emitters) != len(groups)-1 {
-			iterators = append(iterators, Error.Emit(groups, state))
-			// panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters))
-		} else {
-			for i, group := range groups[1:] {
-				if emitters[i] != nil {
-					iterators = append(iterators, emitters[i].Emit([]string{group}, state))
-				}
-			}
-		}
-		return Concaterator(iterators...)
-	})
-}
-
-// ByGroupNames emits a token for each named matching group in the rule's regex.
-func ByGroupNames(emitters map[string]Emitter) Emitter {
-	return EmitterFunc(func(groups []string, state *LexerState) Iterator {
-		iterators := make([]Iterator, 0, len(state.NamedGroups)-1)
-		if len(state.NamedGroups)-1 == 0 {
-			if emitter, ok := emitters[`0`]; ok {
-				iterators = append(iterators, emitter.Emit(groups, state))
-			} else {
-				iterators = append(iterators, Error.Emit(groups, state))
-			}
-		} else {
-			ruleRegex := state.Rules[state.State][state.Rule].Regexp
-			for i := 1; i < len(state.NamedGroups); i++ {
-				groupName := ruleRegex.GroupNameFromNumber(i)
-				group := state.NamedGroups[groupName]
-				if emitter, ok := emitters[groupName]; ok {
-					if emitter != nil {
-						iterators = append(iterators, emitter.Emit([]string{group}, state))
-					}
-				} else {
-					iterators = append(iterators, Error.Emit([]string{group}, state))
-				}
-			}
-		}
-		return Concaterator(iterators...)
-	})
-}
-
-// UsingByGroup emits tokens for the matched groups in the regex using a
-// "sublexer". Used when lexing code blocks where the name of a sublexer is
-// contained within the block, for example on a Markdown text block or SQL
-// language block.
-//
-// The sublexer will be retrieved using sublexerGetFunc (typically
-// internal.Get), using the captured value from the matched sublexerNameGroup.
-//
-// If sublexerGetFunc returns a non-nil lexer for the captured sublexerNameGroup,
-// then tokens for the matched codeGroup will be emitted using the retrieved
-// lexer. Otherwise, if the sublexer is nil, then tokens will be emitted from
-// the passed emitter.
-//
-// Example:
-//
-// 	var Markdown = internal.Register(MustNewLexer(
-// 		&Config{
-// 			Name:      "markdown",
-// 			Aliases:   []string{"md", "mkd"},
-// 			Filenames: []string{"*.md", "*.mkd", "*.markdown"},
-// 			MimeTypes: []string{"text/x-markdown"},
-// 		},
-// 		Rules{
-// 			"root": {
-// 				{"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
-// 					UsingByGroup(
-// 						internal.Get,
-// 						2, 4,
-// 						String, String, String, Text, String,
-// 					),
-// 					nil,
-// 				},
-// 			},
-// 		},
-// 	))
-//
-// See the lexers/m/markdown.go for the complete example.
-//
-// Note: panic's if the number emitters does not equal the number of matched
-// groups in the regex.
-func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter {
-	return EmitterFunc(func(groups []string, state *LexerState) Iterator {
-		// bounds check
-		if len(emitters) != len(groups)-1 {
-			panic("UsingByGroup expects number of emitters to be the same as len(groups)-1")
-		}
-
-		// grab sublexer
-		sublexer := sublexerGetFunc(groups[sublexerNameGroup])
-
-		// build iterators
-		iterators := make([]Iterator, len(groups)-1)
-		for i, group := range groups[1:] {
-			if i == codeGroup-1 && sublexer != nil {
-				var err error
-				iterators[i], err = sublexer.Tokenise(nil, groups[codeGroup])
-				if err != nil {
-					panic(err)
-				}
-			} else if emitters[i] != nil {
-				iterators[i] = emitters[i].Emit([]string{group}, state)
-			}
-		}
-
-		return Concaterator(iterators...)
-	})
-}
-
-// Using returns an Emitter that uses a given Lexer for parsing and emitting.
-func Using(lexer Lexer) Emitter {
-	return EmitterFunc(func(groups []string, _ *LexerState) Iterator {
-		it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0])
-		if err != nil {
-			panic(err)
-		}
-		return it
-	})
-}
-
-// UsingSelf is like Using, but uses the current Lexer.
-func UsingSelf(stateName string) Emitter {
-	return EmitterFunc(func(groups []string, state *LexerState) Iterator {
-		it, err := state.Lexer.Tokenise(&TokeniseOptions{State: stateName, Nested: true}, groups[0])
-		if err != nil {
-			panic(err)
-		}
-		return it
-	})
-}
-
 // Words creates a regex that matches any of the given literal words.
 func Words(prefix, suffix string, words ...string) string {
 	sort.Slice(words, func(i, j int) bool {
@@ -225,17 +75,20 @@ func (r Rules) Merge(rules Rules) Rules {
 	return out
 }
 
-// MustNewLazyLexer creates a new Lexer with deferred rules generation or panics.
-func MustNewLazyLexer(config *Config, rulesFunc func() Rules) *RegexLexer {
-	lexer, err := NewLazyLexer(config, rulesFunc)
+// MustNewLexer creates a new Lexer with deferred rules generation or panics.
+func MustNewLexer(config *Config, rules func() Rules) *RegexLexer {
+	lexer, err := NewLexer(config, rules)
 	if err != nil {
 		panic(err)
 	}
 	return lexer
 }
 
-// NewLazyLexer creates a new regex-based Lexer with deferred rules generation.
-func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
+// NewLexer creates a new regex-based Lexer.
+//
+// "rules" is a state machine transition map. Each key is a state. Values are sets of rules
+// that match input, optionally modify lexer state, and output tokens.
+func NewLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
 	if config == nil {
 		config = &Config{}
 	}
@@ -245,31 +98,40 @@ func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
 			return nil, fmt.Errorf("%s: %q is not a valid glob: %w", config.Name, glob, err)
 		}
 	}
-	return &RegexLexer{
-		config:       config,
-		compilerFunc: rulesFunc,
-	}, nil
-}
-
-// MustNewLexer creates a new Lexer or panics.
-//
-// Deprecated: Use MustNewLazyLexer instead.
-func MustNewLexer(config *Config, rules Rules) *RegexLexer { // nolint: forbidigo
-	lexer, err := NewLexer(config, rules) // nolint: forbidigo
-	if err != nil {
-		panic(err)
+	r := &RegexLexer{
+		config:         config,
+		fetchRulesFunc: func() (Rules, error) { return rulesFunc(), nil },
 	}
-	return lexer
-}
-
-// NewLexer creates a new regex-based Lexer.
-//
-// "rules" is a state machine transitition map. Each key is a state. Values are sets of rules
-// that match input, optionally modify lexer state, and output tokens.
-//
-// Deprecated: Use NewLazyLexer instead.
-func NewLexer(config *Config, rules Rules) (*RegexLexer, error) { // nolint: forbidigo
-	return NewLazyLexer(config, func() Rules { return rules })
+	// One-off code to generate XML lexers in the Chroma source tree.
+	// var nameCleanRe = regexp.MustCompile(`[^-+A-Za-z0-9_]`)
+	// name := strings.ToLower(nameCleanRe.ReplaceAllString(config.Name, "_"))
+	// data, err := Marshal(r)
+	// if err != nil {
+	// 	if errors.Is(err, ErrNotSerialisable) {
+	// 		fmt.Fprintf(os.Stderr, "warning: %q: %s\n", name, err)
+	// 		return r, nil
+	// 	}
+	// 	return nil, err
+	// }
+	// _, file, _, ok := runtime.Caller(2)
+	// if !ok {
+	// 	panic("??")
+	// }
+	// fmt.Println(file)
+	// if strings.Contains(file, "/lexers/") {
+	// 	dir := filepath.Join(filepath.Dir(file), "embedded")
+	// 	err = os.MkdirAll(dir, 0700)
+	// 	if err != nil {
+	// 		return nil, err
+	// 	}
+	// 	filename := filepath.Join(dir, name) + ".xml"
+	// 	fmt.Println(filename)
+	// 	err = ioutil.WriteFile(filename, data, 0600)
+	// 	if err != nil {
+	// 		return nil, err
+	// 	}
+	// }
+	return r, nil
 }
 
 // Trace enables debug tracing.
@@ -292,13 +154,14 @@ type CompiledRules map[string][]*CompiledRule
 
 // LexerState contains the state for a single lex.
 type LexerState struct {
-	Lexer *RegexLexer
-	Text  []rune
-	Pos   int
-	Rules CompiledRules
-	Stack []string
-	State string
-	Rule  int
+	Lexer    *RegexLexer
+	Registry *LexerRegistry
+	Text     []rune
+	Pos      int
+	Rules    CompiledRules
+	Stack    []string
+	State    string
+	Rule     int
 	// Group matches.
 	Groups []string
 	// Named Group matches.
@@ -398,31 +261,59 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
 
 // RegexLexer is the default lexer implementation used in Chroma.
 type RegexLexer struct {
+	registry *LexerRegistry // The LexerRegistry this Lexer is associated with, if any.
 	config   *Config
 	analyser func(text string) float32
 	trace    bool
 
-	mu           sync.Mutex
-	compiled     bool
-	rules        map[string][]*CompiledRule
-	compilerFunc func() Rules
-	compileOnce  sync.Once
+	mu             sync.Mutex
+	compiled       bool
+	rawRules       Rules
+	rules          map[string][]*CompiledRule
+	fetchRulesFunc func() (Rules, error)
+	compileOnce    sync.Once
+}
+
+func (r *RegexLexer) String() string {
+	return r.config.Name
+}
+
+// Rules in the Lexer.
+func (r *RegexLexer) Rules() (Rules, error) {
+	if err := r.needRules(); err != nil {
+		return nil, err
+	}
+	return r.rawRules, nil
+}
+
+// SetRegistry the lexer will use to lookup other lexers if necessary.
+func (r *RegexLexer) SetRegistry(registry *LexerRegistry) Lexer {
+	r.registry = registry
+	return r
 }
 
 // SetAnalyser sets the analyser function used to perform content inspection.
-func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer {
+func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) Lexer {
 	r.analyser = analyser
 	return r
 }
 
-func (r *RegexLexer) AnalyseText(text string) float32 { // nolint
+// AnalyseText scores how likely a fragment of text is to match this lexer, between 0.0 and 1.0.
+func (r *RegexLexer) AnalyseText(text string) float32 {
 	if r.analyser != nil {
 		return r.analyser(text)
 	}
-	return 0.0
+	return 0
 }
 
-func (r *RegexLexer) Config() *Config { // nolint
+// SetConfig replaces the Config for this Lexer.
+func (r *RegexLexer) SetConfig(config *Config) *RegexLexer {
+	r.config = config
+	return r
+}
+
+// Config returns the Config for this Lexer.
+func (r *RegexLexer) Config() *Config {
 	return r.config
 }
 
@@ -441,7 +332,7 @@ func (r *RegexLexer) maybeCompile() (err error) {
 					pattern = "(?" + rule.flags + ")" + pattern
 				}
 				pattern = `\G` + pattern
-				rule.Regexp, err = regexp2.Compile(pattern, regexp2.RE2)
+				rule.Regexp, err = regexp2.Compile(pattern, 0)
 				if err != nil {
 					return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
 				}
@@ -473,8 +364,11 @@ restart:
 	return nil
 }
 
-func (r *RegexLexer) compileRules() error {
-	rules := r.compilerFunc()
+func (r *RegexLexer) fetchRules() error {
+	rules, err := r.fetchRulesFunc()
+	if err != nil {
+		return fmt.Errorf("%s: failed to compile rules: %w", r.config.Name, err)
+	}
 	if _, ok := rules["root"]; !ok {
 		return fmt.Errorf("no \"root\" state")
 	}
@@ -496,21 +390,28 @@ func (r *RegexLexer) compileRules() error {
 		}
 	}
 
+	r.rawRules = rules
 	r.rules = compiledRules
 	return nil
 }
 
-func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint
+func (r *RegexLexer) needRules() error {
 	var err error
-	if r.compilerFunc != nil {
+	if r.fetchRulesFunc != nil {
 		r.compileOnce.Do(func() {
-			err = r.compileRules()
+			err = r.fetchRules()
 		})
 	}
-	if err != nil {
-		return nil, err
-	}
 	if err := r.maybeCompile(); err != nil {
+		return err
+	}
+	return err
+}
+
+// Tokenise text using lexer, returning an iterator.
+func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
+	err := r.needRules()
+	if err != nil {
 		return nil, err
 	}
 	if options == nil {
@@ -525,6 +426,7 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
 		newlineAdded = true
 	}
 	state := &LexerState{
+		Registry:       r.registry,
 		newlineAdded:   newlineAdded,
 		options:        options,
 		Lexer:          r,
@@ -536,6 +438,15 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
 	return state.Iterator, nil
 }
 
+// MustRules is like Rules() but will panic on error.
+func (r *RegexLexer) MustRules() Rules {
+	rules, err := r.Rules()
+	if err != nil {
+		panic(err)
+	}
+	return rules
+}
+
 func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule, []string, map[string]string) {
 	for i, rule := range rules {
 		match, err := rule.Regexp.FindRunesMatchStartingAt(text, pos)
diff --git a/vendor/github.com/alecthomas/chroma/lexers/internal/api.go b/vendor/github.com/alecthomas/chroma/v2/registry.go
similarity index 57%
rename from vendor/github.com/alecthomas/chroma/lexers/internal/api.go
rename to vendor/github.com/alecthomas/chroma/v2/registry.go
index 12fa45ff84c6..4742e8c5abca 100644
--- a/vendor/github.com/alecthomas/chroma/lexers/internal/api.go
+++ b/vendor/github.com/alecthomas/chroma/v2/registry.go
@@ -1,12 +1,9 @@
-// Package internal contains common API functions and structures shared between lexer packages.
-package internal
+package chroma
 
 import (
 	"path/filepath"
 	"sort"
 	"strings"
-
-	"github.com/alecthomas/chroma"
 )
 
 var (
@@ -22,20 +19,25 @@ var (
 	}
 )
 
-// Registry of Lexers.
-var Registry = struct {
-	Lexers  chroma.Lexers
-	byName  map[string]chroma.Lexer
-	byAlias map[string]chroma.Lexer
-}{
-	byName:  map[string]chroma.Lexer{},
-	byAlias: map[string]chroma.Lexer{},
+// LexerRegistry is a registry of Lexers.
+type LexerRegistry struct {
+	Lexers  Lexers
+	byName  map[string]Lexer
+	byAlias map[string]Lexer
+}
+
+// NewLexerRegistry creates a new LexerRegistry of Lexers.
+func NewLexerRegistry() *LexerRegistry {
+	return &LexerRegistry{
+		byName:  map[string]Lexer{},
+		byAlias: map[string]Lexer{},
+	}
 }
 
 // Names of all lexers, optionally including aliases.
-func Names(withAliases bool) []string {
+func (l *LexerRegistry) Names(withAliases bool) []string {
 	out := []string{}
-	for _, lexer := range Registry.Lexers {
+	for _, lexer := range l.Lexers {
 		config := lexer.Config()
 		out = append(out, config.Name)
 		if withAliases {
@@ -47,27 +49,27 @@ func Names(withAliases bool) []string {
 }
 
 // Get a Lexer by name, alias or file extension.
-func Get(name string) chroma.Lexer {
-	if lexer := Registry.byName[name]; lexer != nil {
+func (l *LexerRegistry) Get(name string) Lexer {
+	if lexer := l.byName[name]; lexer != nil {
 		return lexer
 	}
-	if lexer := Registry.byAlias[name]; lexer != nil {
+	if lexer := l.byAlias[name]; lexer != nil {
 		return lexer
 	}
-	if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
+	if lexer := l.byName[strings.ToLower(name)]; lexer != nil {
 		return lexer
 	}
-	if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
+	if lexer := l.byAlias[strings.ToLower(name)]; lexer != nil {
 		return lexer
 	}
 
-	candidates := chroma.PrioritisedLexers{}
+	candidates := PrioritisedLexers{}
 	// Try file extension.
-	if lexer := Match("filename." + name); lexer != nil {
+	if lexer := l.Match("filename." + name); lexer != nil {
 		candidates = append(candidates, lexer)
 	}
 	// Try exact filename.
-	if lexer := Match(name); lexer != nil {
+	if lexer := l.Match(name); lexer != nil {
 		candidates = append(candidates, lexer)
 	}
 	if len(candidates) == 0 {
@@ -78,9 +80,9 @@ func Get(name string) chroma.Lexer {
 }
 
 // MatchMimeType attempts to find a lexer for the given MIME type.
-func MatchMimeType(mimeType string) chroma.Lexer {
-	matched := chroma.PrioritisedLexers{}
-	for _, l := range Registry.Lexers {
+func (l *LexerRegistry) MatchMimeType(mimeType string) Lexer {
+	matched := PrioritisedLexers{}
+	for _, l := range l.Lexers {
 		for _, lmt := range l.Config().MimeTypes {
 			if mimeType == lmt {
 				matched = append(matched, l)
@@ -95,11 +97,13 @@ func MatchMimeType(mimeType string) chroma.Lexer {
 }
 
 // Match returns the first lexer matching filename.
-func Match(filename string) chroma.Lexer {
+//
+// Note that this iterates over all file patterns in all lexers, so is not fast.
+func (l *LexerRegistry) Match(filename string) Lexer {
 	filename = filepath.Base(filename)
-	matched := chroma.PrioritisedLexers{}
+	matched := PrioritisedLexers{}
 	// First, try primary filename matches.
-	for _, lexer := range Registry.Lexers {
+	for _, lexer := range l.Lexers {
 		config := lexer.Config()
 		for _, glob := range config.Filenames {
 			ok, err := filepath.Match(glob, filename)
@@ -126,7 +130,7 @@ func Match(filename string) chroma.Lexer {
 	}
 	matched = nil
 	// Next, try filename aliases.
-	for _, lexer := range Registry.Lexers {
+	for _, lexer := range l.Lexers {
 		config := lexer.Config()
 		for _, glob := range config.AliasFilenames {
 			ok, err := filepath.Match(glob, filename)
@@ -155,11 +159,11 @@ func Match(filename string) chroma.Lexer {
 }
 
 // Analyse text content and return the "best" lexer..
-func Analyse(text string) chroma.Lexer {
-	var picked chroma.Lexer
+func (l *LexerRegistry) Analyse(text string) Lexer {
+	var picked Lexer
 	highest := float32(0.0)
-	for _, lexer := range Registry.Lexers {
-		if analyser, ok := lexer.(chroma.Analyser); ok {
+	for _, lexer := range l.Lexers {
+		if analyser, ok := lexer.(Analyser); ok {
 			weight := analyser.AnalyseText(text)
 			if weight > highest {
 				picked = lexer
@@ -170,32 +174,37 @@ func Analyse(text string) chroma.Lexer {
 	return picked
 }
 
-// Register a Lexer with the global registry.
-func Register(lexer chroma.Lexer) chroma.Lexer {
+// Register a Lexer with the LexerRegistry. If the lexer is already registered
+// it will be replaced.
+func (l *LexerRegistry) Register(lexer Lexer) Lexer {
+	lexer.SetRegistry(l)
 	config := lexer.Config()
-	Registry.byName[config.Name] = lexer
-	Registry.byName[strings.ToLower(config.Name)] = lexer
+
+	l.byName[config.Name] = lexer
+	l.byName[strings.ToLower(config.Name)] = lexer
+
 	for _, alias := range config.Aliases {
-		Registry.byAlias[alias] = lexer
-		Registry.byAlias[strings.ToLower(alias)] = lexer
+		l.byAlias[alias] = lexer
+		l.byAlias[strings.ToLower(alias)] = lexer
 	}
-	Registry.Lexers = append(Registry.Lexers, lexer)
+
+	l.Lexers = add(l.Lexers, lexer)
+
 	return lexer
 }
 
-// PlaintextRules is used for the fallback lexer as well as the explicit
-// plaintext lexer.
-func PlaintextRules() chroma.Rules {
-	return chroma.Rules{
-		"root": []chroma.Rule{
-			{`.+`, chroma.Text, nil},
-			{`\n`, chroma.Text, nil},
-		},
+// add adds a lexer to a slice of lexers if it doesn't already exist, or if found will replace it.
+func add(lexers Lexers, lexer Lexer) Lexers {
+	for i, val := range lexers {
+		if val == nil {
+			continue
+		}
+
+		if val.Config().Name == lexer.Config().Name {
+			lexers[i] = lexer
+			return lexers
+		}
 	}
-}
 
-// Fallback lexer if no other is found.
-var Fallback chroma.Lexer = chroma.MustNewLazyLexer(&chroma.Config{
-	Name:      "fallback",
-	Filenames: []string{"*"},
-}, PlaintextRules)
+	return append(lexers, lexer)
+}
diff --git a/vendor/github.com/alecthomas/chroma/remap.go b/vendor/github.com/alecthomas/chroma/v2/remap.go
similarity index 77%
rename from vendor/github.com/alecthomas/chroma/remap.go
rename to vendor/github.com/alecthomas/chroma/v2/remap.go
index cfb5c381470d..bcf5e66d1752 100644
--- a/vendor/github.com/alecthomas/chroma/remap.go
+++ b/vendor/github.com/alecthomas/chroma/v2/remap.go
@@ -10,6 +10,20 @@ func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer {
 	return &remappingLexer{lexer, mapper}
 }
 
+func (r *remappingLexer) AnalyseText(text string) float32 {
+	return r.lexer.AnalyseText(text)
+}
+
+func (r *remappingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
+	r.lexer.SetAnalyser(analyser)
+	return r
+}
+
+func (r *remappingLexer) SetRegistry(registry *LexerRegistry) Lexer {
+	r.lexer.SetRegistry(registry)
+	return r
+}
+
 func (r *remappingLexer) Config() *Config {
 	return r.lexer.Config()
 }
@@ -46,10 +60,10 @@ type TypeMapping []struct {
 //
 // eg. Map "defvaralias" tokens of type NameVariable to NameFunction:
 //
-// 		mapping := TypeMapping{
-// 			{NameVariable, NameFunction, []string{"defvaralias"},
-// 		}
-// 		lexer = TypeRemappingLexer(lexer, mapping)
+//	mapping := TypeMapping{
+//		{NameVariable, NameFunction, []string{"defvaralias"},
+//	}
+//	lexer = TypeRemappingLexer(lexer, mapping)
 func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
 	// Lookup table for fast remapping.
 	lut := map[TokenType]map[string]TokenType{}
diff --git a/vendor/github.com/alecthomas/chroma/v2/renovate.json5 b/vendor/github.com/alecthomas/chroma/v2/renovate.json5
new file mode 100644
index 000000000000..77c7b016ccd4
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/renovate.json5
@@ -0,0 +1,18 @@
+{
+	$schema: "https://docs.renovatebot.com/renovate-schema.json",
+	extends: [
+		"config:recommended",
+		":semanticCommits",
+		":semanticCommitTypeAll(chore)",
+		":semanticCommitScope(deps)",
+		"group:allNonMajor",
+		"schedule:earlyMondays", // Run once a week.
+	],
+	packageRules: [
+		{
+			matchPackageNames: ["golangci-lint"],
+			matchManagers: ["hermit"],
+			enabled: false,
+		},
+	],
+}
diff --git a/vendor/github.com/alecthomas/chroma/v2/serialise.go b/vendor/github.com/alecthomas/chroma/v2/serialise.go
new file mode 100644
index 000000000000..645a5faabcc9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/serialise.go
@@ -0,0 +1,479 @@
+package chroma
+
+import (
+	"compress/gzip"
+	"encoding/xml"
+	"errors"
+	"fmt"
+	"io"
+	"io/fs"
+	"math"
+	"path/filepath"
+	"reflect"
+	"regexp"
+	"strings"
+
+	"github.com/dlclark/regexp2"
+)
+
+// Serialisation of Chroma rules to XML. The format is:
+//
+//	
+//	  
+//	    
+//	      [<$EMITTER ...>]
+//	      [<$MUTATOR ...>]
+//	    
+//	  
+//	
+//
+// eg. Include("String") would become:
+//
+//	
+//	  
+//	
+//
+//	[null, null, {"kind": "include", "state": "String"}]
+//
+// eg. Rule{`\d+`, Text, nil} would become:
+//
+//	
+//	  
+//	
+//
+// eg. Rule{`"`, String, Push("String")}
+//
+//	
+//	  
+//	  
+//	
+//
+// eg. Rule{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
+//
+//	
+//	  
+//	  
+//	
+var (
+	// ErrNotSerialisable is returned if a lexer contains Rules that cannot be serialised.
+	ErrNotSerialisable = fmt.Errorf("not serialisable")
+	emitterTemplates   = func() map[string]SerialisableEmitter {
+		out := map[string]SerialisableEmitter{}
+		for _, emitter := range []SerialisableEmitter{
+			&byGroupsEmitter{},
+			&usingSelfEmitter{},
+			TokenType(0),
+			&usingEmitter{},
+			&usingByGroup{},
+		} {
+			out[emitter.EmitterKind()] = emitter
+		}
+		return out
+	}()
+	mutatorTemplates = func() map[string]SerialisableMutator {
+		out := map[string]SerialisableMutator{}
+		for _, mutator := range []SerialisableMutator{
+			&includeMutator{},
+			&combinedMutator{},
+			&multiMutator{},
+			&pushMutator{},
+			&popMutator{},
+		} {
+			out[mutator.MutatorKind()] = mutator
+		}
+		return out
+	}()
+)
+
+// fastUnmarshalConfig unmarshals only the Config from a serialised lexer.
+func fastUnmarshalConfig(from fs.FS, path string) (*Config, error) {
+	r, err := from.Open(path)
+	if err != nil {
+		return nil, err
+	}
+	defer r.Close()
+	dec := xml.NewDecoder(r)
+	for {
+		token, err := dec.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				return nil, fmt.Errorf("could not find  element")
+			}
+			return nil, err
+		}
+		switch se := token.(type) {
+		case xml.StartElement:
+			if se.Name.Local != "config" {
+				break
+			}
+
+			var config Config
+			err = dec.DecodeElement(&config, &se)
+			if err != nil {
+				return nil, fmt.Errorf("%s: %w", path, err)
+			}
+			return &config, nil
+		}
+	}
+}
+
+// MustNewXMLLexer constructs a new RegexLexer from an XML file or panics.
+func MustNewXMLLexer(from fs.FS, path string) *RegexLexer {
+	lex, err := NewXMLLexer(from, path)
+	if err != nil {
+		panic(err)
+	}
+	return lex
+}
+
+// NewXMLLexer creates a new RegexLexer from a serialised RegexLexer.
+func NewXMLLexer(from fs.FS, path string) (*RegexLexer, error) {
+	config, err := fastUnmarshalConfig(from, path)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, glob := range append(config.Filenames, config.AliasFilenames...) {
+		_, err := filepath.Match(glob, "")
+		if err != nil {
+			return nil, fmt.Errorf("%s: %q is not a valid glob: %w", config.Name, glob, err)
+		}
+	}
+
+	var analyserFn func(string) float32
+
+	if config.Analyse != nil {
+		type regexAnalyse struct {
+			re    *regexp2.Regexp
+			score float32
+		}
+
+		regexAnalysers := make([]regexAnalyse, 0, len(config.Analyse.Regexes))
+
+		for _, ra := range config.Analyse.Regexes {
+			re, err := regexp2.Compile(ra.Pattern, regexp2.None)
+			if err != nil {
+				return nil, fmt.Errorf("%s: %q is not a valid analyser regex: %w", config.Name, ra.Pattern, err)
+			}
+
+			regexAnalysers = append(regexAnalysers, regexAnalyse{re, ra.Score})
+		}
+
+		analyserFn = func(text string) float32 {
+			var score float32
+
+			for _, ra := range regexAnalysers {
+				ok, err := ra.re.MatchString(text)
+				if err != nil {
+					return 0
+				}
+
+				if ok && config.Analyse.First {
+					return float32(math.Min(float64(ra.score), 1.0))
+				}
+
+				if ok {
+					score += ra.score
+				}
+			}
+
+			return float32(math.Min(float64(score), 1.0))
+		}
+	}
+
+	return &RegexLexer{
+		config:   config,
+		analyser: analyserFn,
+		fetchRulesFunc: func() (Rules, error) {
+			var lexer struct {
+				Config
+				Rules Rules `xml:"rules"`
+			}
+			// Try to open .xml fallback to .xml.gz
+			fr, err := from.Open(path)
+			if err != nil {
+				if errors.Is(err, fs.ErrNotExist) {
+					path += ".gz"
+					fr, err = from.Open(path)
+					if err != nil {
+						return nil, err
+					}
+				} else {
+					return nil, err
+				}
+			}
+			defer fr.Close()
+			var r io.Reader = fr
+			if strings.HasSuffix(path, ".gz") {
+				r, err = gzip.NewReader(r)
+				if err != nil {
+					return nil, fmt.Errorf("%s: %w", path, err)
+				}
+			}
+			err = xml.NewDecoder(r).Decode(&lexer)
+			if err != nil {
+				return nil, fmt.Errorf("%s: %w", path, err)
+			}
+			return lexer.Rules, nil
+		},
+	}, nil
+}
+
+// Marshal a RegexLexer to XML.
+func Marshal(l *RegexLexer) ([]byte, error) {
+	type lexer struct {
+		Config Config `xml:"config"`
+		Rules  Rules  `xml:"rules"`
+	}
+
+	rules, err := l.Rules()
+	if err != nil {
+		return nil, err
+	}
+	root := &lexer{
+		Config: *l.Config(),
+		Rules:  rules,
+	}
+	data, err := xml.MarshalIndent(root, "", "  ")
+	if err != nil {
+		return nil, err
+	}
+	re := regexp.MustCompile(`>`)
+	data = re.ReplaceAll(data, []byte(`/>`))
+	return data, nil
+}
+
+// Unmarshal a RegexLexer from XML.
+func Unmarshal(data []byte) (*RegexLexer, error) {
+	type lexer struct {
+		Config Config `xml:"config"`
+		Rules  Rules  `xml:"rules"`
+	}
+	root := &lexer{}
+	err := xml.Unmarshal(data, root)
+	if err != nil {
+		return nil, fmt.Errorf("invalid Lexer XML: %w", err)
+	}
+	lex, err := NewLexer(&root.Config, func() Rules { return root.Rules })
+	if err != nil {
+		return nil, err
+	}
+	return lex, nil
+}
+
+func marshalMutator(e *xml.Encoder, mutator Mutator) error {
+	if mutator == nil {
+		return nil
+	}
+	smutator, ok := mutator.(SerialisableMutator)
+	if !ok {
+		return fmt.Errorf("unsupported mutator: %w", ErrNotSerialisable)
+	}
+	return e.EncodeElement(mutator, xml.StartElement{Name: xml.Name{Local: smutator.MutatorKind()}})
+}
+
+func unmarshalMutator(d *xml.Decoder, start xml.StartElement) (Mutator, error) {
+	kind := start.Name.Local
+	mutator, ok := mutatorTemplates[kind]
+	if !ok {
+		return nil, fmt.Errorf("unknown mutator %q: %w", kind, ErrNotSerialisable)
+	}
+	value, target := newFromTemplate(mutator)
+	if err := d.DecodeElement(target, &start); err != nil {
+		return nil, err
+	}
+	return value().(SerialisableMutator), nil
+}
+
+func marshalEmitter(e *xml.Encoder, emitter Emitter) error {
+	if emitter == nil {
+		return nil
+	}
+	semitter, ok := emitter.(SerialisableEmitter)
+	if !ok {
+		return fmt.Errorf("unsupported emitter %T: %w", emitter, ErrNotSerialisable)
+	}
+	return e.EncodeElement(emitter, xml.StartElement{
+		Name: xml.Name{Local: semitter.EmitterKind()},
+	})
+}
+
+func unmarshalEmitter(d *xml.Decoder, start xml.StartElement) (Emitter, error) {
+	kind := start.Name.Local
+	mutator, ok := emitterTemplates[kind]
+	if !ok {
+		return nil, fmt.Errorf("unknown emitter %q: %w", kind, ErrNotSerialisable)
+	}
+	value, target := newFromTemplate(mutator)
+	if err := d.DecodeElement(target, &start); err != nil {
+		return nil, err
+	}
+	return value().(SerialisableEmitter), nil
+}
+
+func (r Rule) MarshalXML(e *xml.Encoder, _ xml.StartElement) error {
+	start := xml.StartElement{
+		Name: xml.Name{Local: "rule"},
+	}
+	if r.Pattern != "" {
+		start.Attr = append(start.Attr, xml.Attr{
+			Name:  xml.Name{Local: "pattern"},
+			Value: r.Pattern,
+		})
+	}
+	if err := e.EncodeToken(start); err != nil {
+		return err
+	}
+	if err := marshalEmitter(e, r.Type); err != nil {
+		return err
+	}
+	if err := marshalMutator(e, r.Mutator); err != nil {
+		return err
+	}
+	return e.EncodeToken(xml.EndElement{Name: start.Name})
+}
+
+func (r *Rule) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+	for _, attr := range start.Attr {
+		if attr.Name.Local == "pattern" {
+			r.Pattern = attr.Value
+			break
+		}
+	}
+	for {
+		token, err := d.Token()
+		if err != nil {
+			return err
+		}
+		switch token := token.(type) {
+		case xml.StartElement:
+			mutator, err := unmarshalMutator(d, token)
+			if err != nil && !errors.Is(err, ErrNotSerialisable) {
+				return err
+			} else if err == nil {
+				if r.Mutator != nil {
+					return fmt.Errorf("duplicate mutator")
+				}
+				r.Mutator = mutator
+				continue
+			}
+			emitter, err := unmarshalEmitter(d, token)
+			if err != nil && !errors.Is(err, ErrNotSerialisable) { // nolint: gocritic
+				return err
+			} else if err == nil {
+				if r.Type != nil {
+					return fmt.Errorf("duplicate emitter")
+				}
+				r.Type = emitter
+				continue
+			} else {
+				return err
+			}
+
+		case xml.EndElement:
+			return nil
+		}
+	}
+}
+
+type xmlRuleState struct {
+	Name  string `xml:"name,attr"`
+	Rules []Rule `xml:"rule"`
+}
+
+type xmlRules struct {
+	States []xmlRuleState `xml:"state"`
+}
+
+func (r Rules) MarshalXML(e *xml.Encoder, _ xml.StartElement) error {
+	xr := xmlRules{}
+	for state, rules := range r {
+		xr.States = append(xr.States, xmlRuleState{
+			Name:  state,
+			Rules: rules,
+		})
+	}
+	return e.EncodeElement(xr, xml.StartElement{Name: xml.Name{Local: "rules"}})
+}
+
+func (r *Rules) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+	xr := xmlRules{}
+	if err := d.DecodeElement(&xr, &start); err != nil {
+		return err
+	}
+	if *r == nil {
+		*r = Rules{}
+	}
+	for _, state := range xr.States {
+		(*r)[state.Name] = state.Rules
+	}
+	return nil
+}
+
+type xmlTokenType struct {
+	Type string `xml:"type,attr"`
+}
+
+func (t *TokenType) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+	el := xmlTokenType{}
+	if err := d.DecodeElement(&el, &start); err != nil {
+		return err
+	}
+	tt, err := TokenTypeString(el.Type)
+	if err != nil {
+		return err
+	}
+	*t = tt
+	return nil
+}
+
+func (t TokenType) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+	start.Attr = append(start.Attr, xml.Attr{Name: xml.Name{Local: "type"}, Value: t.String()})
+	if err := e.EncodeToken(start); err != nil {
+		return err
+	}
+	return e.EncodeToken(xml.EndElement{Name: start.Name})
+}
+
+// This hijinks is a bit unfortunate but without it we can't deserialise into TokenType.
+func newFromTemplate(template interface{}) (value func() interface{}, target interface{}) {
+	t := reflect.TypeOf(template)
+	if t.Kind() == reflect.Ptr {
+		v := reflect.New(t.Elem())
+		return v.Interface, v.Interface()
+	}
+	v := reflect.New(t)
+	return func() interface{} { return v.Elem().Interface() }, v.Interface()
+}
+
+func (b *Emitters) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+	for {
+		token, err := d.Token()
+		if err != nil {
+			return err
+		}
+		switch token := token.(type) {
+		case xml.StartElement:
+			emitter, err := unmarshalEmitter(d, token)
+			if err != nil {
+				return err
+			}
+			*b = append(*b, emitter)
+
+		case xml.EndElement:
+			return nil
+		}
+	}
+}
+
+func (b Emitters) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+	if err := e.EncodeToken(start); err != nil {
+		return err
+	}
+	for _, m := range b {
+		if err := marshalEmitter(e, m); err != nil {
+			return err
+		}
+	}
+	return e.EncodeToken(xml.EndElement{Name: start.Name})
+}
diff --git a/vendor/github.com/alecthomas/chroma/style.go b/vendor/github.com/alecthomas/chroma/v2/style.go
similarity index 69%
rename from vendor/github.com/alecthomas/chroma/style.go
rename to vendor/github.com/alecthomas/chroma/v2/style.go
index 1319fc42487b..cc8d9a602f18 100644
--- a/vendor/github.com/alecthomas/chroma/style.go
+++ b/vendor/github.com/alecthomas/chroma/v2/style.go
@@ -1,7 +1,10 @@
 package chroma
 
 import (
+	"encoding/xml"
 	"fmt"
+	"io"
+	"sort"
 	"strings"
 )
 
@@ -49,6 +52,10 @@ type StyleEntry struct {
 	NoInherit bool
 }
 
+func (s StyleEntry) MarshalText() ([]byte, error) {
+	return []byte(s.String()), nil
+}
+
 func (s StyleEntry) String() string {
 	out := []string{}
 	if s.Bold != Pass {
@@ -157,9 +164,12 @@ func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder {
 }
 
 func (s *StyleBuilder) Get(ttype TokenType) StyleEntry {
-	// This is less than ideal, but it's the price for having to check errors on each Add().
+	// This is less than ideal, but it's the price for not having to check errors on each Add().
 	entry, _ := ParseStyleEntry(s.entries[ttype])
-	return entry.Inherit(s.parent.Get(ttype))
+	if s.parent != nil {
+		entry = entry.Inherit(s.parent.Get(ttype))
+	}
+	return entry
 }
 
 // Add an entry to the Style map.
@@ -175,6 +185,25 @@ func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder
 	return s
 }
 
+// Transform passes each style entry currently defined in the builder to the supplied
+// function and saves the returned value. This can be used to adjust a style's colours;
+// see Colour's ClampBrightness function, for example.
+func (s *StyleBuilder) Transform(transform func(StyleEntry) StyleEntry) *StyleBuilder {
+	types := make(map[TokenType]struct{})
+	for tt := range s.entries {
+		types[tt] = struct{}{}
+	}
+	if s.parent != nil {
+		for _, tt := range s.parent.Types() {
+			types[tt] = struct{}{}
+		}
+	}
+	for tt := range types {
+		s.AddEntry(tt, transform(s.Get(tt)))
+	}
+	return s
+}
+
 func (s *StyleBuilder) Build() (*Style, error) {
 	style := &Style{
 		Name:    s.name,
@@ -194,6 +223,22 @@ func (s *StyleBuilder) Build() (*Style, error) {
 // StyleEntries mapping TokenType to colour definition.
 type StyleEntries map[TokenType]string
 
+// NewXMLStyle parses an XML style definition.
+func NewXMLStyle(r io.Reader) (*Style, error) {
+	dec := xml.NewDecoder(r)
+	style := &Style{}
+	return style, dec.Decode(style)
+}
+
+// MustNewXMLStyle is like NewXMLStyle but panics on error.
+func MustNewXMLStyle(r io.Reader) *Style {
+	style, err := NewXMLStyle(r)
+	if err != nil {
+		panic(err)
+	}
+	return style
+}
+
 // NewStyle creates a new style definition.
 func NewStyle(name string, entries StyleEntries) (*Style, error) {
 	return NewStyleBuilder(name).AddAll(entries).Build()
@@ -217,6 +262,89 @@ type Style struct {
 	parent  *Style
 }
 
+func (s *Style) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+	if s.parent != nil {
+		return fmt.Errorf("cannot marshal style with parent")
+	}
+	start.Name = xml.Name{Local: "style"}
+	start.Attr = []xml.Attr{{Name: xml.Name{Local: "name"}, Value: s.Name}}
+	if err := e.EncodeToken(start); err != nil {
+		return err
+	}
+	sorted := make([]TokenType, 0, len(s.entries))
+	for ttype := range s.entries {
+		sorted = append(sorted, ttype)
+	}
+	sort.Slice(sorted, func(i, j int) bool { return sorted[i] < sorted[j] })
+	for _, ttype := range sorted {
+		entry := s.entries[ttype]
+		el := xml.StartElement{Name: xml.Name{Local: "entry"}}
+		el.Attr = []xml.Attr{
+			{Name: xml.Name{Local: "type"}, Value: ttype.String()},
+			{Name: xml.Name{Local: "style"}, Value: entry.String()},
+		}
+		if err := e.EncodeToken(el); err != nil {
+			return err
+		}
+		if err := e.EncodeToken(xml.EndElement{Name: el.Name}); err != nil {
+			return err
+		}
+	}
+	return e.EncodeToken(xml.EndElement{Name: start.Name})
+}
+
+func (s *Style) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
+	for _, attr := range start.Attr {
+		if attr.Name.Local == "name" {
+			s.Name = attr.Value
+		} else {
+			return fmt.Errorf("unexpected attribute %s", attr.Name.Local)
+		}
+	}
+	if s.Name == "" {
+		return fmt.Errorf("missing style name attribute")
+	}
+	s.entries = map[TokenType]StyleEntry{}
+	for {
+		tok, err := d.Token()
+		if err != nil {
+			return err
+		}
+		switch el := tok.(type) {
+		case xml.StartElement:
+			if el.Name.Local != "entry" {
+				return fmt.Errorf("unexpected element %s", el.Name.Local)
+			}
+			var ttype TokenType
+			var entry StyleEntry
+			for _, attr := range el.Attr {
+				switch attr.Name.Local {
+				case "type":
+					ttype, err = TokenTypeString(attr.Value)
+					if err != nil {
+						return err
+					}
+
+				case "style":
+					entry, err = ParseStyleEntry(attr.Value)
+					if err != nil {
+						return err
+					}
+
+				default:
+					return fmt.Errorf("unexpected attribute %s", attr.Name.Local)
+				}
+			}
+			s.entries[ttype] = entry
+
+		case xml.EndElement:
+			if el.Name.Local == start.Name.Local {
+				return nil
+			}
+		}
+	}
+}
+
 // Types that are styled.
 func (s *Style) Types() []TokenType {
 	dedupe := map[TokenType]bool{}
@@ -297,6 +425,15 @@ func (s *Style) synthesisable(ttype TokenType) bool {
 	return ttype == LineHighlight || ttype == LineNumbers || ttype == LineNumbersTable
 }
 
+// MustParseStyleEntry parses a Pygments style entry or panics.
+func MustParseStyleEntry(entry string) StyleEntry {
+	out, err := ParseStyleEntry(entry)
+	if err != nil {
+		panic(err)
+	}
+	return out
+}
+
 // ParseStyleEntry parses a Pygments style entry.
 func ParseStyleEntry(entry string) (StyleEntry, error) { // nolint: gocyclo
 	out := StyleEntry{}
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/abap.xml b/vendor/github.com/alecthomas/chroma/v2/styles/abap.xml
new file mode 100644
index 000000000000..36ea2f1d0841
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/abap.xml
@@ -0,0 +1,11 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/algol.xml b/vendor/github.com/alecthomas/chroma/v2/styles/algol.xml
new file mode 100644
index 000000000000..e8a6dc1b8299
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/algol.xml
@@ -0,0 +1,18 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml b/vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml
new file mode 100644
index 000000000000..7fa340f32b38
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml
@@ -0,0 +1,18 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/api.go b/vendor/github.com/alecthomas/chroma/v2/styles/api.go
new file mode 100644
index 000000000000..e26d6f0a56a0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/api.go
@@ -0,0 +1,65 @@
+package styles
+
+import (
+	"embed"
+	"io/fs"
+	"sort"
+
+	"github.com/alecthomas/chroma/v2"
+)
+
+//go:embed *.xml
+var embedded embed.FS
+
+// Registry of Styles.
+var Registry = func() map[string]*chroma.Style {
+	registry := map[string]*chroma.Style{}
+	// Register all embedded styles.
+	files, err := fs.ReadDir(embedded, ".")
+	if err != nil {
+		panic(err)
+	}
+	for _, file := range files {
+		if file.IsDir() {
+			continue
+		}
+		r, err := embedded.Open(file.Name())
+		if err != nil {
+			panic(err)
+		}
+		style, err := chroma.NewXMLStyle(r)
+		if err != nil {
+			panic(err)
+		}
+		registry[style.Name] = style
+		_ = r.Close()
+	}
+	return registry
+}()
+
+// Fallback style. Reassign to change the default fallback style.
+var Fallback = Registry["swapoff"]
+
+// Register a chroma.Style.
+func Register(style *chroma.Style) *chroma.Style {
+	Registry[style.Name] = style
+	return style
+}
+
+// Names of all available styles.
+func Names() []string {
+	out := []string{}
+	for name := range Registry {
+		out = append(out, name)
+	}
+	sort.Strings(out)
+	return out
+}
+
+// Get named style, or Fallback.
+func Get(name string) *chroma.Style {
+	if style, ok := Registry[name]; ok {
+		return style
+	}
+	return Fallback
+}
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml b/vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml
new file mode 100644
index 000000000000..d9891dc53819
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml
@@ -0,0 +1,18 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml b/vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml
new file mode 100644
index 000000000000..74d2eae98827
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml
@@ -0,0 +1,36 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/average.xml b/vendor/github.com/alecthomas/chroma/v2/styles/average.xml
new file mode 100644
index 000000000000..79bdb95f340a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/average.xml
@@ -0,0 +1,74 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml b/vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml
new file mode 100644
index 000000000000..a05ba24e5299
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml
@@ -0,0 +1,74 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/borland.xml b/vendor/github.com/alecthomas/chroma/v2/styles/borland.xml
new file mode 100644
index 000000000000..0d8f574c646b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/borland.xml
@@ -0,0 +1,26 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/bw.xml b/vendor/github.com/alecthomas/chroma/v2/styles/bw.xml
new file mode 100644
index 000000000000..fb0e868d1597
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/bw.xml
@@ -0,0 +1,23 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml
new file mode 100644
index 000000000000..0adf1ba9e34e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml
new file mode 100644
index 000000000000..3ea767fd4a32
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml
new file mode 100644
index 000000000000..6b5002848708
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml
new file mode 100644
index 000000000000..9a401912f36b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml b/vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml
new file mode 100644
index 000000000000..32442d716dab
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml
@@ -0,0 +1,52 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/compat.go b/vendor/github.com/alecthomas/chroma/v2/styles/compat.go
new file mode 100644
index 000000000000..4a6aaa6652dd
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/compat.go
@@ -0,0 +1,66 @@
+package styles
+
+// Present for backwards compatibility.
+//
+// Deprecated: use styles.Get(name) instead.
+var (
+	Abap                = Registry["abap"]
+	Algol               = Registry["algol"]
+	AlgolNu             = Registry["algol_nu"]
+	Arduino             = Registry["arduino"]
+	Autumn              = Registry["autumn"]
+	Average             = Registry["average"]
+	Base16Snazzy        = Registry["base16-snazzy"]
+	Borland             = Registry["borland"]
+	BlackWhite          = Registry["bw"]
+	CatppuccinFrappe    = Registry["catppuccin-frappe"]
+	CatppuccinLatte     = Registry["catppuccin-latte"]
+	CatppuccinMacchiato = Registry["catppuccin-macchiato"]
+	CatppuccinMocha     = Registry["catppuccin-mocha"]
+	Colorful            = Registry["colorful"]
+	DoomOne             = Registry["doom-one"]
+	DoomOne2            = Registry["doom-one2"]
+	Dracula             = Registry["dracula"]
+	Emacs               = Registry["emacs"]
+	Friendly            = Registry["friendly"]
+	Fruity              = Registry["fruity"]
+	GitHubDark          = Registry["github-dark"]
+	GitHub              = Registry["github"]
+	GruvboxLight        = Registry["gruvbox-light"]
+	Gruvbox             = Registry["gruvbox"]
+	HrDark              = Registry["hrdark"]
+	HrHighContrast      = Registry["hr_high_contrast"]
+	Igor                = Registry["igor"]
+	Lovelace            = Registry["lovelace"]
+	Manni               = Registry["manni"]
+	ModusOperandi       = Registry["modus-operandi"]
+	ModusVivendi        = Registry["modus-vivendi"]
+	Monokai             = Registry["monokai"]
+	MonokaiLight        = Registry["monokailight"]
+	Murphy              = Registry["murphy"]
+	Native              = Registry["native"]
+	Nord                = Registry["nord"]
+	OnesEnterprise      = Registry["onesenterprise"]
+	ParaisoDark         = Registry["paraiso-dark"]
+	ParaisoLight        = Registry["paraiso-light"]
+	Pastie              = Registry["pastie"]
+	Perldoc             = Registry["perldoc"]
+	Pygments            = Registry["pygments"]
+	RainbowDash         = Registry["rainbow_dash"]
+	RosePineDawn        = Registry["rose-pine-dawn"]
+	RosePineMoon        = Registry["rose-pine-moon"]
+	RosePine            = Registry["rose-pine"]
+	Rrt                 = Registry["rrt"]
+	SolarizedDark       = Registry["solarized-dark"]
+	SolarizedDark256    = Registry["solarized-dark256"]
+	SolarizedLight      = Registry["solarized-light"]
+	SwapOff             = Registry["swapoff"]
+	Tango               = Registry["tango"]
+	Trac                = Registry["trac"]
+	Vim                 = Registry["vim"]
+	VisualStudio        = Registry["vs"]
+	Vulcan              = Registry["vulcan"]
+	WitchHazel          = Registry["witchhazel"]
+	XcodeDark           = Registry["xcode-dark"]
+	Xcode               = Registry["xcode"]
+)
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml
new file mode 100644
index 000000000000..1f5127ef90d9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml
@@ -0,0 +1,51 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml
new file mode 100644
index 000000000000..f47debaf0919
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml
@@ -0,0 +1,64 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml b/vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml
new file mode 100644
index 000000000000..9df7da11c23c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml
@@ -0,0 +1,74 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml b/vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml
new file mode 100644
index 000000000000..981ce8e40fb3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml
@@ -0,0 +1,44 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml b/vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml
new file mode 100644
index 000000000000..f49801040eb5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml
@@ -0,0 +1,44 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml b/vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml
new file mode 100644
index 000000000000..bcc06aa7bcfc
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml
@@ -0,0 +1,19 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml
new file mode 100644
index 000000000000..711aeafc4ab5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml
@@ -0,0 +1,45 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/github.xml b/vendor/github.com/alecthomas/chroma/v2/styles/github.xml
new file mode 100644
index 000000000000..e7caee7b668e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/github.xml
@@ -0,0 +1,44 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml
new file mode 100644
index 000000000000..8c4f0642c648
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml
@@ -0,0 +1,33 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml
new file mode 100644
index 000000000000..2f6a0a2a050c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml
@@ -0,0 +1,33 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml b/vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml
new file mode 100644
index 000000000000..61cde204a4b3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml
@@ -0,0 +1,12 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml
new file mode 100644
index 000000000000..bc7a6f315ec6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml
@@ -0,0 +1,10 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/igor.xml b/vendor/github.com/alecthomas/chroma/v2/styles/igor.xml
new file mode 100644
index 000000000000..773c83b6034c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/igor.xml
@@ -0,0 +1,9 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml b/vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml
new file mode 100644
index 000000000000..e336c930a495
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml
@@ -0,0 +1,53 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/manni.xml b/vendor/github.com/alecthomas/chroma/v2/styles/manni.xml
new file mode 100644
index 000000000000..99324bd3b12c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/manni.xml
@@ -0,0 +1,44 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml b/vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml
new file mode 100644
index 000000000000..023137aae550
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml
@@ -0,0 +1,13 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml b/vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml
new file mode 100644
index 000000000000..8da663dcc0c7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml
@@ -0,0 +1,13 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml b/vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml
new file mode 100644
index 000000000000..1a789ddec58f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml
@@ -0,0 +1,29 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml b/vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml
new file mode 100644
index 000000000000..85cd23e00868
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml
@@ -0,0 +1,26 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml b/vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml
new file mode 100644
index 000000000000..112d6205c5b7
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml
@@ -0,0 +1,52 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/native.xml b/vendor/github.com/alecthomas/chroma/v2/styles/native.xml
new file mode 100644
index 000000000000..43eea7fd5372
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/native.xml
@@ -0,0 +1,35 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/nord.xml b/vendor/github.com/alecthomas/chroma/v2/styles/nord.xml
new file mode 100644
index 000000000000..1c1d1ffb260e
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/nord.xml
@@ -0,0 +1,46 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml
new file mode 100644
index 000000000000..6921eb5eebd5
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml
@@ -0,0 +1,25 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml b/vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml
new file mode 100644
index 000000000000..ce86db3fbc3a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml
@@ -0,0 +1,10 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml
new file mode 100644
index 000000000000..788db3f7cb60
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml
@@ -0,0 +1,37 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml
new file mode 100644
index 000000000000..06a63bae176b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml
@@ -0,0 +1,37 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml b/vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml
new file mode 100644
index 000000000000..a3b0abde5ce6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml
@@ -0,0 +1,45 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml b/vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml
new file mode 100644
index 000000000000..9e5564c3fbd3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml
@@ -0,0 +1,37 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml b/vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml
new file mode 100644
index 000000000000..a3d0d8bab537
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml
@@ -0,0 +1,42 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml
new file mode 100644
index 000000000000..5b0fe49d633a
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml
@@ -0,0 +1,40 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml
new file mode 100644
index 000000000000..788bd6f65c2b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml
@@ -0,0 +1,29 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml
new file mode 100644
index 000000000000..f67b804324e6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml
@@ -0,0 +1,29 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml
new file mode 100644
index 000000000000..3fb70a5ac79c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml
@@ -0,0 +1,29 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml
new file mode 100644
index 000000000000..5f1daaa2c70f
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml
@@ -0,0 +1,13 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml
new file mode 100644
index 000000000000..a3cf46fddbb2
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml
@@ -0,0 +1,39 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml
new file mode 100644
index 000000000000..977cfbe3f3f0
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml
@@ -0,0 +1,41 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml
new file mode 100644
index 000000000000..4fbc1d4a6754
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml
@@ -0,0 +1,17 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml b/vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml
new file mode 100644
index 000000000000..8a398df8d9d6
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml
@@ -0,0 +1,18 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/tango.xml b/vendor/github.com/alecthomas/chroma/v2/styles/tango.xml
new file mode 100644
index 000000000000..5ca46bb75e15
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/tango.xml
@@ -0,0 +1,72 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-day.xml b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-day.xml
new file mode 100644
index 000000000000..c20d9a41e668
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-day.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-moon.xml b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-moon.xml
new file mode 100644
index 000000000000..3312f029d1e9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-moon.xml
@@ -0,0 +1,83 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-night.xml b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-night.xml
new file mode 100644
index 000000000000..c798bad4dc8c
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-night.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-storm.xml b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-storm.xml
new file mode 100644
index 000000000000..c0811524791b
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/tokyonight-storm.xml
@@ -0,0 +1,83 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/trac.xml b/vendor/github.com/alecthomas/chroma/v2/styles/trac.xml
new file mode 100644
index 000000000000..9f1d266780c3
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/trac.xml
@@ -0,0 +1,35 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/vim.xml b/vendor/github.com/alecthomas/chroma/v2/styles/vim.xml
new file mode 100644
index 000000000000..fec6934347ea
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/vim.xml
@@ -0,0 +1,29 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/vs.xml b/vendor/github.com/alecthomas/chroma/v2/styles/vs.xml
new file mode 100644
index 000000000000..564350154979
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/vs.xml
@@ -0,0 +1,16 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml b/vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml
new file mode 100644
index 000000000000..4e690945e2e8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml
@@ -0,0 +1,74 @@
+
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml b/vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml
new file mode 100644
index 000000000000..52f2299133ec
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml
@@ -0,0 +1,31 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml
new file mode 100644
index 000000000000..93439791fd84
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml
@@ -0,0 +1,31 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml b/vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml
new file mode 100644
index 000000000000..523d746cf6d9
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml
@@ -0,0 +1,22 @@
+
\ No newline at end of file
diff --git a/vendor/github.com/alecthomas/chroma/table.py b/vendor/github.com/alecthomas/chroma/v2/table.py
similarity index 89%
rename from vendor/github.com/alecthomas/chroma/table.py
rename to vendor/github.com/alecthomas/chroma/v2/table.py
index 77945398215e..ea4b7556a7b0 100644
--- a/vendor/github.com/alecthomas/chroma/table.py
+++ b/vendor/github.com/alecthomas/chroma/v2/table.py
@@ -5,8 +5,7 @@
 
 README_FILE = "README.md"
 
-
-lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode("utf-8").splitlines()
+lines = check_output(["chroma", "--list"]).decode("utf-8").splitlines()
 lines = [line.strip() for line in lines if line.startswith("  ") and not line.startswith("   ")]
 lines = sorted(lines, key=lambda l: l.lower())
 
diff --git a/vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go b/vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go
new file mode 100644
index 000000000000..696e9ce5e9c8
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go
@@ -0,0 +1,573 @@
+// Code generated by "enumer -text -type TokenType"; DO NOT EDIT.
+
+package chroma
+
+import (
+	"fmt"
+	"strings"
+)
+
+const _TokenTypeName = "NoneOtherErrorCodeLineLineLinkLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersLinePreWrapperBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
+const _TokenTypeLowerName = "noneothererrorcodelinelinelinklinetabletdlinetablelinehighlightlinenumberstablelinenumberslineprewrapperbackgroundeoftypekeywordkeywordconstantkeyworddeclarationkeywordnamespacekeywordpseudokeywordreservedkeywordtypenamenameattributenamebuiltinnamebuiltinpseudonameclassnameconstantnamedecoratornameentitynameexceptionnamefunctionnamefunctionmagicnamekeywordnamelabelnamenamespacenameoperatornameothernamepseudonamepropertynametagnamevariablenamevariableanonymousnamevariableclassnamevariableglobalnamevariableinstancenamevariablemagicliteralliteraldateliteralotherliteralstringliteralstringaffixliteralstringatomliteralstringbacktickliteralstringbooleanliteralstringcharliteralstringdelimiterliteralstringdocliteralstringdoubleliteralstringescapeliteralstringheredocliteralstringinterpolliteralstringnameliteralstringotherliteralstringregexliteralstringsingleliteralstringsymbolliteralnumberliteralnumberbinliteralnumberfloatliteralnumberhexliteralnumberintegerliteralnumberintegerlongliteralnumberoctoperatoroperatorwordpunctuationcommentcommenthashbangcommentmultilinecommentsinglecommentspecialcommentpreproccommentpreprocfilegenericgenericdeletedgenericemphgenericerrorgenericheadinggenericinsertedgenericoutputgenericpromptgenericstronggenericsubheadinggenerictracebackgenericunderlinetexttextwhitespacetextsymboltextpunctuation"
+
+var _TokenTypeMap = map[TokenType]string{
+	-13:  _TokenTypeName[0:4],
+	-12:  _TokenTypeName[4:9],
+	-11:  _TokenTypeName[9:14],
+	-10:  _TokenTypeName[14:22],
+	-9:   _TokenTypeName[22:30],
+	-8:   _TokenTypeName[30:41],
+	-7:   _TokenTypeName[41:50],
+	-6:   _TokenTypeName[50:63],
+	-5:   _TokenTypeName[63:79],
+	-4:   _TokenTypeName[79:90],
+	-3:   _TokenTypeName[90:94],
+	-2:   _TokenTypeName[94:104],
+	-1:   _TokenTypeName[104:114],
+	0:    _TokenTypeName[114:121],
+	1000: _TokenTypeName[121:128],
+	1001: _TokenTypeName[128:143],
+	1002: _TokenTypeName[143:161],
+	1003: _TokenTypeName[161:177],
+	1004: _TokenTypeName[177:190],
+	1005: _TokenTypeName[190:205],
+	1006: _TokenTypeName[205:216],
+	2000: _TokenTypeName[216:220],
+	2001: _TokenTypeName[220:233],
+	2002: _TokenTypeName[233:244],
+	2003: _TokenTypeName[244:261],
+	2004: _TokenTypeName[261:270],
+	2005: _TokenTypeName[270:282],
+	2006: _TokenTypeName[282:295],
+	2007: _TokenTypeName[295:305],
+	2008: _TokenTypeName[305:318],
+	2009: _TokenTypeName[318:330],
+	2010: _TokenTypeName[330:347],
+	2011: _TokenTypeName[347:358],
+	2012: _TokenTypeName[358:367],
+	2013: _TokenTypeName[367:380],
+	2014: _TokenTypeName[380:392],
+	2015: _TokenTypeName[392:401],
+	2016: _TokenTypeName[401:411],
+	2017: _TokenTypeName[411:423],
+	2018: _TokenTypeName[423:430],
+	2019: _TokenTypeName[430:442],
+	2020: _TokenTypeName[442:463],
+	2021: _TokenTypeName[463:480],
+	2022: _TokenTypeName[480:498],
+	2023: _TokenTypeName[498:518],
+	2024: _TokenTypeName[518:535],
+	3000: _TokenTypeName[535:542],
+	3001: _TokenTypeName[542:553],
+	3002: _TokenTypeName[553:565],
+	3100: _TokenTypeName[565:578],
+	3101: _TokenTypeName[578:596],
+	3102: _TokenTypeName[596:613],
+	3103: _TokenTypeName[613:634],
+	3104: _TokenTypeName[634:654],
+	3105: _TokenTypeName[654:671],
+	3106: _TokenTypeName[671:693],
+	3107: _TokenTypeName[693:709],
+	3108: _TokenTypeName[709:728],
+	3109: _TokenTypeName[728:747],
+	3110: _TokenTypeName[747:767],
+	3111: _TokenTypeName[767:788],
+	3112: _TokenTypeName[788:805],
+	3113: _TokenTypeName[805:823],
+	3114: _TokenTypeName[823:841],
+	3115: _TokenTypeName[841:860],
+	3116: _TokenTypeName[860:879],
+	3200: _TokenTypeName[879:892],
+	3201: _TokenTypeName[892:908],
+	3202: _TokenTypeName[908:926],
+	3203: _TokenTypeName[926:942],
+	3204: _TokenTypeName[942:962],
+	3205: _TokenTypeName[962:986],
+	3206: _TokenTypeName[986:1002],
+	4000: _TokenTypeName[1002:1010],
+	4001: _TokenTypeName[1010:1022],
+	5000: _TokenTypeName[1022:1033],
+	6000: _TokenTypeName[1033:1040],
+	6001: _TokenTypeName[1040:1055],
+	6002: _TokenTypeName[1055:1071],
+	6003: _TokenTypeName[1071:1084],
+	6004: _TokenTypeName[1084:1098],
+	6100: _TokenTypeName[1098:1112],
+	6101: _TokenTypeName[1112:1130],
+	7000: _TokenTypeName[1130:1137],
+	7001: _TokenTypeName[1137:1151],
+	7002: _TokenTypeName[1151:1162],
+	7003: _TokenTypeName[1162:1174],
+	7004: _TokenTypeName[1174:1188],
+	7005: _TokenTypeName[1188:1203],
+	7006: _TokenTypeName[1203:1216],
+	7007: _TokenTypeName[1216:1229],
+	7008: _TokenTypeName[1229:1242],
+	7009: _TokenTypeName[1242:1259],
+	7010: _TokenTypeName[1259:1275],
+	7011: _TokenTypeName[1275:1291],
+	8000: _TokenTypeName[1291:1295],
+	8001: _TokenTypeName[1295:1309],
+	8002: _TokenTypeName[1309:1319],
+	8003: _TokenTypeName[1319:1334],
+}
+
+func (i TokenType) String() string {
+	if str, ok := _TokenTypeMap[i]; ok {
+		return str
+	}
+	return fmt.Sprintf("TokenType(%d)", i)
+}
+
+// An "invalid array index" compiler error signifies that the constant values have changed.
+// Re-run the stringer command to generate them again.
+func _TokenTypeNoOp() {
+	var x [1]struct{}
+	_ = x[None-(-13)]
+	_ = x[Other-(-12)]
+	_ = x[Error-(-11)]
+	_ = x[CodeLine-(-10)]
+	_ = x[LineLink-(-9)]
+	_ = x[LineTableTD-(-8)]
+	_ = x[LineTable-(-7)]
+	_ = x[LineHighlight-(-6)]
+	_ = x[LineNumbersTable-(-5)]
+	_ = x[LineNumbers-(-4)]
+	_ = x[Line-(-3)]
+	_ = x[PreWrapper-(-2)]
+	_ = x[Background-(-1)]
+	_ = x[EOFType-(0)]
+	_ = x[Keyword-(1000)]
+	_ = x[KeywordConstant-(1001)]
+	_ = x[KeywordDeclaration-(1002)]
+	_ = x[KeywordNamespace-(1003)]
+	_ = x[KeywordPseudo-(1004)]
+	_ = x[KeywordReserved-(1005)]
+	_ = x[KeywordType-(1006)]
+	_ = x[Name-(2000)]
+	_ = x[NameAttribute-(2001)]
+	_ = x[NameBuiltin-(2002)]
+	_ = x[NameBuiltinPseudo-(2003)]
+	_ = x[NameClass-(2004)]
+	_ = x[NameConstant-(2005)]
+	_ = x[NameDecorator-(2006)]
+	_ = x[NameEntity-(2007)]
+	_ = x[NameException-(2008)]
+	_ = x[NameFunction-(2009)]
+	_ = x[NameFunctionMagic-(2010)]
+	_ = x[NameKeyword-(2011)]
+	_ = x[NameLabel-(2012)]
+	_ = x[NameNamespace-(2013)]
+	_ = x[NameOperator-(2014)]
+	_ = x[NameOther-(2015)]
+	_ = x[NamePseudo-(2016)]
+	_ = x[NameProperty-(2017)]
+	_ = x[NameTag-(2018)]
+	_ = x[NameVariable-(2019)]
+	_ = x[NameVariableAnonymous-(2020)]
+	_ = x[NameVariableClass-(2021)]
+	_ = x[NameVariableGlobal-(2022)]
+	_ = x[NameVariableInstance-(2023)]
+	_ = x[NameVariableMagic-(2024)]
+	_ = x[Literal-(3000)]
+	_ = x[LiteralDate-(3001)]
+	_ = x[LiteralOther-(3002)]
+	_ = x[LiteralString-(3100)]
+	_ = x[LiteralStringAffix-(3101)]
+	_ = x[LiteralStringAtom-(3102)]
+	_ = x[LiteralStringBacktick-(3103)]
+	_ = x[LiteralStringBoolean-(3104)]
+	_ = x[LiteralStringChar-(3105)]
+	_ = x[LiteralStringDelimiter-(3106)]
+	_ = x[LiteralStringDoc-(3107)]
+	_ = x[LiteralStringDouble-(3108)]
+	_ = x[LiteralStringEscape-(3109)]
+	_ = x[LiteralStringHeredoc-(3110)]
+	_ = x[LiteralStringInterpol-(3111)]
+	_ = x[LiteralStringName-(3112)]
+	_ = x[LiteralStringOther-(3113)]
+	_ = x[LiteralStringRegex-(3114)]
+	_ = x[LiteralStringSingle-(3115)]
+	_ = x[LiteralStringSymbol-(3116)]
+	_ = x[LiteralNumber-(3200)]
+	_ = x[LiteralNumberBin-(3201)]
+	_ = x[LiteralNumberFloat-(3202)]
+	_ = x[LiteralNumberHex-(3203)]
+	_ = x[LiteralNumberInteger-(3204)]
+	_ = x[LiteralNumberIntegerLong-(3205)]
+	_ = x[LiteralNumberOct-(3206)]
+	_ = x[Operator-(4000)]
+	_ = x[OperatorWord-(4001)]
+	_ = x[Punctuation-(5000)]
+	_ = x[Comment-(6000)]
+	_ = x[CommentHashbang-(6001)]
+	_ = x[CommentMultiline-(6002)]
+	_ = x[CommentSingle-(6003)]
+	_ = x[CommentSpecial-(6004)]
+	_ = x[CommentPreproc-(6100)]
+	_ = x[CommentPreprocFile-(6101)]
+	_ = x[Generic-(7000)]
+	_ = x[GenericDeleted-(7001)]
+	_ = x[GenericEmph-(7002)]
+	_ = x[GenericError-(7003)]
+	_ = x[GenericHeading-(7004)]
+	_ = x[GenericInserted-(7005)]
+	_ = x[GenericOutput-(7006)]
+	_ = x[GenericPrompt-(7007)]
+	_ = x[GenericStrong-(7008)]
+	_ = x[GenericSubheading-(7009)]
+	_ = x[GenericTraceback-(7010)]
+	_ = x[GenericUnderline-(7011)]
+	_ = x[Text-(8000)]
+	_ = x[TextWhitespace-(8001)]
+	_ = x[TextSymbol-(8002)]
+	_ = x[TextPunctuation-(8003)]
+}
+
+var _TokenTypeValues = []TokenType{None, Other, Error, CodeLine, LineLink, LineTableTD, LineTable, LineHighlight, LineNumbersTable, LineNumbers, Line, PreWrapper, Background, EOFType, Keyword, KeywordConstant, KeywordDeclaration, KeywordNamespace, KeywordPseudo, KeywordReserved, KeywordType, Name, NameAttribute, NameBuiltin, NameBuiltinPseudo, NameClass, NameConstant, NameDecorator, NameEntity, NameException, NameFunction, NameFunctionMagic, NameKeyword, NameLabel, NameNamespace, NameOperator, NameOther, NamePseudo, NameProperty, NameTag, NameVariable, NameVariableAnonymous, NameVariableClass, NameVariableGlobal, NameVariableInstance, NameVariableMagic, Literal, LiteralDate, LiteralOther, LiteralString, LiteralStringAffix, LiteralStringAtom, LiteralStringBacktick, LiteralStringBoolean, LiteralStringChar, LiteralStringDelimiter, LiteralStringDoc, LiteralStringDouble, LiteralStringEscape, LiteralStringHeredoc, LiteralStringInterpol, LiteralStringName, LiteralStringOther, LiteralStringRegex, LiteralStringSingle, LiteralStringSymbol, LiteralNumber, LiteralNumberBin, LiteralNumberFloat, LiteralNumberHex, LiteralNumberInteger, LiteralNumberIntegerLong, LiteralNumberOct, Operator, OperatorWord, Punctuation, Comment, CommentHashbang, CommentMultiline, CommentSingle, CommentSpecial, CommentPreproc, CommentPreprocFile, Generic, GenericDeleted, GenericEmph, GenericError, GenericHeading, GenericInserted, GenericOutput, GenericPrompt, GenericStrong, GenericSubheading, GenericTraceback, GenericUnderline, Text, TextWhitespace, TextSymbol, TextPunctuation}
+
+var _TokenTypeNameToValueMap = map[string]TokenType{
+	_TokenTypeName[0:4]:            None,
+	_TokenTypeLowerName[0:4]:       None,
+	_TokenTypeName[4:9]:            Other,
+	_TokenTypeLowerName[4:9]:       Other,
+	_TokenTypeName[9:14]:           Error,
+	_TokenTypeLowerName[9:14]:      Error,
+	_TokenTypeName[14:22]:          CodeLine,
+	_TokenTypeLowerName[14:22]:     CodeLine,
+	_TokenTypeName[22:30]:          LineLink,
+	_TokenTypeLowerName[22:30]:     LineLink,
+	_TokenTypeName[30:41]:          LineTableTD,
+	_TokenTypeLowerName[30:41]:     LineTableTD,
+	_TokenTypeName[41:50]:          LineTable,
+	_TokenTypeLowerName[41:50]:     LineTable,
+	_TokenTypeName[50:63]:          LineHighlight,
+	_TokenTypeLowerName[50:63]:     LineHighlight,
+	_TokenTypeName[63:79]:          LineNumbersTable,
+	_TokenTypeLowerName[63:79]:     LineNumbersTable,
+	_TokenTypeName[79:90]:          LineNumbers,
+	_TokenTypeLowerName[79:90]:     LineNumbers,
+	_TokenTypeName[90:94]:          Line,
+	_TokenTypeLowerName[90:94]:     Line,
+	_TokenTypeName[94:104]:         PreWrapper,
+	_TokenTypeLowerName[94:104]:    PreWrapper,
+	_TokenTypeName[104:114]:        Background,
+	_TokenTypeLowerName[104:114]:   Background,
+	_TokenTypeName[114:121]:        EOFType,
+	_TokenTypeLowerName[114:121]:   EOFType,
+	_TokenTypeName[121:128]:        Keyword,
+	_TokenTypeLowerName[121:128]:   Keyword,
+	_TokenTypeName[128:143]:        KeywordConstant,
+	_TokenTypeLowerName[128:143]:   KeywordConstant,
+	_TokenTypeName[143:161]:        KeywordDeclaration,
+	_TokenTypeLowerName[143:161]:   KeywordDeclaration,
+	_TokenTypeName[161:177]:        KeywordNamespace,
+	_TokenTypeLowerName[161:177]:   KeywordNamespace,
+	_TokenTypeName[177:190]:        KeywordPseudo,
+	_TokenTypeLowerName[177:190]:   KeywordPseudo,
+	_TokenTypeName[190:205]:        KeywordReserved,
+	_TokenTypeLowerName[190:205]:   KeywordReserved,
+	_TokenTypeName[205:216]:        KeywordType,
+	_TokenTypeLowerName[205:216]:   KeywordType,
+	_TokenTypeName[216:220]:        Name,
+	_TokenTypeLowerName[216:220]:   Name,
+	_TokenTypeName[220:233]:        NameAttribute,
+	_TokenTypeLowerName[220:233]:   NameAttribute,
+	_TokenTypeName[233:244]:        NameBuiltin,
+	_TokenTypeLowerName[233:244]:   NameBuiltin,
+	_TokenTypeName[244:261]:        NameBuiltinPseudo,
+	_TokenTypeLowerName[244:261]:   NameBuiltinPseudo,
+	_TokenTypeName[261:270]:        NameClass,
+	_TokenTypeLowerName[261:270]:   NameClass,
+	_TokenTypeName[270:282]:        NameConstant,
+	_TokenTypeLowerName[270:282]:   NameConstant,
+	_TokenTypeName[282:295]:        NameDecorator,
+	_TokenTypeLowerName[282:295]:   NameDecorator,
+	_TokenTypeName[295:305]:        NameEntity,
+	_TokenTypeLowerName[295:305]:   NameEntity,
+	_TokenTypeName[305:318]:        NameException,
+	_TokenTypeLowerName[305:318]:   NameException,
+	_TokenTypeName[318:330]:        NameFunction,
+	_TokenTypeLowerName[318:330]:   NameFunction,
+	_TokenTypeName[330:347]:        NameFunctionMagic,
+	_TokenTypeLowerName[330:347]:   NameFunctionMagic,
+	_TokenTypeName[347:358]:        NameKeyword,
+	_TokenTypeLowerName[347:358]:   NameKeyword,
+	_TokenTypeName[358:367]:        NameLabel,
+	_TokenTypeLowerName[358:367]:   NameLabel,
+	_TokenTypeName[367:380]:        NameNamespace,
+	_TokenTypeLowerName[367:380]:   NameNamespace,
+	_TokenTypeName[380:392]:        NameOperator,
+	_TokenTypeLowerName[380:392]:   NameOperator,
+	_TokenTypeName[392:401]:        NameOther,
+	_TokenTypeLowerName[392:401]:   NameOther,
+	_TokenTypeName[401:411]:        NamePseudo,
+	_TokenTypeLowerName[401:411]:   NamePseudo,
+	_TokenTypeName[411:423]:        NameProperty,
+	_TokenTypeLowerName[411:423]:   NameProperty,
+	_TokenTypeName[423:430]:        NameTag,
+	_TokenTypeLowerName[423:430]:   NameTag,
+	_TokenTypeName[430:442]:        NameVariable,
+	_TokenTypeLowerName[430:442]:   NameVariable,
+	_TokenTypeName[442:463]:        NameVariableAnonymous,
+	_TokenTypeLowerName[442:463]:   NameVariableAnonymous,
+	_TokenTypeName[463:480]:        NameVariableClass,
+	_TokenTypeLowerName[463:480]:   NameVariableClass,
+	_TokenTypeName[480:498]:        NameVariableGlobal,
+	_TokenTypeLowerName[480:498]:   NameVariableGlobal,
+	_TokenTypeName[498:518]:        NameVariableInstance,
+	_TokenTypeLowerName[498:518]:   NameVariableInstance,
+	_TokenTypeName[518:535]:        NameVariableMagic,
+	_TokenTypeLowerName[518:535]:   NameVariableMagic,
+	_TokenTypeName[535:542]:        Literal,
+	_TokenTypeLowerName[535:542]:   Literal,
+	_TokenTypeName[542:553]:        LiteralDate,
+	_TokenTypeLowerName[542:553]:   LiteralDate,
+	_TokenTypeName[553:565]:        LiteralOther,
+	_TokenTypeLowerName[553:565]:   LiteralOther,
+	_TokenTypeName[565:578]:        LiteralString,
+	_TokenTypeLowerName[565:578]:   LiteralString,
+	_TokenTypeName[578:596]:        LiteralStringAffix,
+	_TokenTypeLowerName[578:596]:   LiteralStringAffix,
+	_TokenTypeName[596:613]:        LiteralStringAtom,
+	_TokenTypeLowerName[596:613]:   LiteralStringAtom,
+	_TokenTypeName[613:634]:        LiteralStringBacktick,
+	_TokenTypeLowerName[613:634]:   LiteralStringBacktick,
+	_TokenTypeName[634:654]:        LiteralStringBoolean,
+	_TokenTypeLowerName[634:654]:   LiteralStringBoolean,
+	_TokenTypeName[654:671]:        LiteralStringChar,
+	_TokenTypeLowerName[654:671]:   LiteralStringChar,
+	_TokenTypeName[671:693]:        LiteralStringDelimiter,
+	_TokenTypeLowerName[671:693]:   LiteralStringDelimiter,
+	_TokenTypeName[693:709]:        LiteralStringDoc,
+	_TokenTypeLowerName[693:709]:   LiteralStringDoc,
+	_TokenTypeName[709:728]:        LiteralStringDouble,
+	_TokenTypeLowerName[709:728]:   LiteralStringDouble,
+	_TokenTypeName[728:747]:        LiteralStringEscape,
+	_TokenTypeLowerName[728:747]:   LiteralStringEscape,
+	_TokenTypeName[747:767]:        LiteralStringHeredoc,
+	_TokenTypeLowerName[747:767]:   LiteralStringHeredoc,
+	_TokenTypeName[767:788]:        LiteralStringInterpol,
+	_TokenTypeLowerName[767:788]:   LiteralStringInterpol,
+	_TokenTypeName[788:805]:        LiteralStringName,
+	_TokenTypeLowerName[788:805]:   LiteralStringName,
+	_TokenTypeName[805:823]:        LiteralStringOther,
+	_TokenTypeLowerName[805:823]:   LiteralStringOther,
+	_TokenTypeName[823:841]:        LiteralStringRegex,
+	_TokenTypeLowerName[823:841]:   LiteralStringRegex,
+	_TokenTypeName[841:860]:        LiteralStringSingle,
+	_TokenTypeLowerName[841:860]:   LiteralStringSingle,
+	_TokenTypeName[860:879]:        LiteralStringSymbol,
+	_TokenTypeLowerName[860:879]:   LiteralStringSymbol,
+	_TokenTypeName[879:892]:        LiteralNumber,
+	_TokenTypeLowerName[879:892]:   LiteralNumber,
+	_TokenTypeName[892:908]:        LiteralNumberBin,
+	_TokenTypeLowerName[892:908]:   LiteralNumberBin,
+	_TokenTypeName[908:926]:        LiteralNumberFloat,
+	_TokenTypeLowerName[908:926]:   LiteralNumberFloat,
+	_TokenTypeName[926:942]:        LiteralNumberHex,
+	_TokenTypeLowerName[926:942]:   LiteralNumberHex,
+	_TokenTypeName[942:962]:        LiteralNumberInteger,
+	_TokenTypeLowerName[942:962]:   LiteralNumberInteger,
+	_TokenTypeName[962:986]:        LiteralNumberIntegerLong,
+	_TokenTypeLowerName[962:986]:   LiteralNumberIntegerLong,
+	_TokenTypeName[986:1002]:       LiteralNumberOct,
+	_TokenTypeLowerName[986:1002]:  LiteralNumberOct,
+	_TokenTypeName[1002:1010]:      Operator,
+	_TokenTypeLowerName[1002:1010]: Operator,
+	_TokenTypeName[1010:1022]:      OperatorWord,
+	_TokenTypeLowerName[1010:1022]: OperatorWord,
+	_TokenTypeName[1022:1033]:      Punctuation,
+	_TokenTypeLowerName[1022:1033]: Punctuation,
+	_TokenTypeName[1033:1040]:      Comment,
+	_TokenTypeLowerName[1033:1040]: Comment,
+	_TokenTypeName[1040:1055]:      CommentHashbang,
+	_TokenTypeLowerName[1040:1055]: CommentHashbang,
+	_TokenTypeName[1055:1071]:      CommentMultiline,
+	_TokenTypeLowerName[1055:1071]: CommentMultiline,
+	_TokenTypeName[1071:1084]:      CommentSingle,
+	_TokenTypeLowerName[1071:1084]: CommentSingle,
+	_TokenTypeName[1084:1098]:      CommentSpecial,
+	_TokenTypeLowerName[1084:1098]: CommentSpecial,
+	_TokenTypeName[1098:1112]:      CommentPreproc,
+	_TokenTypeLowerName[1098:1112]: CommentPreproc,
+	_TokenTypeName[1112:1130]:      CommentPreprocFile,
+	_TokenTypeLowerName[1112:1130]: CommentPreprocFile,
+	_TokenTypeName[1130:1137]:      Generic,
+	_TokenTypeLowerName[1130:1137]: Generic,
+	_TokenTypeName[1137:1151]:      GenericDeleted,
+	_TokenTypeLowerName[1137:1151]: GenericDeleted,
+	_TokenTypeName[1151:1162]:      GenericEmph,
+	_TokenTypeLowerName[1151:1162]: GenericEmph,
+	_TokenTypeName[1162:1174]:      GenericError,
+	_TokenTypeLowerName[1162:1174]: GenericError,
+	_TokenTypeName[1174:1188]:      GenericHeading,
+	_TokenTypeLowerName[1174:1188]: GenericHeading,
+	_TokenTypeName[1188:1203]:      GenericInserted,
+	_TokenTypeLowerName[1188:1203]: GenericInserted,
+	_TokenTypeName[1203:1216]:      GenericOutput,
+	_TokenTypeLowerName[1203:1216]: GenericOutput,
+	_TokenTypeName[1216:1229]:      GenericPrompt,
+	_TokenTypeLowerName[1216:1229]: GenericPrompt,
+	_TokenTypeName[1229:1242]:      GenericStrong,
+	_TokenTypeLowerName[1229:1242]: GenericStrong,
+	_TokenTypeName[1242:1259]:      GenericSubheading,
+	_TokenTypeLowerName[1242:1259]: GenericSubheading,
+	_TokenTypeName[1259:1275]:      GenericTraceback,
+	_TokenTypeLowerName[1259:1275]: GenericTraceback,
+	_TokenTypeName[1275:1291]:      GenericUnderline,
+	_TokenTypeLowerName[1275:1291]: GenericUnderline,
+	_TokenTypeName[1291:1295]:      Text,
+	_TokenTypeLowerName[1291:1295]: Text,
+	_TokenTypeName[1295:1309]:      TextWhitespace,
+	_TokenTypeLowerName[1295:1309]: TextWhitespace,
+	_TokenTypeName[1309:1319]:      TextSymbol,
+	_TokenTypeLowerName[1309:1319]: TextSymbol,
+	_TokenTypeName[1319:1334]:      TextPunctuation,
+	_TokenTypeLowerName[1319:1334]: TextPunctuation,
+}
+
+var _TokenTypeNames = []string{
+	_TokenTypeName[0:4],
+	_TokenTypeName[4:9],
+	_TokenTypeName[9:14],
+	_TokenTypeName[14:22],
+	_TokenTypeName[22:30],
+	_TokenTypeName[30:41],
+	_TokenTypeName[41:50],
+	_TokenTypeName[50:63],
+	_TokenTypeName[63:79],
+	_TokenTypeName[79:90],
+	_TokenTypeName[90:94],
+	_TokenTypeName[94:104],
+	_TokenTypeName[104:114],
+	_TokenTypeName[114:121],
+	_TokenTypeName[121:128],
+	_TokenTypeName[128:143],
+	_TokenTypeName[143:161],
+	_TokenTypeName[161:177],
+	_TokenTypeName[177:190],
+	_TokenTypeName[190:205],
+	_TokenTypeName[205:216],
+	_TokenTypeName[216:220],
+	_TokenTypeName[220:233],
+	_TokenTypeName[233:244],
+	_TokenTypeName[244:261],
+	_TokenTypeName[261:270],
+	_TokenTypeName[270:282],
+	_TokenTypeName[282:295],
+	_TokenTypeName[295:305],
+	_TokenTypeName[305:318],
+	_TokenTypeName[318:330],
+	_TokenTypeName[330:347],
+	_TokenTypeName[347:358],
+	_TokenTypeName[358:367],
+	_TokenTypeName[367:380],
+	_TokenTypeName[380:392],
+	_TokenTypeName[392:401],
+	_TokenTypeName[401:411],
+	_TokenTypeName[411:423],
+	_TokenTypeName[423:430],
+	_TokenTypeName[430:442],
+	_TokenTypeName[442:463],
+	_TokenTypeName[463:480],
+	_TokenTypeName[480:498],
+	_TokenTypeName[498:518],
+	_TokenTypeName[518:535],
+	_TokenTypeName[535:542],
+	_TokenTypeName[542:553],
+	_TokenTypeName[553:565],
+	_TokenTypeName[565:578],
+	_TokenTypeName[578:596],
+	_TokenTypeName[596:613],
+	_TokenTypeName[613:634],
+	_TokenTypeName[634:654],
+	_TokenTypeName[654:671],
+	_TokenTypeName[671:693],
+	_TokenTypeName[693:709],
+	_TokenTypeName[709:728],
+	_TokenTypeName[728:747],
+	_TokenTypeName[747:767],
+	_TokenTypeName[767:788],
+	_TokenTypeName[788:805],
+	_TokenTypeName[805:823],
+	_TokenTypeName[823:841],
+	_TokenTypeName[841:860],
+	_TokenTypeName[860:879],
+	_TokenTypeName[879:892],
+	_TokenTypeName[892:908],
+	_TokenTypeName[908:926],
+	_TokenTypeName[926:942],
+	_TokenTypeName[942:962],
+	_TokenTypeName[962:986],
+	_TokenTypeName[986:1002],
+	_TokenTypeName[1002:1010],
+	_TokenTypeName[1010:1022],
+	_TokenTypeName[1022:1033],
+	_TokenTypeName[1033:1040],
+	_TokenTypeName[1040:1055],
+	_TokenTypeName[1055:1071],
+	_TokenTypeName[1071:1084],
+	_TokenTypeName[1084:1098],
+	_TokenTypeName[1098:1112],
+	_TokenTypeName[1112:1130],
+	_TokenTypeName[1130:1137],
+	_TokenTypeName[1137:1151],
+	_TokenTypeName[1151:1162],
+	_TokenTypeName[1162:1174],
+	_TokenTypeName[1174:1188],
+	_TokenTypeName[1188:1203],
+	_TokenTypeName[1203:1216],
+	_TokenTypeName[1216:1229],
+	_TokenTypeName[1229:1242],
+	_TokenTypeName[1242:1259],
+	_TokenTypeName[1259:1275],
+	_TokenTypeName[1275:1291],
+	_TokenTypeName[1291:1295],
+	_TokenTypeName[1295:1309],
+	_TokenTypeName[1309:1319],
+	_TokenTypeName[1319:1334],
+}
+
+// TokenTypeString retrieves an enum value from the enum constants string name.
+// Throws an error if the param is not part of the enum.
+func TokenTypeString(s string) (TokenType, error) {
+	if val, ok := _TokenTypeNameToValueMap[s]; ok {
+		return val, nil
+	}
+
+	if val, ok := _TokenTypeNameToValueMap[strings.ToLower(s)]; ok {
+		return val, nil
+	}
+	return 0, fmt.Errorf("%s does not belong to TokenType values", s)
+}
+
+// TokenTypeValues returns all values of the enum
+func TokenTypeValues() []TokenType {
+	return _TokenTypeValues
+}
+
+// TokenTypeStrings returns a slice of all String values of the enum
+func TokenTypeStrings() []string {
+	strs := make([]string, len(_TokenTypeNames))
+	copy(strs, _TokenTypeNames)
+	return strs
+}
+
+// IsATokenType returns "true" if the value is listed in the enum definition. "false" otherwise
+func (i TokenType) IsATokenType() bool {
+	_, ok := _TokenTypeMap[i]
+	return ok
+}
+
+// MarshalText implements the encoding.TextMarshaler interface for TokenType
+func (i TokenType) MarshalText() ([]byte, error) {
+	return []byte(i.String()), nil
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface for TokenType
+func (i *TokenType) UnmarshalText(text []byte) error {
+	var err error
+	*i, err = TokenTypeString(string(text))
+	return err
+}
diff --git a/vendor/github.com/alecthomas/chroma/types.go b/vendor/github.com/alecthomas/chroma/v2/types.go
similarity index 94%
rename from vendor/github.com/alecthomas/chroma/types.go
rename to vendor/github.com/alecthomas/chroma/v2/types.go
index 3f15be84b164..3d12310a0eee 100644
--- a/vendor/github.com/alecthomas/chroma/types.go
+++ b/vendor/github.com/alecthomas/chroma/v2/types.go
@@ -1,33 +1,12 @@
 package chroma
 
-import (
-	"encoding/json"
-	"fmt"
-)
-
-//go:generate stringer -type TokenType
+//go:generate enumer -text -type TokenType
 
 // TokenType is the type of token to highlight.
 //
 // It is also an Emitter, emitting a single token of itself
 type TokenType int
 
-func (t TokenType) MarshalJSON() ([]byte, error) { return json.Marshal(t.String()) }
-func (t *TokenType) UnmarshalJSON(data []byte) error {
-	key := ""
-	err := json.Unmarshal(data, &key)
-	if err != nil {
-		return err
-	}
-	for tt, text := range _TokenType_map {
-		if text == key {
-			*t = tt
-			return nil
-		}
-	}
-	return fmt.Errorf("unknown TokenType %q", data)
-}
-
 // Set of TokenTypes.
 //
 // Categories of types are grouped in ranges of 1000, while sub-categories are in ranges of 100. For
@@ -52,6 +31,8 @@ const (
 	LineTable
 	// Line numbers table TD wrapper style.
 	LineTableTD
+	// Line number links.
+	LineLink
 	// Code line wrapper style.
 	CodeLine
 	// Input that could not be tokenised.
@@ -233,6 +214,7 @@ var (
 		LineHighlight:    "hl",
 		LineTable:        "lntable",
 		LineTableTD:      "lntd",
+		LineLink:         "lnlinks",
 		CodeLine:         "cl",
 		Text:             "",
 		Whitespace:       "w",
@@ -354,3 +336,5 @@ func (t TokenType) InSubCategory(other TokenType) bool {
 func (t TokenType) Emit(groups []string, _ *LexerState) Iterator {
 	return Literator(Token{Type: t, Value: groups[0]})
 }
+
+func (t TokenType) EmitterKind() string { return "token" }
diff --git a/vendor/github.com/alecthomas/kingpin/v2/.travis.yml b/vendor/github.com/alecthomas/kingpin/v2/.travis.yml
new file mode 100644
index 000000000000..9c45bacc0a88
--- /dev/null
+++ b/vendor/github.com/alecthomas/kingpin/v2/.travis.yml
@@ -0,0 +1,14 @@
+sudo: false
+language: go
+install: go get -t -v ./...
+go:
+    - 1.2.x
+    - 1.3.x
+    - 1.4.x
+    - 1.5.x
+    - 1.6.x
+    - 1.7.x
+    - 1.8.x
+    - 1.9.x
+    - 1.10.x
+    - 1.11.x
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING b/vendor/github.com/alecthomas/kingpin/v2/COPYING
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/COPYING
rename to vendor/github.com/alecthomas/kingpin/v2/COPYING
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/README.md b/vendor/github.com/alecthomas/kingpin/v2/README.md
similarity index 89%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/README.md
rename to vendor/github.com/alecthomas/kingpin/v2/README.md
index 498704c8905f..5495ca191b38 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/README.md
+++ b/vendor/github.com/alecthomas/kingpin/v2/README.md
@@ -1,5 +1,15 @@
+# CONTRIBUTIONS ONLY
+
+**What does this mean?** I do not have time to fix issues myself. The only way fixes or new features will be added is by people submitting PRs. If you are interested in taking over maintenance and have a history of contributions to Kingpin, please let me know.
+
+**Current status.** Kingpin is largely feature stable. There hasn't been a need to add new features in a while, but there are some bugs that should be fixed.
+
+**Why?** I no longer use Kingpin personally (I now use [kong](https://github.com/alecthomas/kong)). Rather than leave the project in a limbo of people filing issues and wondering why they're not being worked on, I believe this notice will more clearly set expectations.
+
 # Kingpin - A Go (golang) command line and flag parser
-[![](https://godoc.org/github.com/alecthomas/kingpin?status.svg)](http://godoc.org/github.com/alecthomas/kingpin) [![Build Status](https://travis-ci.org/alecthomas/kingpin.svg?branch=master)](https://travis-ci.org/alecthomas/kingpin) [![Gitter chat](https://badges.gitter.im/alecthomas.png)](https://gitter.im/alecthomas/Lobby)
+[![](https://godoc.org/github.com/alecthomas/kingpin?status.svg)](http://godoc.org/github.com/alecthomas/kingpin) [![CI](https://github.com/alecthomas/kingpin/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/kingpin/actions/workflows/ci.yml)
+
+
 
 
 
@@ -41,7 +51,7 @@ positional arguments.
 
 Install it with:
 
-    $ go get gopkg.in/alecthomas/kingpin.v2
+    $ go get github.com/alecthomas/kingpin/v2
 
 It looks like this:
 
@@ -116,16 +126,14 @@ be separated by a space. That is no longer the case.
 
 ## Versions
 
-Kingpin uses [gopkg.in](https://gopkg.in/alecthomas/kingpin) for versioning.
+The current stable version is [github.com/alecthomas/kingpin/v2](https://github.com/alecthomas/kingpin/v2). The previous version, [gopkg.in/alecthomas/kingpin.v1](https://gopkg.in/alecthomas/kingpin.v1), is deprecated and in maintenance mode.
 
-The current stable version is [gopkg.in/alecthomas/kingpin.v2](https://gopkg.in/alecthomas/kingpin.v2). The previous version, [gopkg.in/alecthomas/kingpin.v1](https://gopkg.in/alecthomas/kingpin.v1), is deprecated and in maintenance mode.
-
-### [V2](https://gopkg.in/alecthomas/kingpin.v2) is the current stable version
+### [V2](https://github.com/alecthomas/kingpin/v2) is the current stable version
 
 Installation:
 
 ```sh
-$ go get gopkg.in/alecthomas/kingpin.v2
+$ go get github.com/alecthomas/kingpin/v2
 ```
 
 ### [V1](https://gopkg.in/alecthomas/kingpin.v1) is the OLD stable version
@@ -233,12 +241,12 @@ package main
 import (
   "fmt"
 
-  "gopkg.in/alecthomas/kingpin.v2"
+  "github.com/alecthomas/kingpin/v2"
 )
 
 var (
   debug   = kingpin.Flag("debug", "Enable debug mode.").Bool()
-  timeout = kingpin.Flag("timeout", "Timeout waiting for ping.").Default("5s").OverrideDefaultFromEnvar("PING_TIMEOUT").Short('t').Duration()
+  timeout = kingpin.Flag("timeout", "Timeout waiting for ping.").Default("5s").Envar("PING_TIMEOUT").Short('t').Duration()
   ip      = kingpin.Arg("ip", "IP address to ping.").Required().IP()
   count   = kingpin.Arg("count", "Number of packets to send").Int()
 )
@@ -250,6 +258,17 @@ func main() {
 }
 ```
 
+#### Reading arguments from a file
+Kingpin supports reading arguments from a file.
+Create a file with the corresponding arguments:
+```
+echo -t=5\n > args
+```
+And now supply it:
+```
+$ ping @args
+```
+
 ### Complex Example
 
 Kingpin can also produce complex command-line applications with global flags,
@@ -300,7 +319,7 @@ package main
 import (
   "os"
   "strings"
-  "gopkg.in/alecthomas/kingpin.v2"
+  "github.com/alecthomas/kingpin/v2"
 )
 
 var (
@@ -377,8 +396,8 @@ var (
 
 func main() {
   switch kingpin.Parse() {
-  case "delete user":
-  case "delete post":
+  case deleteUserCommand.FullCommand():
+  case deletePostCommand.FullCommand():
   }
 }
 ```
@@ -514,35 +533,35 @@ ips := IPList(kingpin.Arg("ips", "IP addresses to ping."))
 
 ### Bash/ZSH Shell Completion
 
-By default, all flags and commands/subcommands generate completions 
+By default, all flags and commands/subcommands generate completions
 internally.
 
-Out of the box, CLI tools using kingpin should be able to take advantage 
-of completion hinting for flags and commands. By specifying 
-`--completion-bash` as the first argument, your CLI tool will show 
-possible subcommands. By ending your argv with `--`, hints for flags 
+Out of the box, CLI tools using kingpin should be able to take advantage
+of completion hinting for flags and commands. By specifying
+`--completion-bash` as the first argument, your CLI tool will show
+possible subcommands. By ending your argv with `--`, hints for flags
 will be shown.
 
-To allow your end users to take advantage you must package a 
-`/etc/bash_completion.d` script with your distribution (or the equivalent 
-for your target platform/shell). An alternative is to instruct your end 
+To allow your end users to take advantage you must package a
+`/etc/bash_completion.d` script with your distribution (or the equivalent
+for your target platform/shell). An alternative is to instruct your end
 user to source a script from their `bash_profile` (or equivalent).
 
 Fortunately Kingpin makes it easy to generate or source a script for use
-with end users shells. `./yourtool --completion-script-bash` and 
+with end users shells. `./yourtool --completion-script-bash` and
 `./yourtool --completion-script-zsh` will generate these scripts for you.
 
 **Installation by Package**
 
-For the best user experience, you should bundle your pre-created 
-completion script with your CLI tool and install it inside 
-`/etc/bash_completion.d` (or equivalent). A good suggestion is to add 
-this as an automated step to your build pipeline, in the implementation 
+For the best user experience, you should bundle your pre-created
+completion script with your CLI tool and install it inside
+`/etc/bash_completion.d` (or equivalent). A good suggestion is to add
+this as an automated step to your build pipeline, in the implementation
 is improved for bug fixed.
 
 **Installation by `bash_profile`**
 
-Alternatively, instruct your users to add an additional statement to 
+Alternatively, instruct your users to add an additional statement to
 their `bash_profile` (or equivalent):
 
 ```
@@ -558,13 +577,13 @@ eval "$(your-cli-tool --completion-script-zsh)"
 #### Additional API
 To provide more flexibility, a completion option API has been
 exposed for flags to allow user defined completion options, to extend
-completions further than just EnumVar/Enum. 
+completions further than just EnumVar/Enum.
 
 
 **Provide Static Options**
 
-When using an `Enum` or `EnumVar`, users are limited to only the options 
-given. Maybe we wish to hint possible options to the user, but also 
+When using an `Enum` or `EnumVar`, users are limited to only the options
+given. Maybe we wish to hint possible options to the user, but also
 allow them to provide their own custom option. `HintOptions` gives
 this functionality to flags.
 
@@ -577,7 +596,7 @@ app.Flag("port", "Provide a port to connect to").
 ```
 
 **Provide Dynamic Options**
-Consider the case that you needed to read a local database or a file to 
+Consider the case that you needed to read a local database or a file to
 provide suggestions. You can dynamically generate the options
 
 ```
@@ -596,13 +615,13 @@ app.Flag("flag-1", "").HintAction(listHosts).String()
 
 **EnumVar/Enum**
 When using `Enum` or `EnumVar`, any provided options will be automatically
-used for bash autocompletion. However, if you wish to provide a subset or 
+used for bash autocompletion. However, if you wish to provide a subset or
 different options, you can use `HintOptions` or `HintAction` which will override
 the default completion options for `Enum`/`EnumVar`.
 
 
 **Examples**
-You can see an in depth example of the completion API within 
+You can see an in depth example of the completion API within
 `examples/completion/main.go`
 
 
@@ -610,11 +629,27 @@ You can see an in depth example of the completion API within
 
 `kingpin.CommandLine.HelpFlag.Short('h')`
 
+Short help is also available when creating a more complicated app:
+
+```go
+var (
+	app = kingpin.New("chat", "A command-line chat application.")
+  // ...
+)
+
+func main() {
+	app.HelpFlag.Short('h')
+	switch kingpin.MustParse(app.Parse(os.Args[1:])) {
+  // ...
+  }
+}
+```
+
 ### Custom help
 
 Kingpin v2 supports templatised help using the text/template library (actually, [a fork](https://github.com/alecthomas/template)).
 
-You can specify the template to use with the [Application.UsageTemplate()](http://godoc.org/gopkg.in/alecthomas/kingpin.v2#Application.UsageTemplate) function.
+You can specify the template to use with the [Application.UsageTemplate()](http://godoc.org/github.com/alecthomas/kingpin/v2#Application.UsageTemplate) function.
 
 There are four included templates: `kingpin.DefaultUsageTemplate` is the default,
 `kingpin.CompactUsageTemplate` provides a more compact representation for more complex command-line structures,
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go b/vendor/github.com/alecthomas/kingpin/v2/actions.go
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/actions.go
rename to vendor/github.com/alecthomas/kingpin/v2/actions.go
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/app.go b/vendor/github.com/alecthomas/kingpin/v2/app.go
similarity index 97%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/app.go
rename to vendor/github.com/alecthomas/kingpin/v2/app.go
index 1a1a5effd078..4f1f31be224a 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/app.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/app.go
@@ -6,6 +6,7 @@ import (
 	"os"
 	"regexp"
 	"strings"
+	"text/template"
 )
 
 var (
@@ -32,6 +33,7 @@ type Application struct {
 	errorWriter    io.Writer // Destination for errors.
 	usageWriter    io.Writer // Destination for usage
 	usageTemplate  string
+	usageFuncs     template.FuncMap
 	validator      ApplicationValidator
 	terminate      func(status int) // See Terminate()
 	noInterspersed bool             // can flags be interspersed with args (or must they come first)
@@ -153,6 +155,12 @@ func (a *Application) UsageTemplate(template string) *Application {
 	return a
 }
 
+// UsageFuncs adds extra functions that can be used in the usage template.
+func (a *Application) UsageFuncs(funcs template.FuncMap) *Application {
+	a.usageFuncs = funcs
+	return a
+}
+
 // Validate sets a validation function to run when parsing.
 func (a *Application) Validate(validator ApplicationValidator) *Application {
 	a.validator = validator
@@ -405,6 +413,10 @@ func (a *Application) setDefaults(context *ParseContext) error {
 			if flag.name == "help" {
 				return nil
 			}
+
+			if flag.name == "version" {
+				return nil
+			}
 			flagElements[flag.name] = element
 		}
 	}
@@ -452,14 +464,18 @@ func (a *Application) validateRequired(context *ParseContext) error {
 	}
 
 	// Check required flags and set defaults.
+	var missingFlags []string
 	for _, flag := range context.flags.long {
 		if flagElements[flag.name] == nil {
 			// Check required flags were provided.
 			if flag.needsValue() {
-				return fmt.Errorf("required flag --%s not provided", flag.name)
+				missingFlags = append(missingFlags, fmt.Sprintf("'--%s'", flag.name))
 			}
 		}
 	}
+	if len(missingFlags) != 0 {
+		return fmt.Errorf("required flag(s) %s not provided", strings.Join(missingFlags, ", "))
+	}
 
 	for _, arg := range context.arguments.args {
 		if argElements[arg.name] == nil {
@@ -496,11 +512,6 @@ func (a *Application) setValues(context *ParseContext) (selected []string, err e
 			}
 
 		case *CmdClause:
-			if clause.validator != nil {
-				if err = clause.validator(clause); err != nil {
-					return
-				}
-			}
 			selected = append(selected, clause.name)
 			lastCmd = clause
 		}
@@ -631,6 +642,10 @@ func (a *Application) completionOptions(context *ParseContext) []string {
 	}
 
 	if (currArg != "" && strings.HasPrefix(currArg, "--")) || strings.HasPrefix(prevArg, "--") {
+		if context.argsOnly {
+			return nil
+		}
+
 		// Perform completion for A flag. The last/current argument started with "-"
 		var (
 			flagName  string // The name of a flag if given (could be half complete)
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/args.go b/vendor/github.com/alecthomas/kingpin/v2/args.go
similarity index 88%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/args.go
rename to vendor/github.com/alecthomas/kingpin/v2/args.go
index 340069476b92..54e4107191bc 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/args.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/args.go
@@ -71,6 +71,8 @@ type ArgClause struct {
 	name          string
 	help          string
 	defaultValues []string
+	placeholder   string
+	hidden        bool
 	required      bool
 }
 
@@ -120,6 +122,19 @@ func (a *ArgClause) consumesRemainder() bool {
 	return false
 }
 
+// Hidden hides the argument from usage but still allows it to be used.
+func (a *ArgClause) Hidden() *ArgClause {
+	a.hidden = true
+	return a
+}
+
+// PlaceHolder sets the place-holder string used for arg values in the help. The
+// default behaviour is to use the arg name between < > brackets.
+func (a *ArgClause) PlaceHolder(value string) *ArgClause {
+	a.placeholder = value
+	return a
+}
+
 // Required arguments must be input by the user. They can not have a Default() value provided.
 func (a *ArgClause) Required() *ArgClause {
 	a.required = true
@@ -173,6 +188,12 @@ func (a *ArgClause) HintOptions(options ...string) *ArgClause {
 	return a
 }
 
+// Help sets the help message.
+func (a *ArgClause) Help(help string) *ArgClause {
+	a.help = help
+	return a
+}
+
 func (a *ArgClause) init() error {
 	if a.required && len(a.defaultValues) > 0 {
 		return fmt.Errorf("required argument '%s' with unusable default value", a.name)
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go b/vendor/github.com/alecthomas/kingpin/v2/cmd.go
similarity index 80%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go
rename to vendor/github.com/alecthomas/kingpin/v2/cmd.go
index 0473b871d5bd..cd7e6120bac7 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/cmd.go
@@ -21,11 +21,47 @@ func (c *cmdMixin) CmdCompletion(context *ParseContext) []string {
 	// default commands' alternatives, since they weren't listed explicitly
 	// and the user may want to explicitly list something else.
 	argsSatisfied := 0
+	allSatisfied := false
+ElementLoop:
 	for _, el := range context.Elements {
 		switch clause := el.Clause.(type) {
 		case *ArgClause:
+			// Each new element should reset the previous state
+			allSatisfied = false
+			options = nil
+
 			if el.Value != nil && *el.Value != "" {
-				argsSatisfied++
+				// Get the list of valid options for the last argument
+				validOptions := c.argGroup.args[argsSatisfied].resolveCompletions()
+				if len(validOptions) == 0 {
+					// If there are no options for this argument,
+					// mark is as allSatisfied as we can't suggest anything
+					if !clause.consumesRemainder() {
+						argsSatisfied++
+						allSatisfied = true
+					}
+					continue ElementLoop
+				}
+
+				for _, opt := range validOptions {
+					if opt == *el.Value {
+						// We have an exact match
+						// We don't need to suggest any option
+						if !clause.consumesRemainder() {
+							argsSatisfied++
+						}
+						continue ElementLoop
+					}
+					if strings.HasPrefix(opt, *el.Value) {
+						// If the option match the partially entered argument, add it to the list
+						options = append(options, opt)
+					}
+				}
+				// Avoid further completion as we have done everything we could
+				if !clause.consumesRemainder() {
+					argsSatisfied++
+					allSatisfied = true
+				}
 			}
 		case *CmdClause:
 			options = append(options, clause.completionAlts...)
@@ -33,7 +69,7 @@ func (c *cmdMixin) CmdCompletion(context *ParseContext) []string {
 		}
 	}
 
-	if argsSatisfied < len(c.argGroup.args) {
+	if argsSatisfied < len(c.argGroup.args) && !allSatisfied {
 		// Since not all args have been satisfied, show options for the current one
 		options = append(options, c.argGroup.args[argsSatisfied].resolveCompletions()...)
 	} else {
@@ -191,6 +227,7 @@ type CmdClause struct {
 	name           string
 	aliases        []string
 	help           string
+	helpLong       string
 	isDefault      bool
 	validator      CmdClauseValidator
 	hidden         bool
@@ -252,6 +289,12 @@ func (c *CmdClause) PreAction(action Action) *CmdClause {
 	return c
 }
 
+// Help sets the help message.
+func (c *CmdClause) Help(help string) *CmdClause {
+	c.help = help
+	return c
+}
+
 func (c *CmdClause) init() error {
 	if err := c.flagGroup.init(c.app.defaultEnvarPrefix()); err != nil {
 		return err
@@ -272,3 +315,11 @@ func (c *CmdClause) Hidden() *CmdClause {
 	c.hidden = true
 	return c
 }
+
+// HelpLong adds a long help text, which can be used in usage templates.
+// For example, to use a longer help text in the command-specific help
+// than in the apps root help.
+func (c *CmdClause) HelpLong(help string) *CmdClause {
+	c.helpLong = help
+	return c
+}
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go b/vendor/github.com/alecthomas/kingpin/v2/completions.go
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/completions.go
rename to vendor/github.com/alecthomas/kingpin/v2/completions.go
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go b/vendor/github.com/alecthomas/kingpin/v2/doc.go
similarity index 97%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/doc.go
rename to vendor/github.com/alecthomas/kingpin/v2/doc.go
index cb951a80459b..8a72729d7890 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/doc.go
@@ -35,7 +35,7 @@
 //
 //     package main
 //
-//     import "gopkg.in/alecthomas/kingpin.v2"
+//     import "github.com/alecthomas/kingpin/v2"
 //
 //     var (
 //       debug    = kingpin.Flag("debug", "enable debug mode").Default("false").Bool()
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/envar.go b/vendor/github.com/alecthomas/kingpin/v2/envar.go
similarity index 81%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/envar.go
rename to vendor/github.com/alecthomas/kingpin/v2/envar.go
index c01a27df8001..44e16de3b811 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/envar.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/envar.go
@@ -28,18 +28,13 @@ func (e *envarMixin) GetEnvarValue() string {
 }
 
 func (e *envarMixin) GetSplitEnvarValue() []string {
-	values := make([]string, 0)
-
 	envarValue := e.GetEnvarValue()
 	if envarValue == "" {
-		return values
+		return []string{}
 	}
 
 	// Split by new line to extract multiple values, if any.
 	trimmed := envVarValuesTrimmer.ReplaceAllString(envarValue, "")
-	for _, value := range envVarValuesSplitter.Split(trimmed, -1) {
-		values = append(values, value)
-	}
 
-	return values
+	return envVarValuesSplitter.Split(trimmed, -1)
 }
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go b/vendor/github.com/alecthomas/kingpin/v2/flags.go
similarity index 94%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/flags.go
rename to vendor/github.com/alecthomas/kingpin/v2/flags.go
index 8f33721fc9ec..2b2938b482f5 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/flags.go
@@ -109,6 +109,8 @@ loop:
 
 			context.Next()
 
+			flag.isSetByUser()
+
 			fb, ok := flag.value.(boolFlag)
 			if ok && fb.IsBoolFlag() {
 				if invert {
@@ -152,6 +154,7 @@ type FlagClause struct {
 	defaultValues []string
 	placeholder   string
 	hidden        bool
+	setByUser     *bool
 }
 
 func newFlag(name, help string) *FlagClause {
@@ -189,6 +192,12 @@ func (f *FlagClause) setDefault() error {
 	return nil
 }
 
+func (f *FlagClause) isSetByUser() {
+	if f.setByUser != nil {
+		*f.setByUser = true
+	}
+}
+
 func (f *FlagClause) needsValue() bool {
 	haveDefault := len(f.defaultValues) > 0
 	return f.required && !(haveDefault || f.HasEnvarValue())
@@ -246,6 +255,15 @@ func (a *FlagClause) Enum(options ...string) (target *string) {
 	return a.parserMixin.Enum(options...)
 }
 
+// IsSetByUser let to know if the flag was set by the user
+func (f *FlagClause) IsSetByUser(setByUser *bool) *FlagClause {
+	if setByUser != nil {
+		*setByUser = false
+	}
+	f.setByUser = setByUser
+	return f
+}
+
 // Default values for this flag. They *must* be parseable by the value of the flag.
 func (f *FlagClause) Default(values ...string) *FlagClause {
 	f.defaultValues = values
@@ -300,6 +318,12 @@ func (f *FlagClause) Short(name rune) *FlagClause {
 	return f
 }
 
+// Help sets the help message.
+func (f *FlagClause) Help(help string) *FlagClause {
+	f.help = help
+	return f
+}
+
 // Bool makes this flag a boolean flag.
 func (f *FlagClause) Bool() (target *bool) {
 	target = new(bool)
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/global.go b/vendor/github.com/alecthomas/kingpin/v2/global.go
similarity index 97%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/global.go
rename to vendor/github.com/alecthomas/kingpin/v2/global.go
index 10a29137ce1e..4d073eabb652 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/global.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/global.go
@@ -14,6 +14,8 @@ var (
 	HelpCommand = CommandLine.HelpCommand
 	// Global version flag. Exposed for user customisation. May be nil.
 	VersionFlag = CommandLine.VersionFlag
+	// Whether to file expansion with '@' is enabled.
+	EnableFileExpansion = true
 )
 
 // Command adds a new command to the default parser.
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go b/vendor/github.com/alecthomas/kingpin/v2/guesswidth.go
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go
rename to vendor/github.com/alecthomas/kingpin/v2/guesswidth.go
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go b/vendor/github.com/alecthomas/kingpin/v2/guesswidth_unix.go
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go
rename to vendor/github.com/alecthomas/kingpin/v2/guesswidth_unix.go
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/model.go b/vendor/github.com/alecthomas/kingpin/v2/model.go
similarity index 77%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/model.go
rename to vendor/github.com/alecthomas/kingpin/v2/model.go
index a4ee83b421d5..382616c5d96a 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/model.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/model.go
@@ -8,6 +8,17 @@ import (
 
 // Data model for Kingpin command-line structure.
 
+var (
+	ignoreInCount = map[string]bool{
+		"help":                   true,
+		"help-long":              true,
+		"help-man":               true,
+		"completion-bash":        true,
+		"completion-script-bash": true,
+		"completion-script-zsh":  true,
+	}
+)
+
 type FlagGroupModel struct {
 	Flags []*FlagModel
 }
@@ -15,10 +26,13 @@ type FlagGroupModel struct {
 func (f *FlagGroupModel) FlagSummary() string {
 	out := []string{}
 	count := 0
+
 	for _, flag := range f.Flags {
-		if flag.Name != "help" {
+
+		if !ignoreInCount[flag.Name] {
 			count++
 		}
+
 		if flag.Required {
 			if flag.IsBoolFlag() {
 				out = append(out, fmt.Sprintf("--[no-]%s", flag.Name))
@@ -46,6 +60,9 @@ type FlagModel struct {
 }
 
 func (f *FlagModel) String() string {
+	if f.Value == nil {
+		return ""
+	}
 	return f.Value.String()
 }
 
@@ -73,6 +90,13 @@ func (f *FlagModel) FormatPlaceHolder() string {
 	return strings.ToUpper(f.Name)
 }
 
+func (f *FlagModel) HelpWithEnvar() string {
+	if f.Envar == "" {
+		return f.Help
+	}
+	return fmt.Sprintf("%s ($%s)", f.Help, f.Envar)
+}
+
 type ArgGroupModel struct {
 	Args []*ArgModel
 }
@@ -81,7 +105,12 @@ func (a *ArgGroupModel) ArgSummary() string {
 	depth := 0
 	out := []string{}
 	for _, arg := range a.Args {
-		h := "<" + arg.Name + ">"
+		var h string
+		if arg.PlaceHolder != "" {
+			h = arg.PlaceHolder
+		} else {
+			h = "<" + arg.Name + ">"
+		}
 		if !arg.Required {
 			h = "[" + h
 			depth++
@@ -92,16 +121,29 @@ func (a *ArgGroupModel) ArgSummary() string {
 	return strings.Join(out, " ")
 }
 
+func (a *ArgModel) HelpWithEnvar() string {
+	if a.Envar == "" {
+		return a.Help
+	}
+	return fmt.Sprintf("%s ($%s)", a.Help, a.Envar)
+}
+
 type ArgModel struct {
-	Name     string
-	Help     string
-	Default  []string
-	Envar    string
-	Required bool
-	Value    Value
+	Name        string
+	Help        string
+	Default     []string
+	Envar       string
+	PlaceHolder string
+	Required    bool
+	Hidden      bool
+	Value       Value
 }
 
 func (a *ArgModel) String() string {
+	if a.Value == nil {
+		return ""
+	}
+
 	return a.Value.String()
 }
 
@@ -123,6 +165,7 @@ type CmdModel struct {
 	Name        string
 	Aliases     []string
 	Help        string
+	HelpLong    string
 	FullCommand string
 	Depth       int
 	Hidden      bool
@@ -168,12 +211,14 @@ func (a *argGroup) Model() *ArgGroupModel {
 
 func (a *ArgClause) Model() *ArgModel {
 	return &ArgModel{
-		Name:     a.name,
-		Help:     a.help,
-		Default:  a.defaultValues,
-		Envar:    a.envar,
-		Required: a.required,
-		Value:    a.value,
+		Name:        a.name,
+		Help:        a.help,
+		Default:     a.defaultValues,
+		Envar:       a.envar,
+		PlaceHolder: a.placeholder,
+		Required:    a.required,
+		Hidden:      a.hidden,
+		Value:       a.value,
 	}
 }
 
@@ -216,6 +261,7 @@ func (c *CmdClause) Model() *CmdModel {
 		Name:           c.name,
 		Aliases:        c.aliases,
 		Help:           c.help,
+		HelpLong:       c.helpLong,
 		Depth:          depth,
 		Hidden:         c.hidden,
 		Default:        c.isDefault,
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go b/vendor/github.com/alecthomas/kingpin/v2/parser.go
similarity index 95%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/parser.go
rename to vendor/github.com/alecthomas/kingpin/v2/parser.go
index 2a18351928a5..5f28c78dcfcc 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/parser.go
@@ -144,9 +144,7 @@ func (p *ParseContext) mergeFlags(flags *flagGroup) {
 }
 
 func (p *ParseContext) mergeArgs(args *argGroup) {
-	for _, arg := range args.args {
-		p.arguments.args = append(p.arguments.args, arg)
-	}
+	p.arguments.args = append(p.arguments.args, args.args...)
 }
 
 func (p *ParseContext) EOL() bool {
@@ -168,6 +166,10 @@ func (p *ParseContext) Next() *Token {
 		return &Token{Index: p.argi, Type: TokenEOL}
 	}
 
+	if p.argi > 0 && p.argi <= len(p.rawArgs) && p.rawArgs[p.argi-1] == "--" {
+		// If the previous argument was a --, from now on only arguments are parsed.
+		p.argsOnly = true
+	}
 	arg := p.args[0]
 	p.next()
 
@@ -175,9 +177,7 @@ func (p *ParseContext) Next() *Token {
 		return &Token{p.argi, TokenArg, arg}
 	}
 
-	// All remaining args are passed directly.
 	if arg == "--" {
-		p.argsOnly = true
 		return p.Next()
 	}
 
@@ -192,7 +192,7 @@ func (p *ParseContext) Next() *Token {
 
 	if strings.HasPrefix(arg, "-") {
 		if len(arg) == 1 {
-			return &Token{Index: p.argi, Type: TokenShort}
+			return &Token{Index: p.argi, Type: TokenArg}
 		}
 		shortRune, size := utf8.DecodeRuneInString(arg[1:])
 		short := string(shortRune)
@@ -214,7 +214,7 @@ func (p *ParseContext) Next() *Token {
 			p.args = append([]string{"-" + arg[size+1:]}, p.args...)
 		}
 		return &Token{p.argi, TokenShort, short}
-	} else if strings.HasPrefix(arg, "@") {
+	} else if EnableFileExpansion && strings.HasPrefix(arg, "@") {
 		expanded, err := ExpandArgsFromFile(arg[1:])
 		if err != nil {
 			return &Token{p.argi, TokenError, err.Error()}
@@ -281,7 +281,7 @@ func ExpandArgsFromFile(filename string) (out []string, err error) {
 	scanner := bufio.NewScanner(r)
 	for scanner.Scan() {
 		line := scanner.Text()
-		if strings.HasPrefix(line, "#") {
+		if strings.HasPrefix(line, "#") || strings.TrimSpace(line) == "" {
 			continue
 		}
 		out = append(out, line)
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go b/vendor/github.com/alecthomas/kingpin/v2/parsers.go
similarity index 98%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go
rename to vendor/github.com/alecthomas/kingpin/v2/parsers.go
index d9ad57e5cfbb..5a0688215dc3 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/parsers.go
@@ -18,6 +18,10 @@ type parserMixin struct {
 	required bool
 }
 
+func (p *parserMixin) SetText(text Text) {
+	p.value = &wrapText{text}
+}
+
 func (p *parserMixin) SetValue(value Value) {
 	p.value = value
 }
diff --git a/vendor/github.com/alecthomas/kingpin/v2/templates.go b/vendor/github.com/alecthomas/kingpin/v2/templates.go
new file mode 100644
index 000000000000..703c2cda7c5f
--- /dev/null
+++ b/vendor/github.com/alecthomas/kingpin/v2/templates.go
@@ -0,0 +1,262 @@
+package kingpin
+
+// Default usage template.
+var DefaultUsageTemplate = `{{define "FormatCommand" -}}
+{{if .FlagSummary}} {{.FlagSummary}}{{end -}}
+{{range .Args}}{{if not .Hidden}} {{if not .Required}}[{{end}}{{if .PlaceHolder}}{{.PlaceHolder}}{{else}}<{{.Name}}>{{end}}{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}{{end -}}
+{{end -}}
+
+{{define "FormatCommands" -}}
+{{range .FlattenedCommands -}}
+{{if not .Hidden -}}
+  {{.FullCommand}}{{if .Default}}*{{end}}{{template "FormatCommand" .}}
+{{.Help|Wrap 4}}
+{{end -}}
+{{end -}}
+{{end -}}
+
+{{define "FormatUsage" -}}
+{{template "FormatCommand" .}}{{if .Commands}}  [ ...]{{end}}
+{{if .Help}}
+{{.Help|Wrap 0 -}}
+{{end -}}
+
+{{end -}}
+
+{{if .Context.SelectedCommand -}}
+usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}}
+{{ else -}}
+usage: {{.App.Name}}{{template "FormatUsage" .App}}
+{{end}}
+{{if .Context.Flags -}}
+Flags:
+{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.Args -}}
+Args:
+{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.SelectedCommand -}}
+{{if len .Context.SelectedCommand.Commands -}}
+Subcommands:
+{{template "FormatCommands" .Context.SelectedCommand}}
+{{end -}}
+{{else if .App.Commands -}}
+Commands:
+{{template "FormatCommands" .App}}
+{{end -}}
+`
+
+// Usage template where command's optional flags are listed separately
+var SeparateOptionalFlagsUsageTemplate = `{{define "FormatCommand" -}}
+{{if .FlagSummary}} {{.FlagSummary}}{{end -}}
+{{range .Args}}{{if not .Hidden}} {{if not .Required}}[{{end}}{{if .PlaceHolder}}{{.PlaceHolder}}{{else}}<{{.Name}}>{{end}}{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}{{end -}}
+{{end -}}
+
+{{define "FormatCommands" -}}
+{{range .FlattenedCommands -}}
+{{if not .Hidden -}}
+  {{.FullCommand}}{{if .Default}}*{{end}}{{template "FormatCommand" .}}
+{{.Help|Wrap 4}}
+{{end -}}
+{{end -}}
+{{end -}}
+
+{{define "FormatUsage" -}}
+{{template "FormatCommand" .}}{{if .Commands}}  [ ...]{{end}}
+{{if .Help}}
+{{.Help|Wrap 0 -}}
+{{end -}}
+
+{{end -}}
+{{if .Context.SelectedCommand -}}
+usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}}
+{{else -}}
+usage: {{.App.Name}}{{template "FormatUsage" .App}}
+{{end -}}
+
+{{if .Context.Flags|RequiredFlags -}}
+Required flags:
+{{.Context.Flags|RequiredFlags|FlagsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if  .Context.Flags|OptionalFlags -}}
+Optional flags:
+{{.Context.Flags|OptionalFlags|FlagsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.Args -}}
+Args:
+{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.SelectedCommand -}}
+Subcommands:
+{{if .Context.SelectedCommand.Commands -}}
+{{template "FormatCommands" .Context.SelectedCommand}}
+{{end -}}
+{{else if .App.Commands -}}
+Commands:
+{{template "FormatCommands" .App}}
+{{end -}}
+`
+
+// Usage template with compactly formatted commands.
+var CompactUsageTemplate = `{{define "FormatCommand" -}}
+{{if .FlagSummary}} {{.FlagSummary}}{{end -}}
+{{range .Args}}{{if not .Hidden}} {{if not .Required}}[{{end}}{{if .PlaceHolder}}{{.PlaceHolder}}{{else}}<{{.Name}}>{{end}}{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}{{end -}}
+{{end -}}
+
+{{define "FormatCommandList" -}}
+{{range . -}}
+{{if not .Hidden -}}
+{{.Depth|Indent}}{{.Name}}{{if .Default}}*{{end}}{{template "FormatCommand" .}}
+{{end -}}
+{{template "FormatCommandList" .Commands -}}
+{{end -}}
+{{end -}}
+
+{{define "FormatUsage" -}}
+{{template "FormatCommand" .}}{{if .Commands}}  [ ...]{{end}}
+{{if .Help}}
+{{.Help|Wrap 0 -}}
+{{end -}}
+
+{{end -}}
+
+{{if .Context.SelectedCommand -}}
+usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}}
+{{else -}}
+usage: {{.App.Name}}{{template "FormatUsage" .App}}
+{{end -}}
+{{if .Context.Flags -}}
+Flags:
+{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.Args -}}
+Args:
+{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.SelectedCommand -}}
+{{if .Context.SelectedCommand.Commands -}}
+Commands:
+  {{.Context.SelectedCommand}}
+{{template "FormatCommandList" .Context.SelectedCommand.Commands}}
+{{end -}}
+{{else if .App.Commands -}}
+Commands:
+{{template "FormatCommandList" .App.Commands}}
+{{end -}}
+`
+
+var ManPageTemplate = `{{define "FormatFlags" -}}
+{{range .Flags -}}
+{{if not .Hidden -}}
+.TP
+\fB{{if .Short}}-{{.Short|Char}}, {{end}}--{{.Name}}{{if not .IsBoolFlag}}={{.FormatPlaceHolder}}{{end -}}\fR
+{{.Help}}
+{{end -}}
+{{end -}}
+{{end -}}
+
+{{define "FormatCommand" -}}
+{{if .FlagSummary}} {{.FlagSummary}}{{end -}}
+{{range .Args}}{{if not .Hidden}} {{if not .Required}}[{{end}}{{if .PlaceHolder}}{{.PlaceHolder}}{{else}}<{{.Name}}>{{end}}{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}{{end -}}
+{{end -}}
+
+{{define "FormatCommands" -}}
+{{range .FlattenedCommands -}}
+{{if not .Hidden -}}
+.SS
+\fB{{.FullCommand}}{{template "FormatCommand" . -}}\fR
+.PP
+{{.Help}}
+{{template "FormatFlags" . -}}
+{{end -}}
+{{end -}}
+{{end -}}
+
+{{define "FormatUsage" -}}
+{{template "FormatCommand" .}}{{if .Commands}}  [ ...]{{end -}}\fR
+{{end -}}
+
+.TH {{.App.Name}} 1 {{.App.Version}} "{{.App.Author}}"
+.SH "NAME"
+{{.App.Name}}
+.SH "SYNOPSIS"
+.TP
+\fB{{.App.Name}}{{template "FormatUsage" .App}}
+.SH "DESCRIPTION"
+{{.App.Help}}
+.SH "OPTIONS"
+{{template "FormatFlags" .App -}}
+{{if .App.Commands -}}
+.SH "COMMANDS"
+{{template "FormatCommands" .App -}}
+{{end -}}
+`
+
+// Default usage template.
+var LongHelpTemplate = `{{define "FormatCommand" -}}
+{{if .FlagSummary}} {{.FlagSummary}}{{end -}}
+{{range .Args}}{{if not .Hidden}} {{if not .Required}}[{{end}}{{if .PlaceHolder}}{{.PlaceHolder}}{{else}}<{{.Name}}>{{end}}{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}{{end -}}
+{{end -}}
+
+{{define "FormatCommands" -}}
+{{range .FlattenedCommands -}}
+{{if not .Hidden -}}
+  {{.FullCommand}}{{template "FormatCommand" .}}
+{{.Help|Wrap 4}}
+{{with .Flags|FlagsToTwoColumns}}{{FormatTwoColumnsWithIndent . 4 2}}{{end}}
+{{end -}}
+{{end -}}
+{{end -}}
+
+{{define "FormatUsage" -}}
+{{template "FormatCommand" .}}{{if .Commands}}  [ ...]{{end}}
+{{if .Help}}
+{{.Help|Wrap 0 -}}
+{{end -}}
+
+{{end -}}
+
+usage: {{.App.Name}}{{template "FormatUsage" .App}}
+{{if .Context.Flags -}}
+Flags:
+{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .Context.Args -}}
+Args:
+{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}}
+{{end -}}
+{{if .App.Commands -}}
+Commands:
+{{template "FormatCommands" .App}}
+{{end -}}
+`
+
+var BashCompletionTemplate = `
+_{{.App.Name}}_bash_autocomplete() {
+    local cur prev opts base
+    COMPREPLY=()
+    cur="${COMP_WORDS[COMP_CWORD]}"
+    opts=$( ${COMP_WORDS[0]} --completion-bash "${COMP_WORDS[@]:1:$COMP_CWORD}" )
+    COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
+    return 0
+}
+complete -F _{{.App.Name}}_bash_autocomplete -o default {{.App.Name}}
+
+`
+
+var ZshCompletionTemplate = `#compdef {{.App.Name}}
+
+_{{.App.Name}}() {
+    local matches=($(${words[1]} --completion-bash "${(@)words[2,$CURRENT]}"))
+    compadd -a matches
+
+    if [[ $compstate[nmatches] -eq 0 && $words[$CURRENT] != -* ]]; then
+        _files
+    fi
+}
+
+if [[ "$(basename -- ${(%):-%x})" != "_{{.App.Name}}" ]]; then
+    compdef _{{.App.Name}} {{.App.Name}}
+fi
+`
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go b/vendor/github.com/alecthomas/kingpin/v2/usage.go
similarity index 87%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/usage.go
rename to vendor/github.com/alecthomas/kingpin/v2/usage.go
index 44af6f65729f..9b3dd731f13a 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/usage.go
@@ -6,8 +6,7 @@ import (
 	"go/doc"
 	"io"
 	"strings"
-
-	"github.com/alecthomas/template"
+	"text/template"
 )
 
 var (
@@ -64,24 +63,28 @@ func formatAppUsage(app *ApplicationModel) string {
 
 func formatCmdUsage(app *ApplicationModel, cmd *CmdModel) string {
 	s := []string{app.Name, cmd.String()}
-	if len(app.Flags) > 0 {
-		s = append(s, app.FlagSummary())
+	if len(cmd.Flags) > 0 {
+		s = append(s, cmd.FlagSummary())
 	}
-	if len(app.Args) > 0 {
-		s = append(s, app.ArgSummary())
+	if len(cmd.Args) > 0 {
+		s = append(s, cmd.ArgSummary())
 	}
 	return strings.Join(s, " ")
 }
 
 func formatFlag(haveShort bool, flag *FlagModel) string {
 	flagString := ""
+	flagName := flag.Name
+	if flag.IsBoolFlag() {
+		flagName = "[no-]" + flagName
+	}
 	if flag.Short != 0 {
-		flagString += fmt.Sprintf("-%c, --%s", flag.Short, flag.Name)
+		flagString += fmt.Sprintf("-%c, --%s", flag.Short, flagName)
 	} else {
 		if haveShort {
-			flagString += fmt.Sprintf("    --%s", flag.Name)
+			flagString += fmt.Sprintf("    --%s", flagName)
 		} else {
-			flagString += fmt.Sprintf("--%s", flag.Name)
+			flagString += fmt.Sprintf("--%s", flagName)
 		}
 	}
 	if !flag.IsBoolFlag() {
@@ -136,7 +139,7 @@ func (a *Application) UsageForContextWithTemplate(context *ParseContext, indent
 			}
 			for _, flag := range f {
 				if !flag.Hidden {
-					rows = append(rows, [2]string{formatFlag(haveShort, flag), flag.Help})
+					rows = append(rows, [2]string{formatFlag(haveShort, flag), flag.HelpWithEnvar()})
 				}
 			}
 			return rows
@@ -162,11 +165,18 @@ func (a *Application) UsageForContextWithTemplate(context *ParseContext, indent
 		"ArgsToTwoColumns": func(a []*ArgModel) [][2]string {
 			rows := [][2]string{}
 			for _, arg := range a {
-				s := "<" + arg.Name + ">"
-				if !arg.Required {
-					s = "[" + s + "]"
+				if !arg.Hidden {
+					var s string
+					if arg.PlaceHolder != "" {
+						s = arg.PlaceHolder
+					} else {
+						s = "<" + arg.Name + ">"
+					}
+					if !arg.Required {
+						s = "[" + s + "]"
+					}
+					rows = append(rows, [2]string{s, arg.HelpWithEnvar()})
 				}
-				rows = append(rows, [2]string{s, arg.Help})
 			}
 			return rows
 		},
@@ -190,6 +200,10 @@ func (a *Application) UsageForContextWithTemplate(context *ParseContext, indent
 			return string(c)
 		},
 	}
+	for k, v := range a.usageFuncs {
+		funcs[k] = v
+	}
+
 	t, err := template.New("usage").Funcs(funcs).Parse(tmpl)
 	if err != nil {
 		return err
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values.go b/vendor/github.com/alecthomas/kingpin/v2/values.go
similarity index 95%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/values.go
rename to vendor/github.com/alecthomas/kingpin/v2/values.go
index 7ee9a3b3e181..e6e05bc21ddb 100644
--- a/vendor/gopkg.in/alecthomas/kingpin.v2/values.go
+++ b/vendor/github.com/alecthomas/kingpin/v2/values.go
@@ -3,6 +3,7 @@ package kingpin
 //go:generate go run ./cmd/genvalues/main.go
 
 import (
+	"encoding"
 	"fmt"
 	"net"
 	"net/url"
@@ -13,6 +14,7 @@ import (
 	"time"
 
 	"github.com/alecthomas/units"
+	"github.com/xhit/go-str2duration/v2"
 )
 
 // NOTE: Most of the base type values were lifted from:
@@ -42,23 +44,40 @@ type Getter interface {
 // Optional interface to indicate boolean flags that don't accept a value, and
 // implicitly have a --no- negation counterpart.
 type boolFlag interface {
-	Value
 	IsBoolFlag() bool
 }
 
 // Optional interface for arguments that cumulatively consume all remaining
 // input.
 type remainderArg interface {
-	Value
 	IsCumulative() bool
 }
 
 // Optional interface for flags that can be repeated.
 type repeatableFlag interface {
-	Value
 	IsCumulative() bool
 }
 
+// Text is the interface to the dynamic value stored in a flag.
+// (The default value is represented as a string.)
+type Text interface {
+	encoding.TextMarshaler
+	encoding.TextUnmarshaler
+}
+
+type wrapText struct {
+	text Text
+}
+
+func (w wrapText) String() string {
+	buf, _ := w.text.MarshalText()
+	return string(buf)
+}
+
+func (w *wrapText) Set(s string) error {
+	return w.text.UnmarshalText([]byte(s))
+}
+
 type accumulator struct {
 	element func(value interface{}) Value
 	typ     reflect.Type
@@ -120,7 +139,7 @@ func newDurationValue(p *time.Duration) *durationValue {
 }
 
 func (d *durationValue) Set(s string) error {
-	v, err := time.ParseDuration(s)
+	v, err := str2duration.ParseDuration(s)
 	*d = durationValue(v)
 	return err
 }
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values.json b/vendor/github.com/alecthomas/kingpin/v2/values.json
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/values.json
rename to vendor/github.com/alecthomas/kingpin/v2/values.json
diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go b/vendor/github.com/alecthomas/kingpin/v2/values_generated.go
similarity index 100%
rename from vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go
rename to vendor/github.com/alecthomas/kingpin/v2/values_generated.go
diff --git a/vendor/github.com/alecthomas/template/README.md b/vendor/github.com/alecthomas/template/README.md
deleted file mode 100644
index ef6a8ee303e6..000000000000
--- a/vendor/github.com/alecthomas/template/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Go's `text/template` package with newline elision
-
-This is a fork of Go 1.4's [text/template](http://golang.org/pkg/text/template/) package with one addition: a backslash immediately after a closing delimiter will delete all subsequent newlines until a non-newline.
-
-eg.
-
-```
-{{if true}}\
-hello
-{{end}}\
-```
-
-Will result in:
-
-```
-hello\n
-```
-
-Rather than:
-
-```
-\n
-hello\n
-\n
-```
diff --git a/vendor/github.com/alecthomas/template/doc.go b/vendor/github.com/alecthomas/template/doc.go
deleted file mode 100644
index 223c595c25d6..000000000000
--- a/vendor/github.com/alecthomas/template/doc.go
+++ /dev/null
@@ -1,406 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-/*
-Package template implements data-driven templates for generating textual output.
-
-To generate HTML output, see package html/template, which has the same interface
-as this package but automatically secures HTML output against certain attacks.
-
-Templates are executed by applying them to a data structure. Annotations in the
-template refer to elements of the data structure (typically a field of a struct
-or a key in a map) to control execution and derive values to be displayed.
-Execution of the template walks the structure and sets the cursor, represented
-by a period '.' and called "dot", to the value at the current location in the
-structure as execution proceeds.
-
-The input text for a template is UTF-8-encoded text in any format.
-"Actions"--data evaluations or control structures--are delimited by
-"{{" and "}}"; all text outside actions is copied to the output unchanged.
-Actions may not span newlines, although comments can.
-
-Once parsed, a template may be executed safely in parallel.
-
-Here is a trivial example that prints "17 items are made of wool".
-
-	type Inventory struct {
-		Material string
-		Count    uint
-	}
-	sweaters := Inventory{"wool", 17}
-	tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}")
-	if err != nil { panic(err) }
-	err = tmpl.Execute(os.Stdout, sweaters)
-	if err != nil { panic(err) }
-
-More intricate examples appear below.
-
-Actions
-
-Here is the list of actions. "Arguments" and "pipelines" are evaluations of
-data, defined in detail below.
-
-*/
-//	{{/* a comment */}}
-//		A comment; discarded. May contain newlines.
-//		Comments do not nest and must start and end at the
-//		delimiters, as shown here.
-/*
-
-	{{pipeline}}
-		The default textual representation of the value of the pipeline
-		is copied to the output.
-
-	{{if pipeline}} T1 {{end}}
-		If the value of the pipeline is empty, no output is generated;
-		otherwise, T1 is executed.  The empty values are false, 0, any
-		nil pointer or interface value, and any array, slice, map, or
-		string of length zero.
-		Dot is unaffected.
-
-	{{if pipeline}} T1 {{else}} T0 {{end}}
-		If the value of the pipeline is empty, T0 is executed;
-		otherwise, T1 is executed.  Dot is unaffected.
-
-	{{if pipeline}} T1 {{else if pipeline}} T0 {{end}}
-		To simplify the appearance of if-else chains, the else action
-		of an if may include another if directly; the effect is exactly
-		the same as writing
-			{{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}}
-
-	{{range pipeline}} T1 {{end}}
-		The value of the pipeline must be an array, slice, map, or channel.
-		If the value of the pipeline has length zero, nothing is output;
-		otherwise, dot is set to the successive elements of the array,
-		slice, or map and T1 is executed. If the value is a map and the
-		keys are of basic type with a defined order ("comparable"), the
-		elements will be visited in sorted key order.
-
-	{{range pipeline}} T1 {{else}} T0 {{end}}
-		The value of the pipeline must be an array, slice, map, or channel.
-		If the value of the pipeline has length zero, dot is unaffected and
-		T0 is executed; otherwise, dot is set to the successive elements
-		of the array, slice, or map and T1 is executed.
-
-	{{template "name"}}
-		The template with the specified name is executed with nil data.
-
-	{{template "name" pipeline}}
-		The template with the specified name is executed with dot set
-		to the value of the pipeline.
-
-	{{with pipeline}} T1 {{end}}
-		If the value of the pipeline is empty, no output is generated;
-		otherwise, dot is set to the value of the pipeline and T1 is
-		executed.
-
-	{{with pipeline}} T1 {{else}} T0 {{end}}
-		If the value of the pipeline is empty, dot is unaffected and T0
-		is executed; otherwise, dot is set to the value of the pipeline
-		and T1 is executed.
-
-Arguments
-
-An argument is a simple value, denoted by one of the following.
-
-	- A boolean, string, character, integer, floating-point, imaginary
-	  or complex constant in Go syntax. These behave like Go's untyped
-	  constants, although raw strings may not span newlines.
-	- The keyword nil, representing an untyped Go nil.
-	- The character '.' (period):
-		.
-	  The result is the value of dot.
-	- A variable name, which is a (possibly empty) alphanumeric string
-	  preceded by a dollar sign, such as
-		$piOver2
-	  or
-		$
-	  The result is the value of the variable.
-	  Variables are described below.
-	- The name of a field of the data, which must be a struct, preceded
-	  by a period, such as
-		.Field
-	  The result is the value of the field. Field invocations may be
-	  chained:
-	    .Field1.Field2
-	  Fields can also be evaluated on variables, including chaining:
-	    $x.Field1.Field2
-	- The name of a key of the data, which must be a map, preceded
-	  by a period, such as
-		.Key
-	  The result is the map element value indexed by the key.
-	  Key invocations may be chained and combined with fields to any
-	  depth:
-	    .Field1.Key1.Field2.Key2
-	  Although the key must be an alphanumeric identifier, unlike with
-	  field names they do not need to start with an upper case letter.
-	  Keys can also be evaluated on variables, including chaining:
-	    $x.key1.key2
-	- The name of a niladic method of the data, preceded by a period,
-	  such as
-		.Method
-	  The result is the value of invoking the method with dot as the
-	  receiver, dot.Method(). Such a method must have one return value (of
-	  any type) or two return values, the second of which is an error.
-	  If it has two and the returned error is non-nil, execution terminates
-	  and an error is returned to the caller as the value of Execute.
-	  Method invocations may be chained and combined with fields and keys
-	  to any depth:
-	    .Field1.Key1.Method1.Field2.Key2.Method2
-	  Methods can also be evaluated on variables, including chaining:
-	    $x.Method1.Field
-	- The name of a niladic function, such as
-		fun
-	  The result is the value of invoking the function, fun(). The return
-	  types and values behave as in methods. Functions and function
-	  names are described below.
-	- A parenthesized instance of one the above, for grouping. The result
-	  may be accessed by a field or map key invocation.
-		print (.F1 arg1) (.F2 arg2)
-		(.StructValuedMethod "arg").Field
-
-Arguments may evaluate to any type; if they are pointers the implementation
-automatically indirects to the base type when required.
-If an evaluation yields a function value, such as a function-valued
-field of a struct, the function is not invoked automatically, but it
-can be used as a truth value for an if action and the like. To invoke
-it, use the call function, defined below.
-
-A pipeline is a possibly chained sequence of "commands". A command is a simple
-value (argument) or a function or method call, possibly with multiple arguments:
-
-	Argument
-		The result is the value of evaluating the argument.
-	.Method [Argument...]
-		The method can be alone or the last element of a chain but,
-		unlike methods in the middle of a chain, it can take arguments.
-		The result is the value of calling the method with the
-		arguments:
-			dot.Method(Argument1, etc.)
-	functionName [Argument...]
-		The result is the value of calling the function associated
-		with the name:
-			function(Argument1, etc.)
-		Functions and function names are described below.
-
-Pipelines
-
-A pipeline may be "chained" by separating a sequence of commands with pipeline
-characters '|'. In a chained pipeline, the result of the each command is
-passed as the last argument of the following command. The output of the final
-command in the pipeline is the value of the pipeline.
-
-The output of a command will be either one value or two values, the second of
-which has type error. If that second value is present and evaluates to
-non-nil, execution terminates and the error is returned to the caller of
-Execute.
-
-Variables
-
-A pipeline inside an action may initialize a variable to capture the result.
-The initialization has syntax
-
-	$variable := pipeline
-
-where $variable is the name of the variable. An action that declares a
-variable produces no output.
-
-If a "range" action initializes a variable, the variable is set to the
-successive elements of the iteration.  Also, a "range" may declare two
-variables, separated by a comma:
-
-	range $index, $element := pipeline
-
-in which case $index and $element are set to the successive values of the
-array/slice index or map key and element, respectively.  Note that if there is
-only one variable, it is assigned the element; this is opposite to the
-convention in Go range clauses.
-
-A variable's scope extends to the "end" action of the control structure ("if",
-"with", or "range") in which it is declared, or to the end of the template if
-there is no such control structure.  A template invocation does not inherit
-variables from the point of its invocation.
-
-When execution begins, $ is set to the data argument passed to Execute, that is,
-to the starting value of dot.
-
-Examples
-
-Here are some example one-line templates demonstrating pipelines and variables.
-All produce the quoted word "output":
-
-	{{"\"output\""}}
-		A string constant.
-	{{`"output"`}}
-		A raw string constant.
-	{{printf "%q" "output"}}
-		A function call.
-	{{"output" | printf "%q"}}
-		A function call whose final argument comes from the previous
-		command.
-	{{printf "%q" (print "out" "put")}}
-		A parenthesized argument.
-	{{"put" | printf "%s%s" "out" | printf "%q"}}
-		A more elaborate call.
-	{{"output" | printf "%s" | printf "%q"}}
-		A longer chain.
-	{{with "output"}}{{printf "%q" .}}{{end}}
-		A with action using dot.
-	{{with $x := "output" | printf "%q"}}{{$x}}{{end}}
-		A with action that creates and uses a variable.
-	{{with $x := "output"}}{{printf "%q" $x}}{{end}}
-		A with action that uses the variable in another action.
-	{{with $x := "output"}}{{$x | printf "%q"}}{{end}}
-		The same, but pipelined.
-
-Functions
-
-During execution functions are found in two function maps: first in the
-template, then in the global function map. By default, no functions are defined
-in the template but the Funcs method can be used to add them.
-
-Predefined global functions are named as follows.
-
-	and
-		Returns the boolean AND of its arguments by returning the
-		first empty argument or the last argument, that is,
-		"and x y" behaves as "if x then y else x". All the
-		arguments are evaluated.
-	call
-		Returns the result of calling the first argument, which
-		must be a function, with the remaining arguments as parameters.
-		Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where
-		Y is a func-valued field, map entry, or the like.
-		The first argument must be the result of an evaluation
-		that yields a value of function type (as distinct from
-		a predefined function such as print). The function must
-		return either one or two result values, the second of which
-		is of type error. If the arguments don't match the function
-		or the returned error value is non-nil, execution stops.
-	html
-		Returns the escaped HTML equivalent of the textual
-		representation of its arguments.
-	index
-		Returns the result of indexing its first argument by the
-		following arguments. Thus "index x 1 2 3" is, in Go syntax,
-		x[1][2][3]. Each indexed item must be a map, slice, or array.
-	js
-		Returns the escaped JavaScript equivalent of the textual
-		representation of its arguments.
-	len
-		Returns the integer length of its argument.
-	not
-		Returns the boolean negation of its single argument.
-	or
-		Returns the boolean OR of its arguments by returning the
-		first non-empty argument or the last argument, that is,
-		"or x y" behaves as "if x then x else y". All the
-		arguments are evaluated.
-	print
-		An alias for fmt.Sprint
-	printf
-		An alias for fmt.Sprintf
-	println
-		An alias for fmt.Sprintln
-	urlquery
-		Returns the escaped value of the textual representation of
-		its arguments in a form suitable for embedding in a URL query.
-
-The boolean functions take any zero value to be false and a non-zero
-value to be true.
-
-There is also a set of binary comparison operators defined as
-functions:
-
-	eq
-		Returns the boolean truth of arg1 == arg2
-	ne
-		Returns the boolean truth of arg1 != arg2
-	lt
-		Returns the boolean truth of arg1 < arg2
-	le
-		Returns the boolean truth of arg1 <= arg2
-	gt
-		Returns the boolean truth of arg1 > arg2
-	ge
-		Returns the boolean truth of arg1 >= arg2
-
-For simpler multi-way equality tests, eq (only) accepts two or more
-arguments and compares the second and subsequent to the first,
-returning in effect
-
-	arg1==arg2 || arg1==arg3 || arg1==arg4 ...
-
-(Unlike with || in Go, however, eq is a function call and all the
-arguments will be evaluated.)
-
-The comparison functions work on basic types only (or named basic
-types, such as "type Celsius float32"). They implement the Go rules
-for comparison of values, except that size and exact type are
-ignored, so any integer value, signed or unsigned, may be compared
-with any other integer value. (The arithmetic value is compared,
-not the bit pattern, so all negative integers are less than all
-unsigned integers.) However, as usual, one may not compare an int
-with a float32 and so on.
-
-Associated templates
-
-Each template is named by a string specified when it is created. Also, each
-template is associated with zero or more other templates that it may invoke by
-name; such associations are transitive and form a name space of templates.
-
-A template may use a template invocation to instantiate another associated
-template; see the explanation of the "template" action above. The name must be
-that of a template associated with the template that contains the invocation.
-
-Nested template definitions
-
-When parsing a template, another template may be defined and associated with the
-template being parsed. Template definitions must appear at the top level of the
-template, much like global variables in a Go program.
-
-The syntax of such definitions is to surround each template declaration with a
-"define" and "end" action.
-
-The define action names the template being created by providing a string
-constant. Here is a simple example:
-
-	`{{define "T1"}}ONE{{end}}
-	{{define "T2"}}TWO{{end}}
-	{{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}}
-	{{template "T3"}}`
-
-This defines two templates, T1 and T2, and a third T3 that invokes the other two
-when it is executed. Finally it invokes T3. If executed this template will
-produce the text
-
-	ONE TWO
-
-By construction, a template may reside in only one association. If it's
-necessary to have a template addressable from multiple associations, the
-template definition must be parsed multiple times to create distinct *Template
-values, or must be copied with the Clone or AddParseTree method.
-
-Parse may be called multiple times to assemble the various associated templates;
-see the ParseFiles and ParseGlob functions and methods for simple ways to parse
-related templates stored in files.
-
-A template may be executed directly or through ExecuteTemplate, which executes
-an associated template identified by name. To invoke our example above, we
-might write,
-
-	err := tmpl.Execute(os.Stdout, "no data needed")
-	if err != nil {
-		log.Fatalf("execution failed: %s", err)
-	}
-
-or to invoke a particular template explicitly by name,
-
-	err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed")
-	if err != nil {
-		log.Fatalf("execution failed: %s", err)
-	}
-
-*/
-package template
diff --git a/vendor/github.com/alecthomas/template/exec.go b/vendor/github.com/alecthomas/template/exec.go
deleted file mode 100644
index c3078e5d0c06..000000000000
--- a/vendor/github.com/alecthomas/template/exec.go
+++ /dev/null
@@ -1,845 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package template
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-	"reflect"
-	"runtime"
-	"sort"
-	"strings"
-
-	"github.com/alecthomas/template/parse"
-)
-
-// state represents the state of an execution. It's not part of the
-// template so that multiple executions of the same template
-// can execute in parallel.
-type state struct {
-	tmpl *Template
-	wr   io.Writer
-	node parse.Node // current node, for errors
-	vars []variable // push-down stack of variable values.
-}
-
-// variable holds the dynamic value of a variable such as $, $x etc.
-type variable struct {
-	name  string
-	value reflect.Value
-}
-
-// push pushes a new variable on the stack.
-func (s *state) push(name string, value reflect.Value) {
-	s.vars = append(s.vars, variable{name, value})
-}
-
-// mark returns the length of the variable stack.
-func (s *state) mark() int {
-	return len(s.vars)
-}
-
-// pop pops the variable stack up to the mark.
-func (s *state) pop(mark int) {
-	s.vars = s.vars[0:mark]
-}
-
-// setVar overwrites the top-nth variable on the stack. Used by range iterations.
-func (s *state) setVar(n int, value reflect.Value) {
-	s.vars[len(s.vars)-n].value = value
-}
-
-// varValue returns the value of the named variable.
-func (s *state) varValue(name string) reflect.Value {
-	for i := s.mark() - 1; i >= 0; i-- {
-		if s.vars[i].name == name {
-			return s.vars[i].value
-		}
-	}
-	s.errorf("undefined variable: %s", name)
-	return zero
-}
-
-var zero reflect.Value
-
-// at marks the state to be on node n, for error reporting.
-func (s *state) at(node parse.Node) {
-	s.node = node
-}
-
-// doublePercent returns the string with %'s replaced by %%, if necessary,
-// so it can be used safely inside a Printf format string.
-func doublePercent(str string) string {
-	if strings.Contains(str, "%") {
-		str = strings.Replace(str, "%", "%%", -1)
-	}
-	return str
-}
-
-// errorf formats the error and terminates processing.
-func (s *state) errorf(format string, args ...interface{}) {
-	name := doublePercent(s.tmpl.Name())
-	if s.node == nil {
-		format = fmt.Sprintf("template: %s: %s", name, format)
-	} else {
-		location, context := s.tmpl.ErrorContext(s.node)
-		format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format)
-	}
-	panic(fmt.Errorf(format, args...))
-}
-
-// errRecover is the handler that turns panics into returns from the top
-// level of Parse.
-func errRecover(errp *error) {
-	e := recover()
-	if e != nil {
-		switch err := e.(type) {
-		case runtime.Error:
-			panic(e)
-		case error:
-			*errp = err
-		default:
-			panic(e)
-		}
-	}
-}
-
-// ExecuteTemplate applies the template associated with t that has the given name
-// to the specified data object and writes the output to wr.
-// If an error occurs executing the template or writing its output,
-// execution stops, but partial results may already have been written to
-// the output writer.
-// A template may be executed safely in parallel.
-func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
-	tmpl := t.tmpl[name]
-	if tmpl == nil {
-		return fmt.Errorf("template: no template %q associated with template %q", name, t.name)
-	}
-	return tmpl.Execute(wr, data)
-}
-
-// Execute applies a parsed template to the specified data object,
-// and writes the output to wr.
-// If an error occurs executing the template or writing its output,
-// execution stops, but partial results may already have been written to
-// the output writer.
-// A template may be executed safely in parallel.
-func (t *Template) Execute(wr io.Writer, data interface{}) (err error) {
-	defer errRecover(&err)
-	value := reflect.ValueOf(data)
-	state := &state{
-		tmpl: t,
-		wr:   wr,
-		vars: []variable{{"$", value}},
-	}
-	t.init()
-	if t.Tree == nil || t.Root == nil {
-		var b bytes.Buffer
-		for name, tmpl := range t.tmpl {
-			if tmpl.Tree == nil || tmpl.Root == nil {
-				continue
-			}
-			if b.Len() > 0 {
-				b.WriteString(", ")
-			}
-			fmt.Fprintf(&b, "%q", name)
-		}
-		var s string
-		if b.Len() > 0 {
-			s = "; defined templates are: " + b.String()
-		}
-		state.errorf("%q is an incomplete or empty template%s", t.Name(), s)
-	}
-	state.walk(value, t.Root)
-	return
-}
-
-// Walk functions step through the major pieces of the template structure,
-// generating output as they go.
-func (s *state) walk(dot reflect.Value, node parse.Node) {
-	s.at(node)
-	switch node := node.(type) {
-	case *parse.ActionNode:
-		// Do not pop variables so they persist until next end.
-		// Also, if the action declares variables, don't print the result.
-		val := s.evalPipeline(dot, node.Pipe)
-		if len(node.Pipe.Decl) == 0 {
-			s.printValue(node, val)
-		}
-	case *parse.IfNode:
-		s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList)
-	case *parse.ListNode:
-		for _, node := range node.Nodes {
-			s.walk(dot, node)
-		}
-	case *parse.RangeNode:
-		s.walkRange(dot, node)
-	case *parse.TemplateNode:
-		s.walkTemplate(dot, node)
-	case *parse.TextNode:
-		if _, err := s.wr.Write(node.Text); err != nil {
-			s.errorf("%s", err)
-		}
-	case *parse.WithNode:
-		s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList)
-	default:
-		s.errorf("unknown node: %s", node)
-	}
-}
-
-// walkIfOrWith walks an 'if' or 'with' node. The two control structures
-// are identical in behavior except that 'with' sets dot.
-func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) {
-	defer s.pop(s.mark())
-	val := s.evalPipeline(dot, pipe)
-	truth, ok := isTrue(val)
-	if !ok {
-		s.errorf("if/with can't use %v", val)
-	}
-	if truth {
-		if typ == parse.NodeWith {
-			s.walk(val, list)
-		} else {
-			s.walk(dot, list)
-		}
-	} else if elseList != nil {
-		s.walk(dot, elseList)
-	}
-}
-
-// isTrue reports whether the value is 'true', in the sense of not the zero of its type,
-// and whether the value has a meaningful truth value.
-func isTrue(val reflect.Value) (truth, ok bool) {
-	if !val.IsValid() {
-		// Something like var x interface{}, never set. It's a form of nil.
-		return false, true
-	}
-	switch val.Kind() {
-	case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
-		truth = val.Len() > 0
-	case reflect.Bool:
-		truth = val.Bool()
-	case reflect.Complex64, reflect.Complex128:
-		truth = val.Complex() != 0
-	case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface:
-		truth = !val.IsNil()
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		truth = val.Int() != 0
-	case reflect.Float32, reflect.Float64:
-		truth = val.Float() != 0
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-		truth = val.Uint() != 0
-	case reflect.Struct:
-		truth = true // Struct values are always true.
-	default:
-		return
-	}
-	return truth, true
-}
-
-func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) {
-	s.at(r)
-	defer s.pop(s.mark())
-	val, _ := indirect(s.evalPipeline(dot, r.Pipe))
-	// mark top of stack before any variables in the body are pushed.
-	mark := s.mark()
-	oneIteration := func(index, elem reflect.Value) {
-		// Set top var (lexically the second if there are two) to the element.
-		if len(r.Pipe.Decl) > 0 {
-			s.setVar(1, elem)
-		}
-		// Set next var (lexically the first if there are two) to the index.
-		if len(r.Pipe.Decl) > 1 {
-			s.setVar(2, index)
-		}
-		s.walk(elem, r.List)
-		s.pop(mark)
-	}
-	switch val.Kind() {
-	case reflect.Array, reflect.Slice:
-		if val.Len() == 0 {
-			break
-		}
-		for i := 0; i < val.Len(); i++ {
-			oneIteration(reflect.ValueOf(i), val.Index(i))
-		}
-		return
-	case reflect.Map:
-		if val.Len() == 0 {
-			break
-		}
-		for _, key := range sortKeys(val.MapKeys()) {
-			oneIteration(key, val.MapIndex(key))
-		}
-		return
-	case reflect.Chan:
-		if val.IsNil() {
-			break
-		}
-		i := 0
-		for ; ; i++ {
-			elem, ok := val.Recv()
-			if !ok {
-				break
-			}
-			oneIteration(reflect.ValueOf(i), elem)
-		}
-		if i == 0 {
-			break
-		}
-		return
-	case reflect.Invalid:
-		break // An invalid value is likely a nil map, etc. and acts like an empty map.
-	default:
-		s.errorf("range can't iterate over %v", val)
-	}
-	if r.ElseList != nil {
-		s.walk(dot, r.ElseList)
-	}
-}
-
-func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) {
-	s.at(t)
-	tmpl := s.tmpl.tmpl[t.Name]
-	if tmpl == nil {
-		s.errorf("template %q not defined", t.Name)
-	}
-	// Variables declared by the pipeline persist.
-	dot = s.evalPipeline(dot, t.Pipe)
-	newState := *s
-	newState.tmpl = tmpl
-	// No dynamic scoping: template invocations inherit no variables.
-	newState.vars = []variable{{"$", dot}}
-	newState.walk(dot, tmpl.Root)
-}
-
-// Eval functions evaluate pipelines, commands, and their elements and extract
-// values from the data structure by examining fields, calling methods, and so on.
-// The printing of those values happens only through walk functions.
-
-// evalPipeline returns the value acquired by evaluating a pipeline. If the
-// pipeline has a variable declaration, the variable will be pushed on the
-// stack. Callers should therefore pop the stack after they are finished
-// executing commands depending on the pipeline value.
-func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) {
-	if pipe == nil {
-		return
-	}
-	s.at(pipe)
-	for _, cmd := range pipe.Cmds {
-		value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg.
-		// If the object has type interface{}, dig down one level to the thing inside.
-		if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 {
-			value = reflect.ValueOf(value.Interface()) // lovely!
-		}
-	}
-	for _, variable := range pipe.Decl {
-		s.push(variable.Ident[0], value)
-	}
-	return value
-}
-
-func (s *state) notAFunction(args []parse.Node, final reflect.Value) {
-	if len(args) > 1 || final.IsValid() {
-		s.errorf("can't give argument to non-function %s", args[0])
-	}
-}
-
-func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value {
-	firstWord := cmd.Args[0]
-	switch n := firstWord.(type) {
-	case *parse.FieldNode:
-		return s.evalFieldNode(dot, n, cmd.Args, final)
-	case *parse.ChainNode:
-		return s.evalChainNode(dot, n, cmd.Args, final)
-	case *parse.IdentifierNode:
-		// Must be a function.
-		return s.evalFunction(dot, n, cmd, cmd.Args, final)
-	case *parse.PipeNode:
-		// Parenthesized pipeline. The arguments are all inside the pipeline; final is ignored.
-		return s.evalPipeline(dot, n)
-	case *parse.VariableNode:
-		return s.evalVariableNode(dot, n, cmd.Args, final)
-	}
-	s.at(firstWord)
-	s.notAFunction(cmd.Args, final)
-	switch word := firstWord.(type) {
-	case *parse.BoolNode:
-		return reflect.ValueOf(word.True)
-	case *parse.DotNode:
-		return dot
-	case *parse.NilNode:
-		s.errorf("nil is not a command")
-	case *parse.NumberNode:
-		return s.idealConstant(word)
-	case *parse.StringNode:
-		return reflect.ValueOf(word.Text)
-	}
-	s.errorf("can't evaluate command %q", firstWord)
-	panic("not reached")
-}
-
-// idealConstant is called to return the value of a number in a context where
-// we don't know the type. In that case, the syntax of the number tells us
-// its type, and we use Go rules to resolve.  Note there is no such thing as
-// a uint ideal constant in this situation - the value must be of int type.
-func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value {
-	// These are ideal constants but we don't know the type
-	// and we have no context.  (If it was a method argument,
-	// we'd know what we need.) The syntax guides us to some extent.
-	s.at(constant)
-	switch {
-	case constant.IsComplex:
-		return reflect.ValueOf(constant.Complex128) // incontrovertible.
-	case constant.IsFloat && !isHexConstant(constant.Text) && strings.IndexAny(constant.Text, ".eE") >= 0:
-		return reflect.ValueOf(constant.Float64)
-	case constant.IsInt:
-		n := int(constant.Int64)
-		if int64(n) != constant.Int64 {
-			s.errorf("%s overflows int", constant.Text)
-		}
-		return reflect.ValueOf(n)
-	case constant.IsUint:
-		s.errorf("%s overflows int", constant.Text)
-	}
-	return zero
-}
-
-func isHexConstant(s string) bool {
-	return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X')
-}
-
-func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value {
-	s.at(field)
-	return s.evalFieldChain(dot, dot, field, field.Ident, args, final)
-}
-
-func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value {
-	s.at(chain)
-	// (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields.
-	pipe := s.evalArg(dot, nil, chain.Node)
-	if len(chain.Field) == 0 {
-		s.errorf("internal error: no fields in evalChainNode")
-	}
-	return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final)
-}
-
-func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value {
-	// $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields.
-	s.at(variable)
-	value := s.varValue(variable.Ident[0])
-	if len(variable.Ident) == 1 {
-		s.notAFunction(args, final)
-		return value
-	}
-	return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final)
-}
-
-// evalFieldChain evaluates .X.Y.Z possibly followed by arguments.
-// dot is the environment in which to evaluate arguments, while
-// receiver is the value being walked along the chain.
-func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value {
-	n := len(ident)
-	for i := 0; i < n-1; i++ {
-		receiver = s.evalField(dot, ident[i], node, nil, zero, receiver)
-	}
-	// Now if it's a method, it gets the arguments.
-	return s.evalField(dot, ident[n-1], node, args, final, receiver)
-}
-
-func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value {
-	s.at(node)
-	name := node.Ident
-	function, ok := findFunction(name, s.tmpl)
-	if !ok {
-		s.errorf("%q is not a defined function", name)
-	}
-	return s.evalCall(dot, function, cmd, name, args, final)
-}
-
-// evalField evaluates an expression like (.Field) or (.Field arg1 arg2).
-// The 'final' argument represents the return value from the preceding
-// value of the pipeline, if any.
-func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value {
-	if !receiver.IsValid() {
-		return zero
-	}
-	typ := receiver.Type()
-	receiver, _ = indirect(receiver)
-	// Unless it's an interface, need to get to a value of type *T to guarantee
-	// we see all methods of T and *T.
-	ptr := receiver
-	if ptr.Kind() != reflect.Interface && ptr.CanAddr() {
-		ptr = ptr.Addr()
-	}
-	if method := ptr.MethodByName(fieldName); method.IsValid() {
-		return s.evalCall(dot, method, node, fieldName, args, final)
-	}
-	hasArgs := len(args) > 1 || final.IsValid()
-	// It's not a method; must be a field of a struct or an element of a map. The receiver must not be nil.
-	receiver, isNil := indirect(receiver)
-	if isNil {
-		s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
-	}
-	switch receiver.Kind() {
-	case reflect.Struct:
-		tField, ok := receiver.Type().FieldByName(fieldName)
-		if ok {
-			field := receiver.FieldByIndex(tField.Index)
-			if tField.PkgPath != "" { // field is unexported
-				s.errorf("%s is an unexported field of struct type %s", fieldName, typ)
-			}
-			// If it's a function, we must call it.
-			if hasArgs {
-				s.errorf("%s has arguments but cannot be invoked as function", fieldName)
-			}
-			return field
-		}
-		s.errorf("%s is not a field of struct type %s", fieldName, typ)
-	case reflect.Map:
-		// If it's a map, attempt to use the field name as a key.
-		nameVal := reflect.ValueOf(fieldName)
-		if nameVal.Type().AssignableTo(receiver.Type().Key()) {
-			if hasArgs {
-				s.errorf("%s is not a method but has arguments", fieldName)
-			}
-			return receiver.MapIndex(nameVal)
-		}
-	}
-	s.errorf("can't evaluate field %s in type %s", fieldName, typ)
-	panic("not reached")
-}
-
-var (
-	errorType       = reflect.TypeOf((*error)(nil)).Elem()
-	fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
-)
-
-// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so
-// it looks just like a function call.  The arg list, if non-nil, includes (in the manner of the shell), arg[0]
-// as the function itself.
-func (s *state) evalCall(dot, fun reflect.Value, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value {
-	if args != nil {
-		args = args[1:] // Zeroth arg is function name/node; not passed to function.
-	}
-	typ := fun.Type()
-	numIn := len(args)
-	if final.IsValid() {
-		numIn++
-	}
-	numFixed := len(args)
-	if typ.IsVariadic() {
-		numFixed = typ.NumIn() - 1 // last arg is the variadic one.
-		if numIn < numFixed {
-			s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args))
-		}
-	} else if numIn < typ.NumIn()-1 || !typ.IsVariadic() && numIn != typ.NumIn() {
-		s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), len(args))
-	}
-	if !goodFunc(typ) {
-		// TODO: This could still be a confusing error; maybe goodFunc should provide info.
-		s.errorf("can't call method/function %q with %d results", name, typ.NumOut())
-	}
-	// Build the arg list.
-	argv := make([]reflect.Value, numIn)
-	// Args must be evaluated. Fixed args first.
-	i := 0
-	for ; i < numFixed && i < len(args); i++ {
-		argv[i] = s.evalArg(dot, typ.In(i), args[i])
-	}
-	// Now the ... args.
-	if typ.IsVariadic() {
-		argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice.
-		for ; i < len(args); i++ {
-			argv[i] = s.evalArg(dot, argType, args[i])
-		}
-	}
-	// Add final value if necessary.
-	if final.IsValid() {
-		t := typ.In(typ.NumIn() - 1)
-		if typ.IsVariadic() {
-			t = t.Elem()
-		}
-		argv[i] = s.validateType(final, t)
-	}
-	result := fun.Call(argv)
-	// If we have an error that is not nil, stop execution and return that error to the caller.
-	if len(result) == 2 && !result[1].IsNil() {
-		s.at(node)
-		s.errorf("error calling %s: %s", name, result[1].Interface().(error))
-	}
-	return result[0]
-}
-
-// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero.
-func canBeNil(typ reflect.Type) bool {
-	switch typ.Kind() {
-	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
-		return true
-	}
-	return false
-}
-
-// validateType guarantees that the value is valid and assignable to the type.
-func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value {
-	if !value.IsValid() {
-		if typ == nil || canBeNil(typ) {
-			// An untyped nil interface{}. Accept as a proper nil value.
-			return reflect.Zero(typ)
-		}
-		s.errorf("invalid value; expected %s", typ)
-	}
-	if typ != nil && !value.Type().AssignableTo(typ) {
-		if value.Kind() == reflect.Interface && !value.IsNil() {
-			value = value.Elem()
-			if value.Type().AssignableTo(typ) {
-				return value
-			}
-			// fallthrough
-		}
-		// Does one dereference or indirection work? We could do more, as we
-		// do with method receivers, but that gets messy and method receivers
-		// are much more constrained, so it makes more sense there than here.
-		// Besides, one is almost always all you need.
-		switch {
-		case value.Kind() == reflect.Ptr && value.Type().Elem().AssignableTo(typ):
-			value = value.Elem()
-			if !value.IsValid() {
-				s.errorf("dereference of nil pointer of type %s", typ)
-			}
-		case reflect.PtrTo(value.Type()).AssignableTo(typ) && value.CanAddr():
-			value = value.Addr()
-		default:
-			s.errorf("wrong type for value; expected %s; got %s", typ, value.Type())
-		}
-	}
-	return value
-}
-
-func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value {
-	s.at(n)
-	switch arg := n.(type) {
-	case *parse.DotNode:
-		return s.validateType(dot, typ)
-	case *parse.NilNode:
-		if canBeNil(typ) {
-			return reflect.Zero(typ)
-		}
-		s.errorf("cannot assign nil to %s", typ)
-	case *parse.FieldNode:
-		return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, zero), typ)
-	case *parse.VariableNode:
-		return s.validateType(s.evalVariableNode(dot, arg, nil, zero), typ)
-	case *parse.PipeNode:
-		return s.validateType(s.evalPipeline(dot, arg), typ)
-	case *parse.IdentifierNode:
-		return s.evalFunction(dot, arg, arg, nil, zero)
-	case *parse.ChainNode:
-		return s.validateType(s.evalChainNode(dot, arg, nil, zero), typ)
-	}
-	switch typ.Kind() {
-	case reflect.Bool:
-		return s.evalBool(typ, n)
-	case reflect.Complex64, reflect.Complex128:
-		return s.evalComplex(typ, n)
-	case reflect.Float32, reflect.Float64:
-		return s.evalFloat(typ, n)
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		return s.evalInteger(typ, n)
-	case reflect.Interface:
-		if typ.NumMethod() == 0 {
-			return s.evalEmptyInterface(dot, n)
-		}
-	case reflect.String:
-		return s.evalString(typ, n)
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-		return s.evalUnsignedInteger(typ, n)
-	}
-	s.errorf("can't handle %s for arg of type %s", n, typ)
-	panic("not reached")
-}
-
-func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value {
-	s.at(n)
-	if n, ok := n.(*parse.BoolNode); ok {
-		value := reflect.New(typ).Elem()
-		value.SetBool(n.True)
-		return value
-	}
-	s.errorf("expected bool; found %s", n)
-	panic("not reached")
-}
-
-func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value {
-	s.at(n)
-	if n, ok := n.(*parse.StringNode); ok {
-		value := reflect.New(typ).Elem()
-		value.SetString(n.Text)
-		return value
-	}
-	s.errorf("expected string; found %s", n)
-	panic("not reached")
-}
-
-func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value {
-	s.at(n)
-	if n, ok := n.(*parse.NumberNode); ok && n.IsInt {
-		value := reflect.New(typ).Elem()
-		value.SetInt(n.Int64)
-		return value
-	}
-	s.errorf("expected integer; found %s", n)
-	panic("not reached")
-}
-
-func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value {
-	s.at(n)
-	if n, ok := n.(*parse.NumberNode); ok && n.IsUint {
-		value := reflect.New(typ).Elem()
-		value.SetUint(n.Uint64)
-		return value
-	}
-	s.errorf("expected unsigned integer; found %s", n)
-	panic("not reached")
-}
-
-func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value {
-	s.at(n)
-	if n, ok := n.(*parse.NumberNode); ok && n.IsFloat {
-		value := reflect.New(typ).Elem()
-		value.SetFloat(n.Float64)
-		return value
-	}
-	s.errorf("expected float; found %s", n)
-	panic("not reached")
-}
-
-func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value {
-	if n, ok := n.(*parse.NumberNode); ok && n.IsComplex {
-		value := reflect.New(typ).Elem()
-		value.SetComplex(n.Complex128)
-		return value
-	}
-	s.errorf("expected complex; found %s", n)
-	panic("not reached")
-}
-
-func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value {
-	s.at(n)
-	switch n := n.(type) {
-	case *parse.BoolNode:
-		return reflect.ValueOf(n.True)
-	case *parse.DotNode:
-		return dot
-	case *parse.FieldNode:
-		return s.evalFieldNode(dot, n, nil, zero)
-	case *parse.IdentifierNode:
-		return s.evalFunction(dot, n, n, nil, zero)
-	case *parse.NilNode:
-		// NilNode is handled in evalArg, the only place that calls here.
-		s.errorf("evalEmptyInterface: nil (can't happen)")
-	case *parse.NumberNode:
-		return s.idealConstant(n)
-	case *parse.StringNode:
-		return reflect.ValueOf(n.Text)
-	case *parse.VariableNode:
-		return s.evalVariableNode(dot, n, nil, zero)
-	case *parse.PipeNode:
-		return s.evalPipeline(dot, n)
-	}
-	s.errorf("can't handle assignment of %s to empty interface argument", n)
-	panic("not reached")
-}
-
-// indirect returns the item at the end of indirection, and a bool to indicate if it's nil.
-// We indirect through pointers and empty interfaces (only) because
-// non-empty interfaces have methods we might need.
-func indirect(v reflect.Value) (rv reflect.Value, isNil bool) {
-	for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() {
-		if v.IsNil() {
-			return v, true
-		}
-		if v.Kind() == reflect.Interface && v.NumMethod() > 0 {
-			break
-		}
-	}
-	return v, false
-}
-
-// printValue writes the textual representation of the value to the output of
-// the template.
-func (s *state) printValue(n parse.Node, v reflect.Value) {
-	s.at(n)
-	iface, ok := printableValue(v)
-	if !ok {
-		s.errorf("can't print %s of type %s", n, v.Type())
-	}
-	fmt.Fprint(s.wr, iface)
-}
-
-// printableValue returns the, possibly indirected, interface value inside v that
-// is best for a call to formatted printer.
-func printableValue(v reflect.Value) (interface{}, bool) {
-	if v.Kind() == reflect.Ptr {
-		v, _ = indirect(v) // fmt.Fprint handles nil.
-	}
-	if !v.IsValid() {
-		return "", true
-	}
-
-	if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) {
-		if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) {
-			v = v.Addr()
-		} else {
-			switch v.Kind() {
-			case reflect.Chan, reflect.Func:
-				return nil, false
-			}
-		}
-	}
-	return v.Interface(), true
-}
-
-// Types to help sort the keys in a map for reproducible output.
-
-type rvs []reflect.Value
-
-func (x rvs) Len() int      { return len(x) }
-func (x rvs) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
-
-type rvInts struct{ rvs }
-
-func (x rvInts) Less(i, j int) bool { return x.rvs[i].Int() < x.rvs[j].Int() }
-
-type rvUints struct{ rvs }
-
-func (x rvUints) Less(i, j int) bool { return x.rvs[i].Uint() < x.rvs[j].Uint() }
-
-type rvFloats struct{ rvs }
-
-func (x rvFloats) Less(i, j int) bool { return x.rvs[i].Float() < x.rvs[j].Float() }
-
-type rvStrings struct{ rvs }
-
-func (x rvStrings) Less(i, j int) bool { return x.rvs[i].String() < x.rvs[j].String() }
-
-// sortKeys sorts (if it can) the slice of reflect.Values, which is a slice of map keys.
-func sortKeys(v []reflect.Value) []reflect.Value {
-	if len(v) <= 1 {
-		return v
-	}
-	switch v[0].Kind() {
-	case reflect.Float32, reflect.Float64:
-		sort.Sort(rvFloats{v})
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		sort.Sort(rvInts{v})
-	case reflect.String:
-		sort.Sort(rvStrings{v})
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-		sort.Sort(rvUints{v})
-	}
-	return v
-}
diff --git a/vendor/github.com/alecthomas/template/funcs.go b/vendor/github.com/alecthomas/template/funcs.go
deleted file mode 100644
index 39ee5ed68fbc..000000000000
--- a/vendor/github.com/alecthomas/template/funcs.go
+++ /dev/null
@@ -1,598 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package template
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"io"
-	"net/url"
-	"reflect"
-	"strings"
-	"unicode"
-	"unicode/utf8"
-)
-
-// FuncMap is the type of the map defining the mapping from names to functions.
-// Each function must have either a single return value, or two return values of
-// which the second has type error. In that case, if the second (error)
-// return value evaluates to non-nil during execution, execution terminates and
-// Execute returns that error.
-type FuncMap map[string]interface{}
-
-var builtins = FuncMap{
-	"and":      and,
-	"call":     call,
-	"html":     HTMLEscaper,
-	"index":    index,
-	"js":       JSEscaper,
-	"len":      length,
-	"not":      not,
-	"or":       or,
-	"print":    fmt.Sprint,
-	"printf":   fmt.Sprintf,
-	"println":  fmt.Sprintln,
-	"urlquery": URLQueryEscaper,
-
-	// Comparisons
-	"eq": eq, // ==
-	"ge": ge, // >=
-	"gt": gt, // >
-	"le": le, // <=
-	"lt": lt, // <
-	"ne": ne, // !=
-}
-
-var builtinFuncs = createValueFuncs(builtins)
-
-// createValueFuncs turns a FuncMap into a map[string]reflect.Value
-func createValueFuncs(funcMap FuncMap) map[string]reflect.Value {
-	m := make(map[string]reflect.Value)
-	addValueFuncs(m, funcMap)
-	return m
-}
-
-// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values.
-func addValueFuncs(out map[string]reflect.Value, in FuncMap) {
-	for name, fn := range in {
-		v := reflect.ValueOf(fn)
-		if v.Kind() != reflect.Func {
-			panic("value for " + name + " not a function")
-		}
-		if !goodFunc(v.Type()) {
-			panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut()))
-		}
-		out[name] = v
-	}
-}
-
-// addFuncs adds to values the functions in funcs. It does no checking of the input -
-// call addValueFuncs first.
-func addFuncs(out, in FuncMap) {
-	for name, fn := range in {
-		out[name] = fn
-	}
-}
-
-// goodFunc checks that the function or method has the right result signature.
-func goodFunc(typ reflect.Type) bool {
-	// We allow functions with 1 result or 2 results where the second is an error.
-	switch {
-	case typ.NumOut() == 1:
-		return true
-	case typ.NumOut() == 2 && typ.Out(1) == errorType:
-		return true
-	}
-	return false
-}
-
-// findFunction looks for a function in the template, and global map.
-func findFunction(name string, tmpl *Template) (reflect.Value, bool) {
-	if tmpl != nil && tmpl.common != nil {
-		if fn := tmpl.execFuncs[name]; fn.IsValid() {
-			return fn, true
-		}
-	}
-	if fn := builtinFuncs[name]; fn.IsValid() {
-		return fn, true
-	}
-	return reflect.Value{}, false
-}
-
-// Indexing.
-
-// index returns the result of indexing its first argument by the following
-// arguments.  Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each
-// indexed item must be a map, slice, or array.
-func index(item interface{}, indices ...interface{}) (interface{}, error) {
-	v := reflect.ValueOf(item)
-	for _, i := range indices {
-		index := reflect.ValueOf(i)
-		var isNil bool
-		if v, isNil = indirect(v); isNil {
-			return nil, fmt.Errorf("index of nil pointer")
-		}
-		switch v.Kind() {
-		case reflect.Array, reflect.Slice, reflect.String:
-			var x int64
-			switch index.Kind() {
-			case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-				x = index.Int()
-			case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-				x = int64(index.Uint())
-			default:
-				return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type())
-			}
-			if x < 0 || x >= int64(v.Len()) {
-				return nil, fmt.Errorf("index out of range: %d", x)
-			}
-			v = v.Index(int(x))
-		case reflect.Map:
-			if !index.IsValid() {
-				index = reflect.Zero(v.Type().Key())
-			}
-			if !index.Type().AssignableTo(v.Type().Key()) {
-				return nil, fmt.Errorf("%s is not index type for %s", index.Type(), v.Type())
-			}
-			if x := v.MapIndex(index); x.IsValid() {
-				v = x
-			} else {
-				v = reflect.Zero(v.Type().Elem())
-			}
-		default:
-			return nil, fmt.Errorf("can't index item of type %s", v.Type())
-		}
-	}
-	return v.Interface(), nil
-}
-
-// Length
-
-// length returns the length of the item, with an error if it has no defined length.
-func length(item interface{}) (int, error) {
-	v, isNil := indirect(reflect.ValueOf(item))
-	if isNil {
-		return 0, fmt.Errorf("len of nil pointer")
-	}
-	switch v.Kind() {
-	case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
-		return v.Len(), nil
-	}
-	return 0, fmt.Errorf("len of type %s", v.Type())
-}
-
-// Function invocation
-
-// call returns the result of evaluating the first argument as a function.
-// The function must return 1 result, or 2 results, the second of which is an error.
-func call(fn interface{}, args ...interface{}) (interface{}, error) {
-	v := reflect.ValueOf(fn)
-	typ := v.Type()
-	if typ.Kind() != reflect.Func {
-		return nil, fmt.Errorf("non-function of type %s", typ)
-	}
-	if !goodFunc(typ) {
-		return nil, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut())
-	}
-	numIn := typ.NumIn()
-	var dddType reflect.Type
-	if typ.IsVariadic() {
-		if len(args) < numIn-1 {
-			return nil, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1)
-		}
-		dddType = typ.In(numIn - 1).Elem()
-	} else {
-		if len(args) != numIn {
-			return nil, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn)
-		}
-	}
-	argv := make([]reflect.Value, len(args))
-	for i, arg := range args {
-		value := reflect.ValueOf(arg)
-		// Compute the expected type. Clumsy because of variadics.
-		var argType reflect.Type
-		if !typ.IsVariadic() || i < numIn-1 {
-			argType = typ.In(i)
-		} else {
-			argType = dddType
-		}
-		if !value.IsValid() && canBeNil(argType) {
-			value = reflect.Zero(argType)
-		}
-		if !value.Type().AssignableTo(argType) {
-			return nil, fmt.Errorf("arg %d has type %s; should be %s", i, value.Type(), argType)
-		}
-		argv[i] = value
-	}
-	result := v.Call(argv)
-	if len(result) == 2 && !result[1].IsNil() {
-		return result[0].Interface(), result[1].Interface().(error)
-	}
-	return result[0].Interface(), nil
-}
-
-// Boolean logic.
-
-func truth(a interface{}) bool {
-	t, _ := isTrue(reflect.ValueOf(a))
-	return t
-}
-
-// and computes the Boolean AND of its arguments, returning
-// the first false argument it encounters, or the last argument.
-func and(arg0 interface{}, args ...interface{}) interface{} {
-	if !truth(arg0) {
-		return arg0
-	}
-	for i := range args {
-		arg0 = args[i]
-		if !truth(arg0) {
-			break
-		}
-	}
-	return arg0
-}
-
-// or computes the Boolean OR of its arguments, returning
-// the first true argument it encounters, or the last argument.
-func or(arg0 interface{}, args ...interface{}) interface{} {
-	if truth(arg0) {
-		return arg0
-	}
-	for i := range args {
-		arg0 = args[i]
-		if truth(arg0) {
-			break
-		}
-	}
-	return arg0
-}
-
-// not returns the Boolean negation of its argument.
-func not(arg interface{}) (truth bool) {
-	truth, _ = isTrue(reflect.ValueOf(arg))
-	return !truth
-}
-
-// Comparison.
-
-// TODO: Perhaps allow comparison between signed and unsigned integers.
-
-var (
-	errBadComparisonType = errors.New("invalid type for comparison")
-	errBadComparison     = errors.New("incompatible types for comparison")
-	errNoComparison      = errors.New("missing argument for comparison")
-)
-
-type kind int
-
-const (
-	invalidKind kind = iota
-	boolKind
-	complexKind
-	intKind
-	floatKind
-	integerKind
-	stringKind
-	uintKind
-)
-
-func basicKind(v reflect.Value) (kind, error) {
-	switch v.Kind() {
-	case reflect.Bool:
-		return boolKind, nil
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		return intKind, nil
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-		return uintKind, nil
-	case reflect.Float32, reflect.Float64:
-		return floatKind, nil
-	case reflect.Complex64, reflect.Complex128:
-		return complexKind, nil
-	case reflect.String:
-		return stringKind, nil
-	}
-	return invalidKind, errBadComparisonType
-}
-
-// eq evaluates the comparison a == b || a == c || ...
-func eq(arg1 interface{}, arg2 ...interface{}) (bool, error) {
-	v1 := reflect.ValueOf(arg1)
-	k1, err := basicKind(v1)
-	if err != nil {
-		return false, err
-	}
-	if len(arg2) == 0 {
-		return false, errNoComparison
-	}
-	for _, arg := range arg2 {
-		v2 := reflect.ValueOf(arg)
-		k2, err := basicKind(v2)
-		if err != nil {
-			return false, err
-		}
-		truth := false
-		if k1 != k2 {
-			// Special case: Can compare integer values regardless of type's sign.
-			switch {
-			case k1 == intKind && k2 == uintKind:
-				truth = v1.Int() >= 0 && uint64(v1.Int()) == v2.Uint()
-			case k1 == uintKind && k2 == intKind:
-				truth = v2.Int() >= 0 && v1.Uint() == uint64(v2.Int())
-			default:
-				return false, errBadComparison
-			}
-		} else {
-			switch k1 {
-			case boolKind:
-				truth = v1.Bool() == v2.Bool()
-			case complexKind:
-				truth = v1.Complex() == v2.Complex()
-			case floatKind:
-				truth = v1.Float() == v2.Float()
-			case intKind:
-				truth = v1.Int() == v2.Int()
-			case stringKind:
-				truth = v1.String() == v2.String()
-			case uintKind:
-				truth = v1.Uint() == v2.Uint()
-			default:
-				panic("invalid kind")
-			}
-		}
-		if truth {
-			return true, nil
-		}
-	}
-	return false, nil
-}
-
-// ne evaluates the comparison a != b.
-func ne(arg1, arg2 interface{}) (bool, error) {
-	// != is the inverse of ==.
-	equal, err := eq(arg1, arg2)
-	return !equal, err
-}
-
-// lt evaluates the comparison a < b.
-func lt(arg1, arg2 interface{}) (bool, error) {
-	v1 := reflect.ValueOf(arg1)
-	k1, err := basicKind(v1)
-	if err != nil {
-		return false, err
-	}
-	v2 := reflect.ValueOf(arg2)
-	k2, err := basicKind(v2)
-	if err != nil {
-		return false, err
-	}
-	truth := false
-	if k1 != k2 {
-		// Special case: Can compare integer values regardless of type's sign.
-		switch {
-		case k1 == intKind && k2 == uintKind:
-			truth = v1.Int() < 0 || uint64(v1.Int()) < v2.Uint()
-		case k1 == uintKind && k2 == intKind:
-			truth = v2.Int() >= 0 && v1.Uint() < uint64(v2.Int())
-		default:
-			return false, errBadComparison
-		}
-	} else {
-		switch k1 {
-		case boolKind, complexKind:
-			return false, errBadComparisonType
-		case floatKind:
-			truth = v1.Float() < v2.Float()
-		case intKind:
-			truth = v1.Int() < v2.Int()
-		case stringKind:
-			truth = v1.String() < v2.String()
-		case uintKind:
-			truth = v1.Uint() < v2.Uint()
-		default:
-			panic("invalid kind")
-		}
-	}
-	return truth, nil
-}
-
-// le evaluates the comparison <= b.
-func le(arg1, arg2 interface{}) (bool, error) {
-	// <= is < or ==.
-	lessThan, err := lt(arg1, arg2)
-	if lessThan || err != nil {
-		return lessThan, err
-	}
-	return eq(arg1, arg2)
-}
-
-// gt evaluates the comparison a > b.
-func gt(arg1, arg2 interface{}) (bool, error) {
-	// > is the inverse of <=.
-	lessOrEqual, err := le(arg1, arg2)
-	if err != nil {
-		return false, err
-	}
-	return !lessOrEqual, nil
-}
-
-// ge evaluates the comparison a >= b.
-func ge(arg1, arg2 interface{}) (bool, error) {
-	// >= is the inverse of <.
-	lessThan, err := lt(arg1, arg2)
-	if err != nil {
-		return false, err
-	}
-	return !lessThan, nil
-}
-
-// HTML escaping.
-
-var (
-	htmlQuot = []byte(""") // shorter than """
-	htmlApos = []byte("'") // shorter than "'" and apos was not in HTML until HTML5
-	htmlAmp  = []byte("&")
-	htmlLt   = []byte("<")
-	htmlGt   = []byte(">")
-)
-
-// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b.
-func HTMLEscape(w io.Writer, b []byte) {
-	last := 0
-	for i, c := range b {
-		var html []byte
-		switch c {
-		case '"':
-			html = htmlQuot
-		case '\'':
-			html = htmlApos
-		case '&':
-			html = htmlAmp
-		case '<':
-			html = htmlLt
-		case '>':
-			html = htmlGt
-		default:
-			continue
-		}
-		w.Write(b[last:i])
-		w.Write(html)
-		last = i + 1
-	}
-	w.Write(b[last:])
-}
-
-// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s.
-func HTMLEscapeString(s string) string {
-	// Avoid allocation if we can.
-	if strings.IndexAny(s, `'"&<>`) < 0 {
-		return s
-	}
-	var b bytes.Buffer
-	HTMLEscape(&b, []byte(s))
-	return b.String()
-}
-
-// HTMLEscaper returns the escaped HTML equivalent of the textual
-// representation of its arguments.
-func HTMLEscaper(args ...interface{}) string {
-	return HTMLEscapeString(evalArgs(args))
-}
-
-// JavaScript escaping.
-
-var (
-	jsLowUni = []byte(`\u00`)
-	hex      = []byte("0123456789ABCDEF")
-
-	jsBackslash = []byte(`\\`)
-	jsApos      = []byte(`\'`)
-	jsQuot      = []byte(`\"`)
-	jsLt        = []byte(`\x3C`)
-	jsGt        = []byte(`\x3E`)
-)
-
-// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b.
-func JSEscape(w io.Writer, b []byte) {
-	last := 0
-	for i := 0; i < len(b); i++ {
-		c := b[i]
-
-		if !jsIsSpecial(rune(c)) {
-			// fast path: nothing to do
-			continue
-		}
-		w.Write(b[last:i])
-
-		if c < utf8.RuneSelf {
-			// Quotes, slashes and angle brackets get quoted.
-			// Control characters get written as \u00XX.
-			switch c {
-			case '\\':
-				w.Write(jsBackslash)
-			case '\'':
-				w.Write(jsApos)
-			case '"':
-				w.Write(jsQuot)
-			case '<':
-				w.Write(jsLt)
-			case '>':
-				w.Write(jsGt)
-			default:
-				w.Write(jsLowUni)
-				t, b := c>>4, c&0x0f
-				w.Write(hex[t : t+1])
-				w.Write(hex[b : b+1])
-			}
-		} else {
-			// Unicode rune.
-			r, size := utf8.DecodeRune(b[i:])
-			if unicode.IsPrint(r) {
-				w.Write(b[i : i+size])
-			} else {
-				fmt.Fprintf(w, "\\u%04X", r)
-			}
-			i += size - 1
-		}
-		last = i + 1
-	}
-	w.Write(b[last:])
-}
-
-// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s.
-func JSEscapeString(s string) string {
-	// Avoid allocation if we can.
-	if strings.IndexFunc(s, jsIsSpecial) < 0 {
-		return s
-	}
-	var b bytes.Buffer
-	JSEscape(&b, []byte(s))
-	return b.String()
-}
-
-func jsIsSpecial(r rune) bool {
-	switch r {
-	case '\\', '\'', '"', '<', '>':
-		return true
-	}
-	return r < ' ' || utf8.RuneSelf <= r
-}
-
-// JSEscaper returns the escaped JavaScript equivalent of the textual
-// representation of its arguments.
-func JSEscaper(args ...interface{}) string {
-	return JSEscapeString(evalArgs(args))
-}
-
-// URLQueryEscaper returns the escaped value of the textual representation of
-// its arguments in a form suitable for embedding in a URL query.
-func URLQueryEscaper(args ...interface{}) string {
-	return url.QueryEscape(evalArgs(args))
-}
-
-// evalArgs formats the list of arguments into a string. It is therefore equivalent to
-//	fmt.Sprint(args...)
-// except that each argument is indirected (if a pointer), as required,
-// using the same rules as the default string evaluation during template
-// execution.
-func evalArgs(args []interface{}) string {
-	ok := false
-	var s string
-	// Fast path for simple common case.
-	if len(args) == 1 {
-		s, ok = args[0].(string)
-	}
-	if !ok {
-		for i, arg := range args {
-			a, ok := printableValue(reflect.ValueOf(arg))
-			if ok {
-				args[i] = a
-			} // else left fmt do its thing
-		}
-		s = fmt.Sprint(args...)
-	}
-	return s
-}
diff --git a/vendor/github.com/alecthomas/template/helper.go b/vendor/github.com/alecthomas/template/helper.go
deleted file mode 100644
index 3636fb54d697..000000000000
--- a/vendor/github.com/alecthomas/template/helper.go
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Helper functions to make constructing templates easier.
-
-package template
-
-import (
-	"fmt"
-	"io/ioutil"
-	"path/filepath"
-)
-
-// Functions and methods to parse templates.
-
-// Must is a helper that wraps a call to a function returning (*Template, error)
-// and panics if the error is non-nil. It is intended for use in variable
-// initializations such as
-//	var t = template.Must(template.New("name").Parse("text"))
-func Must(t *Template, err error) *Template {
-	if err != nil {
-		panic(err)
-	}
-	return t
-}
-
-// ParseFiles creates a new Template and parses the template definitions from
-// the named files. The returned template's name will have the (base) name and
-// (parsed) contents of the first file. There must be at least one file.
-// If an error occurs, parsing stops and the returned *Template is nil.
-func ParseFiles(filenames ...string) (*Template, error) {
-	return parseFiles(nil, filenames...)
-}
-
-// ParseFiles parses the named files and associates the resulting templates with
-// t. If an error occurs, parsing stops and the returned template is nil;
-// otherwise it is t. There must be at least one file.
-func (t *Template) ParseFiles(filenames ...string) (*Template, error) {
-	return parseFiles(t, filenames...)
-}
-
-// parseFiles is the helper for the method and function. If the argument
-// template is nil, it is created from the first file.
-func parseFiles(t *Template, filenames ...string) (*Template, error) {
-	if len(filenames) == 0 {
-		// Not really a problem, but be consistent.
-		return nil, fmt.Errorf("template: no files named in call to ParseFiles")
-	}
-	for _, filename := range filenames {
-		b, err := ioutil.ReadFile(filename)
-		if err != nil {
-			return nil, err
-		}
-		s := string(b)
-		name := filepath.Base(filename)
-		// First template becomes return value if not already defined,
-		// and we use that one for subsequent New calls to associate
-		// all the templates together. Also, if this file has the same name
-		// as t, this file becomes the contents of t, so
-		//  t, err := New(name).Funcs(xxx).ParseFiles(name)
-		// works. Otherwise we create a new template associated with t.
-		var tmpl *Template
-		if t == nil {
-			t = New(name)
-		}
-		if name == t.Name() {
-			tmpl = t
-		} else {
-			tmpl = t.New(name)
-		}
-		_, err = tmpl.Parse(s)
-		if err != nil {
-			return nil, err
-		}
-	}
-	return t, nil
-}
-
-// ParseGlob creates a new Template and parses the template definitions from the
-// files identified by the pattern, which must match at least one file. The
-// returned template will have the (base) name and (parsed) contents of the
-// first file matched by the pattern. ParseGlob is equivalent to calling
-// ParseFiles with the list of files matched by the pattern.
-func ParseGlob(pattern string) (*Template, error) {
-	return parseGlob(nil, pattern)
-}
-
-// ParseGlob parses the template definitions in the files identified by the
-// pattern and associates the resulting templates with t. The pattern is
-// processed by filepath.Glob and must match at least one file. ParseGlob is
-// equivalent to calling t.ParseFiles with the list of files matched by the
-// pattern.
-func (t *Template) ParseGlob(pattern string) (*Template, error) {
-	return parseGlob(t, pattern)
-}
-
-// parseGlob is the implementation of the function and method ParseGlob.
-func parseGlob(t *Template, pattern string) (*Template, error) {
-	filenames, err := filepath.Glob(pattern)
-	if err != nil {
-		return nil, err
-	}
-	if len(filenames) == 0 {
-		return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
-	}
-	return parseFiles(t, filenames...)
-}
diff --git a/vendor/github.com/alecthomas/template/parse/lex.go b/vendor/github.com/alecthomas/template/parse/lex.go
deleted file mode 100644
index 55f1c051e862..000000000000
--- a/vendor/github.com/alecthomas/template/parse/lex.go
+++ /dev/null
@@ -1,556 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package parse
-
-import (
-	"fmt"
-	"strings"
-	"unicode"
-	"unicode/utf8"
-)
-
-// item represents a token or text string returned from the scanner.
-type item struct {
-	typ itemType // The type of this item.
-	pos Pos      // The starting position, in bytes, of this item in the input string.
-	val string   // The value of this item.
-}
-
-func (i item) String() string {
-	switch {
-	case i.typ == itemEOF:
-		return "EOF"
-	case i.typ == itemError:
-		return i.val
-	case i.typ > itemKeyword:
-		return fmt.Sprintf("<%s>", i.val)
-	case len(i.val) > 10:
-		return fmt.Sprintf("%.10q...", i.val)
-	}
-	return fmt.Sprintf("%q", i.val)
-}
-
-// itemType identifies the type of lex items.
-type itemType int
-
-const (
-	itemError        itemType = iota // error occurred; value is text of error
-	itemBool                         // boolean constant
-	itemChar                         // printable ASCII character; grab bag for comma etc.
-	itemCharConstant                 // character constant
-	itemComplex                      // complex constant (1+2i); imaginary is just a number
-	itemColonEquals                  // colon-equals (':=') introducing a declaration
-	itemEOF
-	itemField        // alphanumeric identifier starting with '.'
-	itemIdentifier   // alphanumeric identifier not starting with '.'
-	itemLeftDelim    // left action delimiter
-	itemLeftParen    // '(' inside action
-	itemNumber       // simple number, including imaginary
-	itemPipe         // pipe symbol
-	itemRawString    // raw quoted string (includes quotes)
-	itemRightDelim   // right action delimiter
-	itemElideNewline // elide newline after right delim
-	itemRightParen   // ')' inside action
-	itemSpace        // run of spaces separating arguments
-	itemString       // quoted string (includes quotes)
-	itemText         // plain text
-	itemVariable     // variable starting with '$', such as '$' or  '$1' or '$hello'
-	// Keywords appear after all the rest.
-	itemKeyword  // used only to delimit the keywords
-	itemDot      // the cursor, spelled '.'
-	itemDefine   // define keyword
-	itemElse     // else keyword
-	itemEnd      // end keyword
-	itemIf       // if keyword
-	itemNil      // the untyped nil constant, easiest to treat as a keyword
-	itemRange    // range keyword
-	itemTemplate // template keyword
-	itemWith     // with keyword
-)
-
-var key = map[string]itemType{
-	".":        itemDot,
-	"define":   itemDefine,
-	"else":     itemElse,
-	"end":      itemEnd,
-	"if":       itemIf,
-	"range":    itemRange,
-	"nil":      itemNil,
-	"template": itemTemplate,
-	"with":     itemWith,
-}
-
-const eof = -1
-
-// stateFn represents the state of the scanner as a function that returns the next state.
-type stateFn func(*lexer) stateFn
-
-// lexer holds the state of the scanner.
-type lexer struct {
-	name       string    // the name of the input; used only for error reports
-	input      string    // the string being scanned
-	leftDelim  string    // start of action
-	rightDelim string    // end of action
-	state      stateFn   // the next lexing function to enter
-	pos        Pos       // current position in the input
-	start      Pos       // start position of this item
-	width      Pos       // width of last rune read from input
-	lastPos    Pos       // position of most recent item returned by nextItem
-	items      chan item // channel of scanned items
-	parenDepth int       // nesting depth of ( ) exprs
-}
-
-// next returns the next rune in the input.
-func (l *lexer) next() rune {
-	if int(l.pos) >= len(l.input) {
-		l.width = 0
-		return eof
-	}
-	r, w := utf8.DecodeRuneInString(l.input[l.pos:])
-	l.width = Pos(w)
-	l.pos += l.width
-	return r
-}
-
-// peek returns but does not consume the next rune in the input.
-func (l *lexer) peek() rune {
-	r := l.next()
-	l.backup()
-	return r
-}
-
-// backup steps back one rune. Can only be called once per call of next.
-func (l *lexer) backup() {
-	l.pos -= l.width
-}
-
-// emit passes an item back to the client.
-func (l *lexer) emit(t itemType) {
-	l.items <- item{t, l.start, l.input[l.start:l.pos]}
-	l.start = l.pos
-}
-
-// ignore skips over the pending input before this point.
-func (l *lexer) ignore() {
-	l.start = l.pos
-}
-
-// accept consumes the next rune if it's from the valid set.
-func (l *lexer) accept(valid string) bool {
-	if strings.IndexRune(valid, l.next()) >= 0 {
-		return true
-	}
-	l.backup()
-	return false
-}
-
-// acceptRun consumes a run of runes from the valid set.
-func (l *lexer) acceptRun(valid string) {
-	for strings.IndexRune(valid, l.next()) >= 0 {
-	}
-	l.backup()
-}
-
-// lineNumber reports which line we're on, based on the position of
-// the previous item returned by nextItem. Doing it this way
-// means we don't have to worry about peek double counting.
-func (l *lexer) lineNumber() int {
-	return 1 + strings.Count(l.input[:l.lastPos], "\n")
-}
-
-// errorf returns an error token and terminates the scan by passing
-// back a nil pointer that will be the next state, terminating l.nextItem.
-func (l *lexer) errorf(format string, args ...interface{}) stateFn {
-	l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)}
-	return nil
-}
-
-// nextItem returns the next item from the input.
-func (l *lexer) nextItem() item {
-	item := <-l.items
-	l.lastPos = item.pos
-	return item
-}
-
-// lex creates a new scanner for the input string.
-func lex(name, input, left, right string) *lexer {
-	if left == "" {
-		left = leftDelim
-	}
-	if right == "" {
-		right = rightDelim
-	}
-	l := &lexer{
-		name:       name,
-		input:      input,
-		leftDelim:  left,
-		rightDelim: right,
-		items:      make(chan item),
-	}
-	go l.run()
-	return l
-}
-
-// run runs the state machine for the lexer.
-func (l *lexer) run() {
-	for l.state = lexText; l.state != nil; {
-		l.state = l.state(l)
-	}
-}
-
-// state functions
-
-const (
-	leftDelim    = "{{"
-	rightDelim   = "}}"
-	leftComment  = "/*"
-	rightComment = "*/"
-)
-
-// lexText scans until an opening action delimiter, "{{".
-func lexText(l *lexer) stateFn {
-	for {
-		if strings.HasPrefix(l.input[l.pos:], l.leftDelim) {
-			if l.pos > l.start {
-				l.emit(itemText)
-			}
-			return lexLeftDelim
-		}
-		if l.next() == eof {
-			break
-		}
-	}
-	// Correctly reached EOF.
-	if l.pos > l.start {
-		l.emit(itemText)
-	}
-	l.emit(itemEOF)
-	return nil
-}
-
-// lexLeftDelim scans the left delimiter, which is known to be present.
-func lexLeftDelim(l *lexer) stateFn {
-	l.pos += Pos(len(l.leftDelim))
-	if strings.HasPrefix(l.input[l.pos:], leftComment) {
-		return lexComment
-	}
-	l.emit(itemLeftDelim)
-	l.parenDepth = 0
-	return lexInsideAction
-}
-
-// lexComment scans a comment. The left comment marker is known to be present.
-func lexComment(l *lexer) stateFn {
-	l.pos += Pos(len(leftComment))
-	i := strings.Index(l.input[l.pos:], rightComment)
-	if i < 0 {
-		return l.errorf("unclosed comment")
-	}
-	l.pos += Pos(i + len(rightComment))
-	if !strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
-		return l.errorf("comment ends before closing delimiter")
-
-	}
-	l.pos += Pos(len(l.rightDelim))
-	l.ignore()
-	return lexText
-}
-
-// lexRightDelim scans the right delimiter, which is known to be present.
-func lexRightDelim(l *lexer) stateFn {
-	l.pos += Pos(len(l.rightDelim))
-	l.emit(itemRightDelim)
-	if l.peek() == '\\' {
-		l.pos++
-		l.emit(itemElideNewline)
-	}
-	return lexText
-}
-
-// lexInsideAction scans the elements inside action delimiters.
-func lexInsideAction(l *lexer) stateFn {
-	// Either number, quoted string, or identifier.
-	// Spaces separate arguments; runs of spaces turn into itemSpace.
-	// Pipe symbols separate and are emitted.
-	if strings.HasPrefix(l.input[l.pos:], l.rightDelim+"\\") || strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
-		if l.parenDepth == 0 {
-			return lexRightDelim
-		}
-		return l.errorf("unclosed left paren")
-	}
-	switch r := l.next(); {
-	case r == eof || isEndOfLine(r):
-		return l.errorf("unclosed action")
-	case isSpace(r):
-		return lexSpace
-	case r == ':':
-		if l.next() != '=' {
-			return l.errorf("expected :=")
-		}
-		l.emit(itemColonEquals)
-	case r == '|':
-		l.emit(itemPipe)
-	case r == '"':
-		return lexQuote
-	case r == '`':
-		return lexRawQuote
-	case r == '$':
-		return lexVariable
-	case r == '\'':
-		return lexChar
-	case r == '.':
-		// special look-ahead for ".field" so we don't break l.backup().
-		if l.pos < Pos(len(l.input)) {
-			r := l.input[l.pos]
-			if r < '0' || '9' < r {
-				return lexField
-			}
-		}
-		fallthrough // '.' can start a number.
-	case r == '+' || r == '-' || ('0' <= r && r <= '9'):
-		l.backup()
-		return lexNumber
-	case isAlphaNumeric(r):
-		l.backup()
-		return lexIdentifier
-	case r == '(':
-		l.emit(itemLeftParen)
-		l.parenDepth++
-		return lexInsideAction
-	case r == ')':
-		l.emit(itemRightParen)
-		l.parenDepth--
-		if l.parenDepth < 0 {
-			return l.errorf("unexpected right paren %#U", r)
-		}
-		return lexInsideAction
-	case r <= unicode.MaxASCII && unicode.IsPrint(r):
-		l.emit(itemChar)
-		return lexInsideAction
-	default:
-		return l.errorf("unrecognized character in action: %#U", r)
-	}
-	return lexInsideAction
-}
-
-// lexSpace scans a run of space characters.
-// One space has already been seen.
-func lexSpace(l *lexer) stateFn {
-	for isSpace(l.peek()) {
-		l.next()
-	}
-	l.emit(itemSpace)
-	return lexInsideAction
-}
-
-// lexIdentifier scans an alphanumeric.
-func lexIdentifier(l *lexer) stateFn {
-Loop:
-	for {
-		switch r := l.next(); {
-		case isAlphaNumeric(r):
-			// absorb.
-		default:
-			l.backup()
-			word := l.input[l.start:l.pos]
-			if !l.atTerminator() {
-				return l.errorf("bad character %#U", r)
-			}
-			switch {
-			case key[word] > itemKeyword:
-				l.emit(key[word])
-			case word[0] == '.':
-				l.emit(itemField)
-			case word == "true", word == "false":
-				l.emit(itemBool)
-			default:
-				l.emit(itemIdentifier)
-			}
-			break Loop
-		}
-	}
-	return lexInsideAction
-}
-
-// lexField scans a field: .Alphanumeric.
-// The . has been scanned.
-func lexField(l *lexer) stateFn {
-	return lexFieldOrVariable(l, itemField)
-}
-
-// lexVariable scans a Variable: $Alphanumeric.
-// The $ has been scanned.
-func lexVariable(l *lexer) stateFn {
-	if l.atTerminator() { // Nothing interesting follows -> "$".
-		l.emit(itemVariable)
-		return lexInsideAction
-	}
-	return lexFieldOrVariable(l, itemVariable)
-}
-
-// lexVariable scans a field or variable: [.$]Alphanumeric.
-// The . or $ has been scanned.
-func lexFieldOrVariable(l *lexer, typ itemType) stateFn {
-	if l.atTerminator() { // Nothing interesting follows -> "." or "$".
-		if typ == itemVariable {
-			l.emit(itemVariable)
-		} else {
-			l.emit(itemDot)
-		}
-		return lexInsideAction
-	}
-	var r rune
-	for {
-		r = l.next()
-		if !isAlphaNumeric(r) {
-			l.backup()
-			break
-		}
-	}
-	if !l.atTerminator() {
-		return l.errorf("bad character %#U", r)
-	}
-	l.emit(typ)
-	return lexInsideAction
-}
-
-// atTerminator reports whether the input is at valid termination character to
-// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases
-// like "$x+2" not being acceptable without a space, in case we decide one
-// day to implement arithmetic.
-func (l *lexer) atTerminator() bool {
-	r := l.peek()
-	if isSpace(r) || isEndOfLine(r) {
-		return true
-	}
-	switch r {
-	case eof, '.', ',', '|', ':', ')', '(':
-		return true
-	}
-	// Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will
-	// succeed but should fail) but only in extremely rare cases caused by willfully
-	// bad choice of delimiter.
-	if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r {
-		return true
-	}
-	return false
-}
-
-// lexChar scans a character constant. The initial quote is already
-// scanned. Syntax checking is done by the parser.
-func lexChar(l *lexer) stateFn {
-Loop:
-	for {
-		switch l.next() {
-		case '\\':
-			if r := l.next(); r != eof && r != '\n' {
-				break
-			}
-			fallthrough
-		case eof, '\n':
-			return l.errorf("unterminated character constant")
-		case '\'':
-			break Loop
-		}
-	}
-	l.emit(itemCharConstant)
-	return lexInsideAction
-}
-
-// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This
-// isn't a perfect number scanner - for instance it accepts "." and "0x0.2"
-// and "089" - but when it's wrong the input is invalid and the parser (via
-// strconv) will notice.
-func lexNumber(l *lexer) stateFn {
-	if !l.scanNumber() {
-		return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
-	}
-	if sign := l.peek(); sign == '+' || sign == '-' {
-		// Complex: 1+2i. No spaces, must end in 'i'.
-		if !l.scanNumber() || l.input[l.pos-1] != 'i' {
-			return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
-		}
-		l.emit(itemComplex)
-	} else {
-		l.emit(itemNumber)
-	}
-	return lexInsideAction
-}
-
-func (l *lexer) scanNumber() bool {
-	// Optional leading sign.
-	l.accept("+-")
-	// Is it hex?
-	digits := "0123456789"
-	if l.accept("0") && l.accept("xX") {
-		digits = "0123456789abcdefABCDEF"
-	}
-	l.acceptRun(digits)
-	if l.accept(".") {
-		l.acceptRun(digits)
-	}
-	if l.accept("eE") {
-		l.accept("+-")
-		l.acceptRun("0123456789")
-	}
-	// Is it imaginary?
-	l.accept("i")
-	// Next thing mustn't be alphanumeric.
-	if isAlphaNumeric(l.peek()) {
-		l.next()
-		return false
-	}
-	return true
-}
-
-// lexQuote scans a quoted string.
-func lexQuote(l *lexer) stateFn {
-Loop:
-	for {
-		switch l.next() {
-		case '\\':
-			if r := l.next(); r != eof && r != '\n' {
-				break
-			}
-			fallthrough
-		case eof, '\n':
-			return l.errorf("unterminated quoted string")
-		case '"':
-			break Loop
-		}
-	}
-	l.emit(itemString)
-	return lexInsideAction
-}
-
-// lexRawQuote scans a raw quoted string.
-func lexRawQuote(l *lexer) stateFn {
-Loop:
-	for {
-		switch l.next() {
-		case eof, '\n':
-			return l.errorf("unterminated raw quoted string")
-		case '`':
-			break Loop
-		}
-	}
-	l.emit(itemRawString)
-	return lexInsideAction
-}
-
-// isSpace reports whether r is a space character.
-func isSpace(r rune) bool {
-	return r == ' ' || r == '\t'
-}
-
-// isEndOfLine reports whether r is an end-of-line character.
-func isEndOfLine(r rune) bool {
-	return r == '\r' || r == '\n'
-}
-
-// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore.
-func isAlphaNumeric(r rune) bool {
-	return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
-}
diff --git a/vendor/github.com/alecthomas/template/parse/node.go b/vendor/github.com/alecthomas/template/parse/node.go
deleted file mode 100644
index 55c37f6dbaca..000000000000
--- a/vendor/github.com/alecthomas/template/parse/node.go
+++ /dev/null
@@ -1,834 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Parse nodes.
-
-package parse
-
-import (
-	"bytes"
-	"fmt"
-	"strconv"
-	"strings"
-)
-
-var textFormat = "%s" // Changed to "%q" in tests for better error messages.
-
-// A Node is an element in the parse tree. The interface is trivial.
-// The interface contains an unexported method so that only
-// types local to this package can satisfy it.
-type Node interface {
-	Type() NodeType
-	String() string
-	// Copy does a deep copy of the Node and all its components.
-	// To avoid type assertions, some XxxNodes also have specialized
-	// CopyXxx methods that return *XxxNode.
-	Copy() Node
-	Position() Pos // byte position of start of node in full original input string
-	// tree returns the containing *Tree.
-	// It is unexported so all implementations of Node are in this package.
-	tree() *Tree
-}
-
-// NodeType identifies the type of a parse tree node.
-type NodeType int
-
-// Pos represents a byte position in the original input text from which
-// this template was parsed.
-type Pos int
-
-func (p Pos) Position() Pos {
-	return p
-}
-
-// Type returns itself and provides an easy default implementation
-// for embedding in a Node. Embedded in all non-trivial Nodes.
-func (t NodeType) Type() NodeType {
-	return t
-}
-
-const (
-	NodeText       NodeType = iota // Plain text.
-	NodeAction                     // A non-control action such as a field evaluation.
-	NodeBool                       // A boolean constant.
-	NodeChain                      // A sequence of field accesses.
-	NodeCommand                    // An element of a pipeline.
-	NodeDot                        // The cursor, dot.
-	nodeElse                       // An else action. Not added to tree.
-	nodeEnd                        // An end action. Not added to tree.
-	NodeField                      // A field or method name.
-	NodeIdentifier                 // An identifier; always a function name.
-	NodeIf                         // An if action.
-	NodeList                       // A list of Nodes.
-	NodeNil                        // An untyped nil constant.
-	NodeNumber                     // A numerical constant.
-	NodePipe                       // A pipeline of commands.
-	NodeRange                      // A range action.
-	NodeString                     // A string constant.
-	NodeTemplate                   // A template invocation action.
-	NodeVariable                   // A $ variable.
-	NodeWith                       // A with action.
-)
-
-// Nodes.
-
-// ListNode holds a sequence of nodes.
-type ListNode struct {
-	NodeType
-	Pos
-	tr    *Tree
-	Nodes []Node // The element nodes in lexical order.
-}
-
-func (t *Tree) newList(pos Pos) *ListNode {
-	return &ListNode{tr: t, NodeType: NodeList, Pos: pos}
-}
-
-func (l *ListNode) append(n Node) {
-	l.Nodes = append(l.Nodes, n)
-}
-
-func (l *ListNode) tree() *Tree {
-	return l.tr
-}
-
-func (l *ListNode) String() string {
-	b := new(bytes.Buffer)
-	for _, n := range l.Nodes {
-		fmt.Fprint(b, n)
-	}
-	return b.String()
-}
-
-func (l *ListNode) CopyList() *ListNode {
-	if l == nil {
-		return l
-	}
-	n := l.tr.newList(l.Pos)
-	for _, elem := range l.Nodes {
-		n.append(elem.Copy())
-	}
-	return n
-}
-
-func (l *ListNode) Copy() Node {
-	return l.CopyList()
-}
-
-// TextNode holds plain text.
-type TextNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	Text []byte // The text; may span newlines.
-}
-
-func (t *Tree) newText(pos Pos, text string) *TextNode {
-	return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)}
-}
-
-func (t *TextNode) String() string {
-	return fmt.Sprintf(textFormat, t.Text)
-}
-
-func (t *TextNode) tree() *Tree {
-	return t.tr
-}
-
-func (t *TextNode) Copy() Node {
-	return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)}
-}
-
-// PipeNode holds a pipeline with optional declaration
-type PipeNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	Line int             // The line number in the input (deprecated; kept for compatibility)
-	Decl []*VariableNode // Variable declarations in lexical order.
-	Cmds []*CommandNode  // The commands in lexical order.
-}
-
-func (t *Tree) newPipeline(pos Pos, line int, decl []*VariableNode) *PipeNode {
-	return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: decl}
-}
-
-func (p *PipeNode) append(command *CommandNode) {
-	p.Cmds = append(p.Cmds, command)
-}
-
-func (p *PipeNode) String() string {
-	s := ""
-	if len(p.Decl) > 0 {
-		for i, v := range p.Decl {
-			if i > 0 {
-				s += ", "
-			}
-			s += v.String()
-		}
-		s += " := "
-	}
-	for i, c := range p.Cmds {
-		if i > 0 {
-			s += " | "
-		}
-		s += c.String()
-	}
-	return s
-}
-
-func (p *PipeNode) tree() *Tree {
-	return p.tr
-}
-
-func (p *PipeNode) CopyPipe() *PipeNode {
-	if p == nil {
-		return p
-	}
-	var decl []*VariableNode
-	for _, d := range p.Decl {
-		decl = append(decl, d.Copy().(*VariableNode))
-	}
-	n := p.tr.newPipeline(p.Pos, p.Line, decl)
-	for _, c := range p.Cmds {
-		n.append(c.Copy().(*CommandNode))
-	}
-	return n
-}
-
-func (p *PipeNode) Copy() Node {
-	return p.CopyPipe()
-}
-
-// ActionNode holds an action (something bounded by delimiters).
-// Control actions have their own nodes; ActionNode represents simple
-// ones such as field evaluations and parenthesized pipelines.
-type ActionNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	Line int       // The line number in the input (deprecated; kept for compatibility)
-	Pipe *PipeNode // The pipeline in the action.
-}
-
-func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode {
-	return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe}
-}
-
-func (a *ActionNode) String() string {
-	return fmt.Sprintf("{{%s}}", a.Pipe)
-
-}
-
-func (a *ActionNode) tree() *Tree {
-	return a.tr
-}
-
-func (a *ActionNode) Copy() Node {
-	return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe())
-
-}
-
-// CommandNode holds a command (a pipeline inside an evaluating action).
-type CommandNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	Args []Node // Arguments in lexical order: Identifier, field, or constant.
-}
-
-func (t *Tree) newCommand(pos Pos) *CommandNode {
-	return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos}
-}
-
-func (c *CommandNode) append(arg Node) {
-	c.Args = append(c.Args, arg)
-}
-
-func (c *CommandNode) String() string {
-	s := ""
-	for i, arg := range c.Args {
-		if i > 0 {
-			s += " "
-		}
-		if arg, ok := arg.(*PipeNode); ok {
-			s += "(" + arg.String() + ")"
-			continue
-		}
-		s += arg.String()
-	}
-	return s
-}
-
-func (c *CommandNode) tree() *Tree {
-	return c.tr
-}
-
-func (c *CommandNode) Copy() Node {
-	if c == nil {
-		return c
-	}
-	n := c.tr.newCommand(c.Pos)
-	for _, c := range c.Args {
-		n.append(c.Copy())
-	}
-	return n
-}
-
-// IdentifierNode holds an identifier.
-type IdentifierNode struct {
-	NodeType
-	Pos
-	tr    *Tree
-	Ident string // The identifier's name.
-}
-
-// NewIdentifier returns a new IdentifierNode with the given identifier name.
-func NewIdentifier(ident string) *IdentifierNode {
-	return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident}
-}
-
-// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature.
-// Chained for convenience.
-// TODO: fix one day?
-func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode {
-	i.Pos = pos
-	return i
-}
-
-// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature.
-// Chained for convenience.
-// TODO: fix one day?
-func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode {
-	i.tr = t
-	return i
-}
-
-func (i *IdentifierNode) String() string {
-	return i.Ident
-}
-
-func (i *IdentifierNode) tree() *Tree {
-	return i.tr
-}
-
-func (i *IdentifierNode) Copy() Node {
-	return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos)
-}
-
-// VariableNode holds a list of variable names, possibly with chained field
-// accesses. The dollar sign is part of the (first) name.
-type VariableNode struct {
-	NodeType
-	Pos
-	tr    *Tree
-	Ident []string // Variable name and fields in lexical order.
-}
-
-func (t *Tree) newVariable(pos Pos, ident string) *VariableNode {
-	return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")}
-}
-
-func (v *VariableNode) String() string {
-	s := ""
-	for i, id := range v.Ident {
-		if i > 0 {
-			s += "."
-		}
-		s += id
-	}
-	return s
-}
-
-func (v *VariableNode) tree() *Tree {
-	return v.tr
-}
-
-func (v *VariableNode) Copy() Node {
-	return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)}
-}
-
-// DotNode holds the special identifier '.'.
-type DotNode struct {
-	NodeType
-	Pos
-	tr *Tree
-}
-
-func (t *Tree) newDot(pos Pos) *DotNode {
-	return &DotNode{tr: t, NodeType: NodeDot, Pos: pos}
-}
-
-func (d *DotNode) Type() NodeType {
-	// Override method on embedded NodeType for API compatibility.
-	// TODO: Not really a problem; could change API without effect but
-	// api tool complains.
-	return NodeDot
-}
-
-func (d *DotNode) String() string {
-	return "."
-}
-
-func (d *DotNode) tree() *Tree {
-	return d.tr
-}
-
-func (d *DotNode) Copy() Node {
-	return d.tr.newDot(d.Pos)
-}
-
-// NilNode holds the special identifier 'nil' representing an untyped nil constant.
-type NilNode struct {
-	NodeType
-	Pos
-	tr *Tree
-}
-
-func (t *Tree) newNil(pos Pos) *NilNode {
-	return &NilNode{tr: t, NodeType: NodeNil, Pos: pos}
-}
-
-func (n *NilNode) Type() NodeType {
-	// Override method on embedded NodeType for API compatibility.
-	// TODO: Not really a problem; could change API without effect but
-	// api tool complains.
-	return NodeNil
-}
-
-func (n *NilNode) String() string {
-	return "nil"
-}
-
-func (n *NilNode) tree() *Tree {
-	return n.tr
-}
-
-func (n *NilNode) Copy() Node {
-	return n.tr.newNil(n.Pos)
-}
-
-// FieldNode holds a field (identifier starting with '.').
-// The names may be chained ('.x.y').
-// The period is dropped from each ident.
-type FieldNode struct {
-	NodeType
-	Pos
-	tr    *Tree
-	Ident []string // The identifiers in lexical order.
-}
-
-func (t *Tree) newField(pos Pos, ident string) *FieldNode {
-	return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period
-}
-
-func (f *FieldNode) String() string {
-	s := ""
-	for _, id := range f.Ident {
-		s += "." + id
-	}
-	return s
-}
-
-func (f *FieldNode) tree() *Tree {
-	return f.tr
-}
-
-func (f *FieldNode) Copy() Node {
-	return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)}
-}
-
-// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.').
-// The names may be chained ('.x.y').
-// The periods are dropped from each ident.
-type ChainNode struct {
-	NodeType
-	Pos
-	tr    *Tree
-	Node  Node
-	Field []string // The identifiers in lexical order.
-}
-
-func (t *Tree) newChain(pos Pos, node Node) *ChainNode {
-	return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node}
-}
-
-// Add adds the named field (which should start with a period) to the end of the chain.
-func (c *ChainNode) Add(field string) {
-	if len(field) == 0 || field[0] != '.' {
-		panic("no dot in field")
-	}
-	field = field[1:] // Remove leading dot.
-	if field == "" {
-		panic("empty field")
-	}
-	c.Field = append(c.Field, field)
-}
-
-func (c *ChainNode) String() string {
-	s := c.Node.String()
-	if _, ok := c.Node.(*PipeNode); ok {
-		s = "(" + s + ")"
-	}
-	for _, field := range c.Field {
-		s += "." + field
-	}
-	return s
-}
-
-func (c *ChainNode) tree() *Tree {
-	return c.tr
-}
-
-func (c *ChainNode) Copy() Node {
-	return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)}
-}
-
-// BoolNode holds a boolean constant.
-type BoolNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	True bool // The value of the boolean constant.
-}
-
-func (t *Tree) newBool(pos Pos, true bool) *BoolNode {
-	return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true}
-}
-
-func (b *BoolNode) String() string {
-	if b.True {
-		return "true"
-	}
-	return "false"
-}
-
-func (b *BoolNode) tree() *Tree {
-	return b.tr
-}
-
-func (b *BoolNode) Copy() Node {
-	return b.tr.newBool(b.Pos, b.True)
-}
-
-// NumberNode holds a number: signed or unsigned integer, float, or complex.
-// The value is parsed and stored under all the types that can represent the value.
-// This simulates in a small amount of code the behavior of Go's ideal constants.
-type NumberNode struct {
-	NodeType
-	Pos
-	tr         *Tree
-	IsInt      bool       // Number has an integral value.
-	IsUint     bool       // Number has an unsigned integral value.
-	IsFloat    bool       // Number has a floating-point value.
-	IsComplex  bool       // Number is complex.
-	Int64      int64      // The signed integer value.
-	Uint64     uint64     // The unsigned integer value.
-	Float64    float64    // The floating-point value.
-	Complex128 complex128 // The complex value.
-	Text       string     // The original textual representation from the input.
-}
-
-func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) {
-	n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text}
-	switch typ {
-	case itemCharConstant:
-		rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0])
-		if err != nil {
-			return nil, err
-		}
-		if tail != "'" {
-			return nil, fmt.Errorf("malformed character constant: %s", text)
-		}
-		n.Int64 = int64(rune)
-		n.IsInt = true
-		n.Uint64 = uint64(rune)
-		n.IsUint = true
-		n.Float64 = float64(rune) // odd but those are the rules.
-		n.IsFloat = true
-		return n, nil
-	case itemComplex:
-		// fmt.Sscan can parse the pair, so let it do the work.
-		if _, err := fmt.Sscan(text, &n.Complex128); err != nil {
-			return nil, err
-		}
-		n.IsComplex = true
-		n.simplifyComplex()
-		return n, nil
-	}
-	// Imaginary constants can only be complex unless they are zero.
-	if len(text) > 0 && text[len(text)-1] == 'i' {
-		f, err := strconv.ParseFloat(text[:len(text)-1], 64)
-		if err == nil {
-			n.IsComplex = true
-			n.Complex128 = complex(0, f)
-			n.simplifyComplex()
-			return n, nil
-		}
-	}
-	// Do integer test first so we get 0x123 etc.
-	u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below.
-	if err == nil {
-		n.IsUint = true
-		n.Uint64 = u
-	}
-	i, err := strconv.ParseInt(text, 0, 64)
-	if err == nil {
-		n.IsInt = true
-		n.Int64 = i
-		if i == 0 {
-			n.IsUint = true // in case of -0.
-			n.Uint64 = u
-		}
-	}
-	// If an integer extraction succeeded, promote the float.
-	if n.IsInt {
-		n.IsFloat = true
-		n.Float64 = float64(n.Int64)
-	} else if n.IsUint {
-		n.IsFloat = true
-		n.Float64 = float64(n.Uint64)
-	} else {
-		f, err := strconv.ParseFloat(text, 64)
-		if err == nil {
-			n.IsFloat = true
-			n.Float64 = f
-			// If a floating-point extraction succeeded, extract the int if needed.
-			if !n.IsInt && float64(int64(f)) == f {
-				n.IsInt = true
-				n.Int64 = int64(f)
-			}
-			if !n.IsUint && float64(uint64(f)) == f {
-				n.IsUint = true
-				n.Uint64 = uint64(f)
-			}
-		}
-	}
-	if !n.IsInt && !n.IsUint && !n.IsFloat {
-		return nil, fmt.Errorf("illegal number syntax: %q", text)
-	}
-	return n, nil
-}
-
-// simplifyComplex pulls out any other types that are represented by the complex number.
-// These all require that the imaginary part be zero.
-func (n *NumberNode) simplifyComplex() {
-	n.IsFloat = imag(n.Complex128) == 0
-	if n.IsFloat {
-		n.Float64 = real(n.Complex128)
-		n.IsInt = float64(int64(n.Float64)) == n.Float64
-		if n.IsInt {
-			n.Int64 = int64(n.Float64)
-		}
-		n.IsUint = float64(uint64(n.Float64)) == n.Float64
-		if n.IsUint {
-			n.Uint64 = uint64(n.Float64)
-		}
-	}
-}
-
-func (n *NumberNode) String() string {
-	return n.Text
-}
-
-func (n *NumberNode) tree() *Tree {
-	return n.tr
-}
-
-func (n *NumberNode) Copy() Node {
-	nn := new(NumberNode)
-	*nn = *n // Easy, fast, correct.
-	return nn
-}
-
-// StringNode holds a string constant. The value has been "unquoted".
-type StringNode struct {
-	NodeType
-	Pos
-	tr     *Tree
-	Quoted string // The original text of the string, with quotes.
-	Text   string // The string, after quote processing.
-}
-
-func (t *Tree) newString(pos Pos, orig, text string) *StringNode {
-	return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text}
-}
-
-func (s *StringNode) String() string {
-	return s.Quoted
-}
-
-func (s *StringNode) tree() *Tree {
-	return s.tr
-}
-
-func (s *StringNode) Copy() Node {
-	return s.tr.newString(s.Pos, s.Quoted, s.Text)
-}
-
-// endNode represents an {{end}} action.
-// It does not appear in the final parse tree.
-type endNode struct {
-	NodeType
-	Pos
-	tr *Tree
-}
-
-func (t *Tree) newEnd(pos Pos) *endNode {
-	return &endNode{tr: t, NodeType: nodeEnd, Pos: pos}
-}
-
-func (e *endNode) String() string {
-	return "{{end}}"
-}
-
-func (e *endNode) tree() *Tree {
-	return e.tr
-}
-
-func (e *endNode) Copy() Node {
-	return e.tr.newEnd(e.Pos)
-}
-
-// elseNode represents an {{else}} action. Does not appear in the final tree.
-type elseNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	Line int // The line number in the input (deprecated; kept for compatibility)
-}
-
-func (t *Tree) newElse(pos Pos, line int) *elseNode {
-	return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line}
-}
-
-func (e *elseNode) Type() NodeType {
-	return nodeElse
-}
-
-func (e *elseNode) String() string {
-	return "{{else}}"
-}
-
-func (e *elseNode) tree() *Tree {
-	return e.tr
-}
-
-func (e *elseNode) Copy() Node {
-	return e.tr.newElse(e.Pos, e.Line)
-}
-
-// BranchNode is the common representation of if, range, and with.
-type BranchNode struct {
-	NodeType
-	Pos
-	tr       *Tree
-	Line     int       // The line number in the input (deprecated; kept for compatibility)
-	Pipe     *PipeNode // The pipeline to be evaluated.
-	List     *ListNode // What to execute if the value is non-empty.
-	ElseList *ListNode // What to execute if the value is empty (nil if absent).
-}
-
-func (b *BranchNode) String() string {
-	name := ""
-	switch b.NodeType {
-	case NodeIf:
-		name = "if"
-	case NodeRange:
-		name = "range"
-	case NodeWith:
-		name = "with"
-	default:
-		panic("unknown branch type")
-	}
-	if b.ElseList != nil {
-		return fmt.Sprintf("{{%s %s}}%s{{else}}%s{{end}}", name, b.Pipe, b.List, b.ElseList)
-	}
-	return fmt.Sprintf("{{%s %s}}%s{{end}}", name, b.Pipe, b.List)
-}
-
-func (b *BranchNode) tree() *Tree {
-	return b.tr
-}
-
-func (b *BranchNode) Copy() Node {
-	switch b.NodeType {
-	case NodeIf:
-		return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
-	case NodeRange:
-		return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
-	case NodeWith:
-		return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
-	default:
-		panic("unknown branch type")
-	}
-}
-
-// IfNode represents an {{if}} action and its commands.
-type IfNode struct {
-	BranchNode
-}
-
-func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode {
-	return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
-}
-
-func (i *IfNode) Copy() Node {
-	return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList())
-}
-
-// RangeNode represents a {{range}} action and its commands.
-type RangeNode struct {
-	BranchNode
-}
-
-func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode {
-	return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
-}
-
-func (r *RangeNode) Copy() Node {
-	return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList())
-}
-
-// WithNode represents a {{with}} action and its commands.
-type WithNode struct {
-	BranchNode
-}
-
-func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode {
-	return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
-}
-
-func (w *WithNode) Copy() Node {
-	return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList())
-}
-
-// TemplateNode represents a {{template}} action.
-type TemplateNode struct {
-	NodeType
-	Pos
-	tr   *Tree
-	Line int       // The line number in the input (deprecated; kept for compatibility)
-	Name string    // The name of the template (unquoted).
-	Pipe *PipeNode // The command to evaluate as dot for the template.
-}
-
-func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode {
-	return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe}
-}
-
-func (t *TemplateNode) String() string {
-	if t.Pipe == nil {
-		return fmt.Sprintf("{{template %q}}", t.Name)
-	}
-	return fmt.Sprintf("{{template %q %s}}", t.Name, t.Pipe)
-}
-
-func (t *TemplateNode) tree() *Tree {
-	return t.tr
-}
-
-func (t *TemplateNode) Copy() Node {
-	return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe())
-}
diff --git a/vendor/github.com/alecthomas/template/parse/parse.go b/vendor/github.com/alecthomas/template/parse/parse.go
deleted file mode 100644
index 0d77ade87188..000000000000
--- a/vendor/github.com/alecthomas/template/parse/parse.go
+++ /dev/null
@@ -1,700 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package parse builds parse trees for templates as defined by text/template
-// and html/template. Clients should use those packages to construct templates
-// rather than this one, which provides shared internal data structures not
-// intended for general use.
-package parse
-
-import (
-	"bytes"
-	"fmt"
-	"runtime"
-	"strconv"
-	"strings"
-)
-
-// Tree is the representation of a single parsed template.
-type Tree struct {
-	Name      string    // name of the template represented by the tree.
-	ParseName string    // name of the top-level template during parsing, for error messages.
-	Root      *ListNode // top-level root of the tree.
-	text      string    // text parsed to create the template (or its parent)
-	// Parsing only; cleared after parse.
-	funcs     []map[string]interface{}
-	lex       *lexer
-	token     [3]item // three-token lookahead for parser.
-	peekCount int
-	vars      []string // variables defined at the moment.
-}
-
-// Copy returns a copy of the Tree. Any parsing state is discarded.
-func (t *Tree) Copy() *Tree {
-	if t == nil {
-		return nil
-	}
-	return &Tree{
-		Name:      t.Name,
-		ParseName: t.ParseName,
-		Root:      t.Root.CopyList(),
-		text:      t.text,
-	}
-}
-
-// Parse returns a map from template name to parse.Tree, created by parsing the
-// templates described in the argument string. The top-level template will be
-// given the specified name. If an error is encountered, parsing stops and an
-// empty map is returned with the error.
-func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (treeSet map[string]*Tree, err error) {
-	treeSet = make(map[string]*Tree)
-	t := New(name)
-	t.text = text
-	_, err = t.Parse(text, leftDelim, rightDelim, treeSet, funcs...)
-	return
-}
-
-// next returns the next token.
-func (t *Tree) next() item {
-	if t.peekCount > 0 {
-		t.peekCount--
-	} else {
-		t.token[0] = t.lex.nextItem()
-	}
-	return t.token[t.peekCount]
-}
-
-// backup backs the input stream up one token.
-func (t *Tree) backup() {
-	t.peekCount++
-}
-
-// backup2 backs the input stream up two tokens.
-// The zeroth token is already there.
-func (t *Tree) backup2(t1 item) {
-	t.token[1] = t1
-	t.peekCount = 2
-}
-
-// backup3 backs the input stream up three tokens
-// The zeroth token is already there.
-func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back.
-	t.token[1] = t1
-	t.token[2] = t2
-	t.peekCount = 3
-}
-
-// peek returns but does not consume the next token.
-func (t *Tree) peek() item {
-	if t.peekCount > 0 {
-		return t.token[t.peekCount-1]
-	}
-	t.peekCount = 1
-	t.token[0] = t.lex.nextItem()
-	return t.token[0]
-}
-
-// nextNonSpace returns the next non-space token.
-func (t *Tree) nextNonSpace() (token item) {
-	for {
-		token = t.next()
-		if token.typ != itemSpace {
-			break
-		}
-	}
-	return token
-}
-
-// peekNonSpace returns but does not consume the next non-space token.
-func (t *Tree) peekNonSpace() (token item) {
-	for {
-		token = t.next()
-		if token.typ != itemSpace {
-			break
-		}
-	}
-	t.backup()
-	return token
-}
-
-// Parsing.
-
-// New allocates a new parse tree with the given name.
-func New(name string, funcs ...map[string]interface{}) *Tree {
-	return &Tree{
-		Name:  name,
-		funcs: funcs,
-	}
-}
-
-// ErrorContext returns a textual representation of the location of the node in the input text.
-// The receiver is only used when the node does not have a pointer to the tree inside,
-// which can occur in old code.
-func (t *Tree) ErrorContext(n Node) (location, context string) {
-	pos := int(n.Position())
-	tree := n.tree()
-	if tree == nil {
-		tree = t
-	}
-	text := tree.text[:pos]
-	byteNum := strings.LastIndex(text, "\n")
-	if byteNum == -1 {
-		byteNum = pos // On first line.
-	} else {
-		byteNum++ // After the newline.
-		byteNum = pos - byteNum
-	}
-	lineNum := 1 + strings.Count(text, "\n")
-	context = n.String()
-	if len(context) > 20 {
-		context = fmt.Sprintf("%.20s...", context)
-	}
-	return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context
-}
-
-// errorf formats the error and terminates processing.
-func (t *Tree) errorf(format string, args ...interface{}) {
-	t.Root = nil
-	format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.lex.lineNumber(), format)
-	panic(fmt.Errorf(format, args...))
-}
-
-// error terminates processing.
-func (t *Tree) error(err error) {
-	t.errorf("%s", err)
-}
-
-// expect consumes the next token and guarantees it has the required type.
-func (t *Tree) expect(expected itemType, context string) item {
-	token := t.nextNonSpace()
-	if token.typ != expected {
-		t.unexpected(token, context)
-	}
-	return token
-}
-
-// expectOneOf consumes the next token and guarantees it has one of the required types.
-func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item {
-	token := t.nextNonSpace()
-	if token.typ != expected1 && token.typ != expected2 {
-		t.unexpected(token, context)
-	}
-	return token
-}
-
-// unexpected complains about the token and terminates processing.
-func (t *Tree) unexpected(token item, context string) {
-	t.errorf("unexpected %s in %s", token, context)
-}
-
-// recover is the handler that turns panics into returns from the top level of Parse.
-func (t *Tree) recover(errp *error) {
-	e := recover()
-	if e != nil {
-		if _, ok := e.(runtime.Error); ok {
-			panic(e)
-		}
-		if t != nil {
-			t.stopParse()
-		}
-		*errp = e.(error)
-	}
-	return
-}
-
-// startParse initializes the parser, using the lexer.
-func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer) {
-	t.Root = nil
-	t.lex = lex
-	t.vars = []string{"$"}
-	t.funcs = funcs
-}
-
-// stopParse terminates parsing.
-func (t *Tree) stopParse() {
-	t.lex = nil
-	t.vars = nil
-	t.funcs = nil
-}
-
-// Parse parses the template definition string to construct a representation of
-// the template for execution. If either action delimiter string is empty, the
-// default ("{{" or "}}") is used. Embedded template definitions are added to
-// the treeSet map.
-func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) {
-	defer t.recover(&err)
-	t.ParseName = t.Name
-	t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim))
-	t.text = text
-	t.parse(treeSet)
-	t.add(treeSet)
-	t.stopParse()
-	return t, nil
-}
-
-// add adds tree to the treeSet.
-func (t *Tree) add(treeSet map[string]*Tree) {
-	tree := treeSet[t.Name]
-	if tree == nil || IsEmptyTree(tree.Root) {
-		treeSet[t.Name] = t
-		return
-	}
-	if !IsEmptyTree(t.Root) {
-		t.errorf("template: multiple definition of template %q", t.Name)
-	}
-}
-
-// IsEmptyTree reports whether this tree (node) is empty of everything but space.
-func IsEmptyTree(n Node) bool {
-	switch n := n.(type) {
-	case nil:
-		return true
-	case *ActionNode:
-	case *IfNode:
-	case *ListNode:
-		for _, node := range n.Nodes {
-			if !IsEmptyTree(node) {
-				return false
-			}
-		}
-		return true
-	case *RangeNode:
-	case *TemplateNode:
-	case *TextNode:
-		return len(bytes.TrimSpace(n.Text)) == 0
-	case *WithNode:
-	default:
-		panic("unknown node: " + n.String())
-	}
-	return false
-}
-
-// parse is the top-level parser for a template, essentially the same
-// as itemList except it also parses {{define}} actions.
-// It runs to EOF.
-func (t *Tree) parse(treeSet map[string]*Tree) (next Node) {
-	t.Root = t.newList(t.peek().pos)
-	for t.peek().typ != itemEOF {
-		if t.peek().typ == itemLeftDelim {
-			delim := t.next()
-			if t.nextNonSpace().typ == itemDefine {
-				newT := New("definition") // name will be updated once we know it.
-				newT.text = t.text
-				newT.ParseName = t.ParseName
-				newT.startParse(t.funcs, t.lex)
-				newT.parseDefinition(treeSet)
-				continue
-			}
-			t.backup2(delim)
-		}
-		n := t.textOrAction()
-		if n.Type() == nodeEnd {
-			t.errorf("unexpected %s", n)
-		}
-		t.Root.append(n)
-	}
-	return nil
-}
-
-// parseDefinition parses a {{define}} ...  {{end}} template definition and
-// installs the definition in the treeSet map.  The "define" keyword has already
-// been scanned.
-func (t *Tree) parseDefinition(treeSet map[string]*Tree) {
-	const context = "define clause"
-	name := t.expectOneOf(itemString, itemRawString, context)
-	var err error
-	t.Name, err = strconv.Unquote(name.val)
-	if err != nil {
-		t.error(err)
-	}
-	t.expect(itemRightDelim, context)
-	var end Node
-	t.Root, end = t.itemList()
-	if end.Type() != nodeEnd {
-		t.errorf("unexpected %s in %s", end, context)
-	}
-	t.add(treeSet)
-	t.stopParse()
-}
-
-// itemList:
-//	textOrAction*
-// Terminates at {{end}} or {{else}}, returned separately.
-func (t *Tree) itemList() (list *ListNode, next Node) {
-	list = t.newList(t.peekNonSpace().pos)
-	for t.peekNonSpace().typ != itemEOF {
-		n := t.textOrAction()
-		switch n.Type() {
-		case nodeEnd, nodeElse:
-			return list, n
-		}
-		list.append(n)
-	}
-	t.errorf("unexpected EOF")
-	return
-}
-
-// textOrAction:
-//	text | action
-func (t *Tree) textOrAction() Node {
-	switch token := t.nextNonSpace(); token.typ {
-	case itemElideNewline:
-		return t.elideNewline()
-	case itemText:
-		return t.newText(token.pos, token.val)
-	case itemLeftDelim:
-		return t.action()
-	default:
-		t.unexpected(token, "input")
-	}
-	return nil
-}
-
-// elideNewline:
-// Remove newlines trailing rightDelim if \\ is present.
-func (t *Tree) elideNewline() Node {
-	token := t.peek()
-	if token.typ != itemText {
-		t.unexpected(token, "input")
-		return nil
-	}
-
-	t.next()
-	stripped := strings.TrimLeft(token.val, "\n\r")
-	diff := len(token.val) - len(stripped)
-	if diff > 0 {
-		// This is a bit nasty. We mutate the token in-place to remove
-		// preceding newlines.
-		token.pos += Pos(diff)
-		token.val = stripped
-	}
-	return t.newText(token.pos, token.val)
-}
-
-// Action:
-//	control
-//	command ("|" command)*
-// Left delim is past. Now get actions.
-// First word could be a keyword such as range.
-func (t *Tree) action() (n Node) {
-	switch token := t.nextNonSpace(); token.typ {
-	case itemElse:
-		return t.elseControl()
-	case itemEnd:
-		return t.endControl()
-	case itemIf:
-		return t.ifControl()
-	case itemRange:
-		return t.rangeControl()
-	case itemTemplate:
-		return t.templateControl()
-	case itemWith:
-		return t.withControl()
-	}
-	t.backup()
-	// Do not pop variables; they persist until "end".
-	return t.newAction(t.peek().pos, t.lex.lineNumber(), t.pipeline("command"))
-}
-
-// Pipeline:
-//	declarations? command ('|' command)*
-func (t *Tree) pipeline(context string) (pipe *PipeNode) {
-	var decl []*VariableNode
-	pos := t.peekNonSpace().pos
-	// Are there declarations?
-	for {
-		if v := t.peekNonSpace(); v.typ == itemVariable {
-			t.next()
-			// Since space is a token, we need 3-token look-ahead here in the worst case:
-			// in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an
-			// argument variable rather than a declaration. So remember the token
-			// adjacent to the variable so we can push it back if necessary.
-			tokenAfterVariable := t.peek()
-			if next := t.peekNonSpace(); next.typ == itemColonEquals || (next.typ == itemChar && next.val == ",") {
-				t.nextNonSpace()
-				variable := t.newVariable(v.pos, v.val)
-				decl = append(decl, variable)
-				t.vars = append(t.vars, v.val)
-				if next.typ == itemChar && next.val == "," {
-					if context == "range" && len(decl) < 2 {
-						continue
-					}
-					t.errorf("too many declarations in %s", context)
-				}
-			} else if tokenAfterVariable.typ == itemSpace {
-				t.backup3(v, tokenAfterVariable)
-			} else {
-				t.backup2(v)
-			}
-		}
-		break
-	}
-	pipe = t.newPipeline(pos, t.lex.lineNumber(), decl)
-	for {
-		switch token := t.nextNonSpace(); token.typ {
-		case itemRightDelim, itemRightParen:
-			if len(pipe.Cmds) == 0 {
-				t.errorf("missing value for %s", context)
-			}
-			if token.typ == itemRightParen {
-				t.backup()
-			}
-			return
-		case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier,
-			itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen:
-			t.backup()
-			pipe.append(t.command())
-		default:
-			t.unexpected(token, context)
-		}
-	}
-}
-
-func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) {
-	defer t.popVars(len(t.vars))
-	line = t.lex.lineNumber()
-	pipe = t.pipeline(context)
-	var next Node
-	list, next = t.itemList()
-	switch next.Type() {
-	case nodeEnd: //done
-	case nodeElse:
-		if allowElseIf {
-			// Special case for "else if". If the "else" is followed immediately by an "if",
-			// the elseControl will have left the "if" token pending. Treat
-			//	{{if a}}_{{else if b}}_{{end}}
-			// as
-			//	{{if a}}_{{else}}{{if b}}_{{end}}{{end}}.
-			// To do this, parse the if as usual and stop at it {{end}}; the subsequent{{end}}
-			// is assumed. This technique works even for long if-else-if chains.
-			// TODO: Should we allow else-if in with and range?
-			if t.peek().typ == itemIf {
-				t.next() // Consume the "if" token.
-				elseList = t.newList(next.Position())
-				elseList.append(t.ifControl())
-				// Do not consume the next item - only one {{end}} required.
-				break
-			}
-		}
-		elseList, next = t.itemList()
-		if next.Type() != nodeEnd {
-			t.errorf("expected end; found %s", next)
-		}
-	}
-	return pipe.Position(), line, pipe, list, elseList
-}
-
-// If:
-//	{{if pipeline}} itemList {{end}}
-//	{{if pipeline}} itemList {{else}} itemList {{end}}
-// If keyword is past.
-func (t *Tree) ifControl() Node {
-	return t.newIf(t.parseControl(true, "if"))
-}
-
-// Range:
-//	{{range pipeline}} itemList {{end}}
-//	{{range pipeline}} itemList {{else}} itemList {{end}}
-// Range keyword is past.
-func (t *Tree) rangeControl() Node {
-	return t.newRange(t.parseControl(false, "range"))
-}
-
-// With:
-//	{{with pipeline}} itemList {{end}}
-//	{{with pipeline}} itemList {{else}} itemList {{end}}
-// If keyword is past.
-func (t *Tree) withControl() Node {
-	return t.newWith(t.parseControl(false, "with"))
-}
-
-// End:
-//	{{end}}
-// End keyword is past.
-func (t *Tree) endControl() Node {
-	return t.newEnd(t.expect(itemRightDelim, "end").pos)
-}
-
-// Else:
-//	{{else}}
-// Else keyword is past.
-func (t *Tree) elseControl() Node {
-	// Special case for "else if".
-	peek := t.peekNonSpace()
-	if peek.typ == itemIf {
-		// We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ".
-		return t.newElse(peek.pos, t.lex.lineNumber())
-	}
-	return t.newElse(t.expect(itemRightDelim, "else").pos, t.lex.lineNumber())
-}
-
-// Template:
-//	{{template stringValue pipeline}}
-// Template keyword is past.  The name must be something that can evaluate
-// to a string.
-func (t *Tree) templateControl() Node {
-	var name string
-	token := t.nextNonSpace()
-	switch token.typ {
-	case itemString, itemRawString:
-		s, err := strconv.Unquote(token.val)
-		if err != nil {
-			t.error(err)
-		}
-		name = s
-	default:
-		t.unexpected(token, "template invocation")
-	}
-	var pipe *PipeNode
-	if t.nextNonSpace().typ != itemRightDelim {
-		t.backup()
-		// Do not pop variables; they persist until "end".
-		pipe = t.pipeline("template")
-	}
-	return t.newTemplate(token.pos, t.lex.lineNumber(), name, pipe)
-}
-
-// command:
-//	operand (space operand)*
-// space-separated arguments up to a pipeline character or right delimiter.
-// we consume the pipe character but leave the right delim to terminate the action.
-func (t *Tree) command() *CommandNode {
-	cmd := t.newCommand(t.peekNonSpace().pos)
-	for {
-		t.peekNonSpace() // skip leading spaces.
-		operand := t.operand()
-		if operand != nil {
-			cmd.append(operand)
-		}
-		switch token := t.next(); token.typ {
-		case itemSpace:
-			continue
-		case itemError:
-			t.errorf("%s", token.val)
-		case itemRightDelim, itemRightParen:
-			t.backup()
-		case itemPipe:
-		default:
-			t.errorf("unexpected %s in operand; missing space?", token)
-		}
-		break
-	}
-	if len(cmd.Args) == 0 {
-		t.errorf("empty command")
-	}
-	return cmd
-}
-
-// operand:
-//	term .Field*
-// An operand is a space-separated component of a command,
-// a term possibly followed by field accesses.
-// A nil return means the next item is not an operand.
-func (t *Tree) operand() Node {
-	node := t.term()
-	if node == nil {
-		return nil
-	}
-	if t.peek().typ == itemField {
-		chain := t.newChain(t.peek().pos, node)
-		for t.peek().typ == itemField {
-			chain.Add(t.next().val)
-		}
-		// Compatibility with original API: If the term is of type NodeField
-		// or NodeVariable, just put more fields on the original.
-		// Otherwise, keep the Chain node.
-		// TODO: Switch to Chains always when we can.
-		switch node.Type() {
-		case NodeField:
-			node = t.newField(chain.Position(), chain.String())
-		case NodeVariable:
-			node = t.newVariable(chain.Position(), chain.String())
-		default:
-			node = chain
-		}
-	}
-	return node
-}
-
-// term:
-//	literal (number, string, nil, boolean)
-//	function (identifier)
-//	.
-//	.Field
-//	$
-//	'(' pipeline ')'
-// A term is a simple "expression".
-// A nil return means the next item is not a term.
-func (t *Tree) term() Node {
-	switch token := t.nextNonSpace(); token.typ {
-	case itemError:
-		t.errorf("%s", token.val)
-	case itemIdentifier:
-		if !t.hasFunction(token.val) {
-			t.errorf("function %q not defined", token.val)
-		}
-		return NewIdentifier(token.val).SetTree(t).SetPos(token.pos)
-	case itemDot:
-		return t.newDot(token.pos)
-	case itemNil:
-		return t.newNil(token.pos)
-	case itemVariable:
-		return t.useVar(token.pos, token.val)
-	case itemField:
-		return t.newField(token.pos, token.val)
-	case itemBool:
-		return t.newBool(token.pos, token.val == "true")
-	case itemCharConstant, itemComplex, itemNumber:
-		number, err := t.newNumber(token.pos, token.val, token.typ)
-		if err != nil {
-			t.error(err)
-		}
-		return number
-	case itemLeftParen:
-		pipe := t.pipeline("parenthesized pipeline")
-		if token := t.next(); token.typ != itemRightParen {
-			t.errorf("unclosed right paren: unexpected %s", token)
-		}
-		return pipe
-	case itemString, itemRawString:
-		s, err := strconv.Unquote(token.val)
-		if err != nil {
-			t.error(err)
-		}
-		return t.newString(token.pos, token.val, s)
-	}
-	t.backup()
-	return nil
-}
-
-// hasFunction reports if a function name exists in the Tree's maps.
-func (t *Tree) hasFunction(name string) bool {
-	for _, funcMap := range t.funcs {
-		if funcMap == nil {
-			continue
-		}
-		if funcMap[name] != nil {
-			return true
-		}
-	}
-	return false
-}
-
-// popVars trims the variable list to the specified length
-func (t *Tree) popVars(n int) {
-	t.vars = t.vars[:n]
-}
-
-// useVar returns a node for a variable reference. It errors if the
-// variable is not defined.
-func (t *Tree) useVar(pos Pos, name string) Node {
-	v := t.newVariable(pos, name)
-	for _, varName := range t.vars {
-		if varName == v.Ident[0] {
-			return v
-		}
-	}
-	t.errorf("undefined variable %q", v.Ident[0])
-	return nil
-}
diff --git a/vendor/github.com/alecthomas/template/template.go b/vendor/github.com/alecthomas/template/template.go
deleted file mode 100644
index 447ed2abaea9..000000000000
--- a/vendor/github.com/alecthomas/template/template.go
+++ /dev/null
@@ -1,218 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package template
-
-import (
-	"fmt"
-	"reflect"
-
-	"github.com/alecthomas/template/parse"
-)
-
-// common holds the information shared by related templates.
-type common struct {
-	tmpl map[string]*Template
-	// We use two maps, one for parsing and one for execution.
-	// This separation makes the API cleaner since it doesn't
-	// expose reflection to the client.
-	parseFuncs FuncMap
-	execFuncs  map[string]reflect.Value
-}
-
-// Template is the representation of a parsed template. The *parse.Tree
-// field is exported only for use by html/template and should be treated
-// as unexported by all other clients.
-type Template struct {
-	name string
-	*parse.Tree
-	*common
-	leftDelim  string
-	rightDelim string
-}
-
-// New allocates a new template with the given name.
-func New(name string) *Template {
-	return &Template{
-		name: name,
-	}
-}
-
-// Name returns the name of the template.
-func (t *Template) Name() string {
-	return t.name
-}
-
-// New allocates a new template associated with the given one and with the same
-// delimiters. The association, which is transitive, allows one template to
-// invoke another with a {{template}} action.
-func (t *Template) New(name string) *Template {
-	t.init()
-	return &Template{
-		name:       name,
-		common:     t.common,
-		leftDelim:  t.leftDelim,
-		rightDelim: t.rightDelim,
-	}
-}
-
-func (t *Template) init() {
-	if t.common == nil {
-		t.common = new(common)
-		t.tmpl = make(map[string]*Template)
-		t.parseFuncs = make(FuncMap)
-		t.execFuncs = make(map[string]reflect.Value)
-	}
-}
-
-// Clone returns a duplicate of the template, including all associated
-// templates. The actual representation is not copied, but the name space of
-// associated templates is, so further calls to Parse in the copy will add
-// templates to the copy but not to the original. Clone can be used to prepare
-// common templates and use them with variant definitions for other templates
-// by adding the variants after the clone is made.
-func (t *Template) Clone() (*Template, error) {
-	nt := t.copy(nil)
-	nt.init()
-	nt.tmpl[t.name] = nt
-	for k, v := range t.tmpl {
-		if k == t.name { // Already installed.
-			continue
-		}
-		// The associated templates share nt's common structure.
-		tmpl := v.copy(nt.common)
-		nt.tmpl[k] = tmpl
-	}
-	for k, v := range t.parseFuncs {
-		nt.parseFuncs[k] = v
-	}
-	for k, v := range t.execFuncs {
-		nt.execFuncs[k] = v
-	}
-	return nt, nil
-}
-
-// copy returns a shallow copy of t, with common set to the argument.
-func (t *Template) copy(c *common) *Template {
-	nt := New(t.name)
-	nt.Tree = t.Tree
-	nt.common = c
-	nt.leftDelim = t.leftDelim
-	nt.rightDelim = t.rightDelim
-	return nt
-}
-
-// AddParseTree creates a new template with the name and parse tree
-// and associates it with t.
-func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) {
-	if t.common != nil && t.tmpl[name] != nil {
-		return nil, fmt.Errorf("template: redefinition of template %q", name)
-	}
-	nt := t.New(name)
-	nt.Tree = tree
-	t.tmpl[name] = nt
-	return nt, nil
-}
-
-// Templates returns a slice of the templates associated with t, including t
-// itself.
-func (t *Template) Templates() []*Template {
-	if t.common == nil {
-		return nil
-	}
-	// Return a slice so we don't expose the map.
-	m := make([]*Template, 0, len(t.tmpl))
-	for _, v := range t.tmpl {
-		m = append(m, v)
-	}
-	return m
-}
-
-// Delims sets the action delimiters to the specified strings, to be used in
-// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template
-// definitions will inherit the settings. An empty delimiter stands for the
-// corresponding default: {{ or }}.
-// The return value is the template, so calls can be chained.
-func (t *Template) Delims(left, right string) *Template {
-	t.leftDelim = left
-	t.rightDelim = right
-	return t
-}
-
-// Funcs adds the elements of the argument map to the template's function map.
-// It panics if a value in the map is not a function with appropriate return
-// type. However, it is legal to overwrite elements of the map. The return
-// value is the template, so calls can be chained.
-func (t *Template) Funcs(funcMap FuncMap) *Template {
-	t.init()
-	addValueFuncs(t.execFuncs, funcMap)
-	addFuncs(t.parseFuncs, funcMap)
-	return t
-}
-
-// Lookup returns the template with the given name that is associated with t,
-// or nil if there is no such template.
-func (t *Template) Lookup(name string) *Template {
-	if t.common == nil {
-		return nil
-	}
-	return t.tmpl[name]
-}
-
-// Parse parses a string into a template. Nested template definitions will be
-// associated with the top-level template t. Parse may be called multiple times
-// to parse definitions of templates to associate with t. It is an error if a
-// resulting template is non-empty (contains content other than template
-// definitions) and would replace a non-empty template with the same name.
-// (In multiple calls to Parse with the same receiver template, only one call
-// can contain text other than space, comments, and template definitions.)
-func (t *Template) Parse(text string) (*Template, error) {
-	t.init()
-	trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins)
-	if err != nil {
-		return nil, err
-	}
-	// Add the newly parsed trees, including the one for t, into our common structure.
-	for name, tree := range trees {
-		// If the name we parsed is the name of this template, overwrite this template.
-		// The associate method checks it's not a redefinition.
-		tmpl := t
-		if name != t.name {
-			tmpl = t.New(name)
-		}
-		// Even if t == tmpl, we need to install it in the common.tmpl map.
-		if replace, err := t.associate(tmpl, tree); err != nil {
-			return nil, err
-		} else if replace {
-			tmpl.Tree = tree
-		}
-		tmpl.leftDelim = t.leftDelim
-		tmpl.rightDelim = t.rightDelim
-	}
-	return t, nil
-}
-
-// associate installs the new template into the group of templates associated
-// with t. It is an error to reuse a name except to overwrite an empty
-// template. The two are already known to share the common structure.
-// The boolean return value reports wither to store this tree as t.Tree.
-func (t *Template) associate(new *Template, tree *parse.Tree) (bool, error) {
-	if new.common != t.common {
-		panic("internal error: associate not common")
-	}
-	name := new.name
-	if old := t.tmpl[name]; old != nil {
-		oldIsEmpty := parse.IsEmptyTree(old.Root)
-		newIsEmpty := parse.IsEmptyTree(tree.Root)
-		if newIsEmpty {
-			// Whether old is empty or not, new is empty; no reason to replace old.
-			return false, nil
-		}
-		if !oldIsEmpty {
-			return false, fmt.Errorf("template: redefinition of template %q", name)
-		}
-	}
-	t.tmpl[name] = new
-	return true, nil
-}
diff --git a/vendor/github.com/bmatcuk/doublestar/.travis.yml b/vendor/github.com/bmatcuk/doublestar/.travis.yml
deleted file mode 100644
index 78a90e7b5c26..000000000000
--- a/vendor/github.com/bmatcuk/doublestar/.travis.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-language: go
-
-go:
-  - 1.12
-  - 1.13
-  - 1.14
-
-os:
-  - linux
-  - windows
-
-before_install:
-  - go get -t -v ./...
-
-script:
-  - go test -race -coverprofile=coverage.txt -covermode=atomic
-
-after_success:
-  - bash <(curl -s https://codecov.io/bash)
-
diff --git a/vendor/github.com/bmatcuk/doublestar/README.md b/vendor/github.com/bmatcuk/doublestar/README.md
deleted file mode 100644
index 074b77d85df7..000000000000
--- a/vendor/github.com/bmatcuk/doublestar/README.md
+++ /dev/null
@@ -1,140 +0,0 @@
-# doublestar
-
-Path pattern matching and globbing supporting `doublestar` (`**`) patterns.
-
-![Release](https://img.shields.io/github/release/bmatcuk/doublestar.svg?branch=master)
-[![Build Status](https://travis-ci.org/bmatcuk/doublestar.svg?branch=master)](https://travis-ci.org/bmatcuk/doublestar)
-[![codecov.io](https://img.shields.io/codecov/c/github/bmatcuk/doublestar.svg?branch=master)](https://codecov.io/github/bmatcuk/doublestar?branch=master)
-
-## About
-
-**doublestar** is a [golang](http://golang.org/) implementation of path pattern
-matching and globbing with support for "doublestar" (aka globstar: `**`)
-patterns.
-
-doublestar patterns match files and directories recursively. For example, if
-you had the following directory structure:
-
-```bash
-grandparent
-`-- parent
-    |-- child1
-    `-- child2
-```
-
-You could find the children with patterns such as: `**/child*`,
-`grandparent/**/child?`, `**/parent/*`, or even just `**` by itself (which will
-return all files and directories recursively).
-
-Bash's globstar is doublestar's inspiration and, as such, works similarly.
-Note that the doublestar must appear as a path component by itself. A pattern
-such as `/path**` is invalid and will be treated the same as `/path*`, but
-`/path*/**` should achieve the desired result. Additionally, `/path/**` will
-match all directories and files under the path directory, but `/path/**/` will
-only match directories.
-
-## Installation
-
-**doublestar** can be installed via `go get`:
-
-```bash
-go get github.com/bmatcuk/doublestar
-```
-
-To use it in your code, you must import it:
-
-```go
-import "github.com/bmatcuk/doublestar"
-```
-
-## Usage
-
-### Match
-
-```go
-func Match(pattern, name string) (bool, error)
-```
-
-Match returns true if `name` matches the file name `pattern`
-([see below](#patterns)). `name` and `pattern` are split on forward slash (`/`)
-characters and may be relative or absolute.
-
-Note: `Match()` is meant to be a drop-in replacement for `path.Match()`. As
-such, it always uses `/` as the path separator. If you are writing code that
-will run on systems where `/` is not the path separator (such as Windows), you
-want to use `PathMatch()` (below) instead.
-
-
-### PathMatch
-
-```go
-func PathMatch(pattern, name string) (bool, error)
-```
-
-PathMatch returns true if `name` matches the file name `pattern`
-([see below](#patterns)). The difference between Match and PathMatch is that
-PathMatch will automatically use your system's path separator to split `name`
-and `pattern`.
-
-`PathMatch()` is meant to be a drop-in replacement for `filepath.Match()`.
-
-### Glob
-
-```go
-func Glob(pattern string) ([]string, error)
-```
-
-Glob finds all files and directories in the filesystem that match `pattern`
-([see below](#patterns)). `pattern` may be relative (to the current working
-directory), or absolute.
-
-`Glob()` is meant to be a drop-in replacement for `filepath.Glob()`.
-
-### Patterns
-
-**doublestar** supports the following special terms in the patterns:
-
-Special Terms | Meaning
-------------- | -------
-`*`           | matches any sequence of non-path-separators
-`**`          | matches any sequence of characters, including path separators
-`?`           | matches any single non-path-separator character
-`[class]`     | matches any single non-path-separator character against a class of characters ([see below](#character-classes))
-`{alt1,...}`  | matches a sequence of characters if one of the comma-separated alternatives matches
-
-Any character with a special meaning can be escaped with a backslash (`\`).
-
-#### Character Classes
-
-Character classes support the following:
-
-Class      | Meaning
----------- | -------
-`[abc]`    | matches any single character within the set
-`[a-z]`    | matches any single character in the range
-`[^class]` | matches any single character which does *not* match the class
-
-### Abstracting the `os` package
-
-**doublestar** by default uses the `Open`, `Stat`, and `Lstat`, functions and
-`PathSeparator` value from the standard library's `os` package. To abstract
-this, for example to be able to perform tests of Windows paths on Linux, or to
-interoperate with your own filesystem code, it includes the functions `GlobOS`
-and `PathMatchOS` which are identical to `Glob` and `PathMatch` except that they
-operate on an `OS` interface:
-
-```go
-type OS interface {
-    Lstat(name string) (os.FileInfo, error)
-    Open(name string) (*os.File, error)
-    PathSeparator() rune
-    Stat(name string) (os.FileInfo, error)
-}
-```
-
-`StandardOS` is a value that implements this interface by calling functions in
-the standard library's `os` package.
-
-## License
-
-[MIT License](LICENSE)
diff --git a/vendor/github.com/bmatcuk/doublestar/doublestar.go b/vendor/github.com/bmatcuk/doublestar/doublestar.go
deleted file mode 100644
index 206e012027f6..000000000000
--- a/vendor/github.com/bmatcuk/doublestar/doublestar.go
+++ /dev/null
@@ -1,684 +0,0 @@
-package doublestar
-
-import (
-	"fmt"
-	"os"
-	"path"
-	"path/filepath"
-	"sort"
-	"strings"
-	"unicode/utf8"
-)
-
-// An OS abstracts functions in the standard library's os package.
-type OS interface {
-	Lstat(name string) (os.FileInfo, error)
-	Open(name string) (*os.File, error)
-	PathSeparator() rune
-	Stat(name string) (os.FileInfo, error)
-}
-
-// StandardOS is a value that implements the OS interface by calling functions
-// in the standard libray's os package.
-var StandardOS OS = standardOS{}
-
-// A standardOS implements OS by calling functions in the standard library's os
-// package.
-type standardOS struct{}
-
-func (standardOS) Lstat(name string) (os.FileInfo, error) { return os.Lstat(name) }
-func (standardOS) Open(name string) (*os.File, error)     { return os.Open(name) }
-func (standardOS) PathSeparator() rune                    { return os.PathSeparator }
-func (standardOS) Stat(name string) (os.FileInfo, error)  { return os.Stat(name) }
-
-// ErrBadPattern indicates a pattern was malformed.
-var ErrBadPattern = path.ErrBadPattern
-
-// Split a path on the given separator, respecting escaping.
-func splitPathOnSeparator(path string, separator rune) (ret []string) {
-	idx := 0
-	if separator == '\\' {
-		// if the separator is '\\', then we can just split...
-		ret = strings.Split(path, string(separator))
-		idx = len(ret)
-	} else {
-		// otherwise, we need to be careful of situations where the separator was escaped
-		cnt := strings.Count(path, string(separator))
-		if cnt == 0 {
-			return []string{path}
-		}
-
-		ret = make([]string, cnt+1)
-		pathlen := len(path)
-		separatorLen := utf8.RuneLen(separator)
-		emptyEnd := false
-		for start := 0; start < pathlen; {
-			end := indexRuneWithEscaping(path[start:], separator)
-			if end == -1 {
-				emptyEnd = false
-				end = pathlen
-			} else {
-				emptyEnd = true
-				end += start
-			}
-			ret[idx] = path[start:end]
-			start = end + separatorLen
-			idx++
-		}
-
-		// If the last rune is a path separator, we need to append an empty string to
-		// represent the last, empty path component. By default, the strings from
-		// make([]string, ...) will be empty, so we just need to icrement the count
-		if emptyEnd {
-			idx++
-		}
-	}
-
-	return ret[:idx]
-}
-
-// Find the first index of a rune in a string,
-// ignoring any times the rune is escaped using "\".
-func indexRuneWithEscaping(s string, r rune) int {
-	end := strings.IndexRune(s, r)
-	if end == -1 {
-		return -1
-	}
-	if end > 0 && s[end-1] == '\\' {
-		start := end + utf8.RuneLen(r)
-		end = indexRuneWithEscaping(s[start:], r)
-		if end != -1 {
-			end += start
-		}
-	}
-	return end
-}
-
-// Find the last index of a rune in a string,
-// ignoring any times the rune is escaped using "\".
-func lastIndexRuneWithEscaping(s string, r rune) int {
-	end := strings.LastIndex(s, string(r))
-	if end == -1 {
-		return -1
-	}
-	if end > 0 && s[end-1] == '\\' {
-		end = lastIndexRuneWithEscaping(s[:end-1], r)
-	}
-	return end
-}
-
-// Find the index of the first instance of one of the unicode characters in
-// chars, ignoring any times those characters are escaped using "\".
-func indexAnyWithEscaping(s, chars string) int {
-	end := strings.IndexAny(s, chars)
-	if end == -1 {
-		return -1
-	}
-	if end > 0 && s[end-1] == '\\' {
-		_, adj := utf8.DecodeRuneInString(s[end:])
-		start := end + adj
-		end = indexAnyWithEscaping(s[start:], chars)
-		if end != -1 {
-			end += start
-		}
-	}
-	return end
-}
-
-// Split a set of alternatives such as {alt1,alt2,...} and returns the index of
-// the rune after the closing curly brace. Respects nested alternatives and
-// escaped runes.
-func splitAlternatives(s string) (ret []string, idx int) {
-	ret = make([]string, 0, 2)
-	idx = 0
-	slen := len(s)
-	braceCnt := 1
-	esc := false
-	start := 0
-	for braceCnt > 0 {
-		if idx >= slen {
-			return nil, -1
-		}
-
-		sRune, adj := utf8.DecodeRuneInString(s[idx:])
-		if esc {
-			esc = false
-		} else if sRune == '\\' {
-			esc = true
-		} else if sRune == '{' {
-			braceCnt++
-		} else if sRune == '}' {
-			braceCnt--
-		} else if sRune == ',' && braceCnt == 1 {
-			ret = append(ret, s[start:idx])
-			start = idx + adj
-		}
-
-		idx += adj
-	}
-	ret = append(ret, s[start:idx-1])
-	return
-}
-
-// Returns true if the pattern is "zero length", meaning
-// it could match zero or more characters.
-func isZeroLengthPattern(pattern string) (ret bool, err error) {
-	// * can match zero
-	if pattern == "" || pattern == "*" || pattern == "**" {
-		return true, nil
-	}
-
-	// an alternative with zero length can match zero, for example {,x} - the
-	// first alternative has zero length
-	r, adj := utf8.DecodeRuneInString(pattern)
-	if r == '{' {
-		options, endOptions := splitAlternatives(pattern[adj:])
-		if endOptions == -1 {
-			return false, ErrBadPattern
-		}
-		if ret, err = isZeroLengthPattern(pattern[adj+endOptions:]); !ret || err != nil {
-			return
-		}
-		for _, o := range options {
-			if ret, err = isZeroLengthPattern(o); ret || err != nil {
-				return
-			}
-		}
-	}
-
-	return false, nil
-}
-
-// Match returns true if name matches the shell file name pattern.
-// The pattern syntax is:
-//
-//  pattern:
-//    { term }
-//  term:
-//    '*'         matches any sequence of non-path-separators
-//    '**'        matches any sequence of characters, including
-//                path separators.
-//    '?'         matches any single non-path-separator character
-//    '[' [ '^' ] { character-range } ']'
-//          character class (must be non-empty)
-//    '{' { term } [ ',' { term } ... ] '}'
-//    c           matches character c (c != '*', '?', '\\', '[')
-//    '\\' c      matches character c
-//
-//  character-range:
-//    c           matches character c (c != '\\', '-', ']')
-//    '\\' c      matches character c
-//    lo '-' hi   matches character c for lo <= c <= hi
-//
-// Match requires pattern to match all of name, not just a substring.
-// The path-separator defaults to the '/' character. The only possible
-// returned error is ErrBadPattern, when pattern is malformed.
-//
-// Note: this is meant as a drop-in replacement for path.Match() which
-// always uses '/' as the path separator. If you want to support systems
-// which use a different path separator (such as Windows), what you want
-// is the PathMatch() function below.
-//
-func Match(pattern, name string) (bool, error) {
-	return matchWithSeparator(pattern, name, '/')
-}
-
-// PathMatch is like Match except that it uses your system's path separator.
-// For most systems, this will be '/'. However, for Windows, it would be '\\'.
-// Note that for systems where the path separator is '\\', escaping is
-// disabled.
-//
-// Note: this is meant as a drop-in replacement for filepath.Match().
-//
-func PathMatch(pattern, name string) (bool, error) {
-	return PathMatchOS(StandardOS, pattern, name)
-}
-
-// PathMatchOS is like PathMatch except that it uses vos's path separator.
-func PathMatchOS(vos OS, pattern, name string) (bool, error) {
-	pattern = filepath.ToSlash(pattern)
-	return matchWithSeparator(pattern, name, vos.PathSeparator())
-}
-
-// Match returns true if name matches the shell file name pattern.
-// The pattern syntax is:
-//
-//  pattern:
-//    { term }
-//  term:
-//    '*'         matches any sequence of non-path-separators
-//              '**'        matches any sequence of characters, including
-//                          path separators.
-//    '?'         matches any single non-path-separator character
-//    '[' [ '^' ] { character-range } ']'
-//          character class (must be non-empty)
-//    '{' { term } [ ',' { term } ... ] '}'
-//    c           matches character c (c != '*', '?', '\\', '[')
-//    '\\' c      matches character c
-//
-//  character-range:
-//    c           matches character c (c != '\\', '-', ']')
-//    '\\' c      matches character c, unless separator is '\\'
-//    lo '-' hi   matches character c for lo <= c <= hi
-//
-// Match requires pattern to match all of name, not just a substring.
-// The only possible returned error is ErrBadPattern, when pattern
-// is malformed.
-//
-func matchWithSeparator(pattern, name string, separator rune) (bool, error) {
-	nameComponents := splitPathOnSeparator(name, separator)
-	return doMatching(pattern, nameComponents)
-}
-
-func doMatching(pattern string, nameComponents []string) (matched bool, err error) {
-	// check for some base-cases
-	patternLen, nameLen := len(pattern), len(nameComponents)
-	if patternLen == 0 && nameLen == 0 {
-		return true, nil
-	}
-	if patternLen == 0 {
-		if nameLen == 1 && nameComponents[0] == "" {
-			return true, nil
-		} else if nameLen == 0 {
-			return false, nil
-		}
-	}
-
-	slashIdx := indexRuneWithEscaping(pattern, '/')
-	lastComponent := slashIdx == -1
-	if lastComponent {
-		slashIdx = len(pattern)
-	}
-	if pattern[:slashIdx] == "**" {
-		// if our last pattern component is a doublestar, we're done -
-		// doublestar will match any remaining name components, if any.
-		if lastComponent {
-			return true, nil
-		}
-
-		// otherwise, try matching remaining components
-		for nameIdx := 0; nameIdx < nameLen; nameIdx++ {
-			if m, _ := doMatching(pattern[slashIdx+1:], nameComponents[nameIdx:]); m {
-				return true, nil
-			}
-		}
-		return false, nil
-	}
-
-	var matches []string
-	matches, err = matchComponent(pattern, nameComponents[0])
-	if matches == nil || err != nil {
-		return
-	}
-	if len(matches) == 0 && nameLen == 1 {
-		return true, nil
-	}
-
-	if nameLen > 1 {
-		for _, alt := range matches {
-			matched, err = doMatching(alt, nameComponents[1:])
-			if matched || err != nil {
-				return
-			}
-		}
-	}
-
-	return false, nil
-}
-
-// Glob returns the names of all files matching pattern or nil
-// if there is no matching file. The syntax of pattern is the same
-// as in Match. The pattern may describe hierarchical names such as
-// /usr/*/bin/ed (assuming the Separator is '/').
-//
-// Glob ignores file system errors such as I/O errors reading directories.
-// The only possible returned error is ErrBadPattern, when pattern
-// is malformed.
-//
-// Your system path separator is automatically used. This means on
-// systems where the separator is '\\' (Windows), escaping will be
-// disabled.
-//
-// Note: this is meant as a drop-in replacement for filepath.Glob().
-//
-func Glob(pattern string) (matches []string, err error) {
-	return GlobOS(StandardOS, pattern)
-}
-
-// GlobOS is like Glob except that it operates on vos.
-func GlobOS(vos OS, pattern string) (matches []string, err error) {
-	if len(pattern) == 0 {
-		return nil, nil
-	}
-
-	// if the pattern starts with alternatives, we need to handle that here - the
-	// alternatives may be a mix of relative and absolute
-	if pattern[0] == '{' {
-		options, endOptions := splitAlternatives(pattern[1:])
-		if endOptions == -1 {
-			return nil, ErrBadPattern
-		}
-		for _, o := range options {
-			m, e := GlobOS(vos, o+pattern[endOptions+1:])
-			if e != nil {
-				return nil, e
-			}
-			matches = append(matches, m...)
-		}
-		return matches, nil
-	}
-
-	// If the pattern is relative or absolute and we're on a non-Windows machine,
-	// volumeName will be an empty string. If it is absolute and we're on a
-	// Windows machine, volumeName will be a drive letter ("C:") for filesystem
-	// paths or \\\ for UNC paths.
-	isAbs := filepath.IsAbs(pattern) || pattern[0] == '\\' || pattern[0] == '/'
-	volumeName := filepath.VolumeName(pattern)
-	isWindowsUNC := strings.HasPrefix(volumeName, `\\`)
-	if isWindowsUNC || isAbs {
-		startIdx := len(volumeName) + 1
-		return doGlob(vos, fmt.Sprintf("%s%s", volumeName, string(vos.PathSeparator())), filepath.ToSlash(pattern[startIdx:]), matches)
-	}
-
-	// otherwise, it's a relative pattern
-	return doGlob(vos, ".", filepath.ToSlash(pattern), matches)
-}
-
-// Perform a glob
-func doGlob(vos OS, basedir, pattern string, matches []string) (m []string, e error) {
-	m = matches
-	e = nil
-
-	// if the pattern starts with any path components that aren't globbed (ie,
-	// `path/to/glob*`), we can skip over the un-globbed components (`path/to` in
-	// our example).
-	globIdx := indexAnyWithEscaping(pattern, "*?[{\\")
-	if globIdx > 0 {
-		globIdx = lastIndexRuneWithEscaping(pattern[:globIdx], '/')
-	} else if globIdx == -1 {
-		globIdx = lastIndexRuneWithEscaping(pattern, '/')
-	}
-	if globIdx > 0 {
-		basedir = filepath.Join(basedir, pattern[:globIdx])
-		pattern = pattern[globIdx+1:]
-	}
-
-	// Lstat will return an error if the file/directory doesn't exist
-	fi, err := vos.Lstat(basedir)
-	if err != nil {
-		return
-	}
-
-	// if the pattern is empty, we've found a match
-	if len(pattern) == 0 {
-		m = append(m, basedir)
-		return
-	}
-
-	// otherwise, we need to check each item in the directory...
-
-	// first, if basedir is a symlink, follow it...
-	if (fi.Mode() & os.ModeSymlink) != 0 {
-		fi, err = vos.Stat(basedir)
-		if err != nil {
-			return
-		}
-	}
-
-	// confirm it's a directory...
-	if !fi.IsDir() {
-		return
-	}
-
-	files, err := filesInDir(vos, basedir)
-	if err != nil {
-		return
-	}
-
-	sort.Slice(files, func(i, j int) bool { return files[i].Name() < files[j].Name() })
-
-	slashIdx := indexRuneWithEscaping(pattern, '/')
-	lastComponent := slashIdx == -1
-	if lastComponent {
-		slashIdx = len(pattern)
-	}
-	if pattern[:slashIdx] == "**" {
-		// if the current component is a doublestar, we'll try depth-first
-		for _, file := range files {
-			// if symlink, we may want to follow
-			if (file.Mode() & os.ModeSymlink) != 0 {
-				file, err = vos.Stat(filepath.Join(basedir, file.Name()))
-				if err != nil {
-					continue
-				}
-			}
-
-			if file.IsDir() {
-				// recurse into directories
-				if lastComponent {
-					m = append(m, filepath.Join(basedir, file.Name()))
-				}
-				m, e = doGlob(vos, filepath.Join(basedir, file.Name()), pattern, m)
-			} else if lastComponent {
-				// if the pattern's last component is a doublestar, we match filenames, too
-				m = append(m, filepath.Join(basedir, file.Name()))
-			}
-		}
-		if lastComponent {
-			return // we're done
-		}
-
-		pattern = pattern[slashIdx+1:]
-	}
-
-	// check items in current directory and recurse
-	var match []string
-	for _, file := range files {
-		match, e = matchComponent(pattern, file.Name())
-		if e != nil {
-			return
-		}
-		if match != nil {
-			if len(match) == 0 {
-				m = append(m, filepath.Join(basedir, file.Name()))
-			} else {
-				for _, alt := range match {
-					m, e = doGlob(vos, filepath.Join(basedir, file.Name()), alt, m)
-				}
-			}
-		}
-	}
-	return
-}
-
-func filesInDir(vos OS, dirPath string) (files []os.FileInfo, e error) {
-	dir, err := vos.Open(dirPath)
-	if err != nil {
-		return nil, nil
-	}
-	defer func() {
-		if err := dir.Close(); e == nil {
-			e = err
-		}
-	}()
-
-	files, err = dir.Readdir(-1)
-	if err != nil {
-		return nil, nil
-	}
-
-	return
-}
-
-// Attempt to match a single path component with a pattern. Note that the
-// pattern may include multiple components but that the "name" is just a single
-// path component. The return value is a slice of patterns that should be
-// checked against subsequent path components or nil, indicating that the
-// pattern does not match this path. It is assumed that pattern components are
-// separated by '/'
-func matchComponent(pattern, name string) ([]string, error) {
-	// check for matches one rune at a time
-	patternLen, nameLen := len(pattern), len(name)
-	patIdx, nameIdx := 0, 0
-	for patIdx < patternLen && nameIdx < nameLen {
-		patRune, patAdj := utf8.DecodeRuneInString(pattern[patIdx:])
-		nameRune, nameAdj := utf8.DecodeRuneInString(name[nameIdx:])
-		if patRune == '/' {
-			patIdx++
-			break
-		} else if patRune == '\\' {
-			// handle escaped runes, only if separator isn't '\\'
-			patIdx += patAdj
-			patRune, patAdj = utf8.DecodeRuneInString(pattern[patIdx:])
-			if patRune == utf8.RuneError {
-				return nil, ErrBadPattern
-			} else if patRune == nameRune {
-				patIdx += patAdj
-				nameIdx += nameAdj
-			} else {
-				return nil, nil
-			}
-		} else if patRune == '*' {
-			// handle stars - a star at the end of the pattern or before a separator
-			// will always match the rest of the path component
-			if patIdx += patAdj; patIdx >= patternLen {
-				return []string{}, nil
-			}
-			if patRune, patAdj = utf8.DecodeRuneInString(pattern[patIdx:]); patRune == '/' {
-				return []string{pattern[patIdx+patAdj:]}, nil
-			}
-
-			// check if we can make any matches
-			for ; nameIdx < nameLen; nameIdx += nameAdj {
-				if m, e := matchComponent(pattern[patIdx:], name[nameIdx:]); m != nil || e != nil {
-					return m, e
-				}
-				_, nameAdj = utf8.DecodeRuneInString(name[nameIdx:])
-			}
-			return nil, nil
-		} else if patRune == '[' {
-			// handle character sets
-			patIdx += patAdj
-			endClass := indexRuneWithEscaping(pattern[patIdx:], ']')
-			if endClass == -1 {
-				return nil, ErrBadPattern
-			}
-			endClass += patIdx
-			classRunes := []rune(pattern[patIdx:endClass])
-			classRunesLen := len(classRunes)
-			if classRunesLen > 0 {
-				classIdx := 0
-				matchClass := false
-				if classRunes[0] == '^' {
-					classIdx++
-				}
-				for classIdx < classRunesLen {
-					low := classRunes[classIdx]
-					if low == '-' {
-						return nil, ErrBadPattern
-					}
-					classIdx++
-					if low == '\\' {
-						if classIdx < classRunesLen {
-							low = classRunes[classIdx]
-							classIdx++
-						} else {
-							return nil, ErrBadPattern
-						}
-					}
-					high := low
-					if classIdx < classRunesLen && classRunes[classIdx] == '-' {
-						// we have a range of runes
-						if classIdx++; classIdx >= classRunesLen {
-							return nil, ErrBadPattern
-						}
-						high = classRunes[classIdx]
-						if high == '-' {
-							return nil, ErrBadPattern
-						}
-						classIdx++
-						if high == '\\' {
-							if classIdx < classRunesLen {
-								high = classRunes[classIdx]
-								classIdx++
-							} else {
-								return nil, ErrBadPattern
-							}
-						}
-					}
-					if low <= nameRune && nameRune <= high {
-						matchClass = true
-					}
-				}
-				if matchClass == (classRunes[0] == '^') {
-					return nil, nil
-				}
-			} else {
-				return nil, ErrBadPattern
-			}
-			patIdx = endClass + 1
-			nameIdx += nameAdj
-		} else if patRune == '{' {
-			// handle alternatives such as {alt1,alt2,...}
-			patIdx += patAdj
-			options, endOptions := splitAlternatives(pattern[patIdx:])
-			if endOptions == -1 {
-				return nil, ErrBadPattern
-			}
-			patIdx += endOptions
-
-			results := make([][]string, 0, len(options))
-			totalResults := 0
-			for _, o := range options {
-				m, e := matchComponent(o+pattern[patIdx:], name[nameIdx:])
-				if e != nil {
-					return nil, e
-				}
-				if m != nil {
-					results = append(results, m)
-					totalResults += len(m)
-				}
-			}
-			if len(results) > 0 {
-				lst := make([]string, 0, totalResults)
-				for _, m := range results {
-					lst = append(lst, m...)
-				}
-				return lst, nil
-			}
-
-			return nil, nil
-		} else if patRune == '?' || patRune == nameRune {
-			// handle single-rune wildcard
-			patIdx += patAdj
-			nameIdx += nameAdj
-		} else {
-			return nil, nil
-		}
-	}
-	if nameIdx >= nameLen {
-		if patIdx >= patternLen {
-			return []string{}, nil
-		}
-
-		pattern = pattern[patIdx:]
-		slashIdx := indexRuneWithEscaping(pattern, '/')
-		testPattern := pattern
-		if slashIdx >= 0 {
-			testPattern = pattern[:slashIdx]
-		}
-
-		zeroLength, err := isZeroLengthPattern(testPattern)
-		if err != nil {
-			return nil, err
-		}
-		if zeroLength {
-			if slashIdx == -1 {
-				return []string{}, nil
-			} else {
-				return []string{pattern[slashIdx+1:]}, nil
-			}
-		}
-	}
-	return nil, nil
-}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml b/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml
new file mode 100644
index 000000000000..db6e504a9ac7
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml
@@ -0,0 +1,10 @@
+coverage:
+  status:
+    project:
+      default:
+        threshold: 1%
+    patch:
+      default:
+        target: 70%
+ignore:
+  - globoptions.go
diff --git a/vendor/github.com/bmatcuk/doublestar/.gitignore b/vendor/github.com/bmatcuk/doublestar/v4/.gitignore
similarity index 100%
rename from vendor/github.com/bmatcuk/doublestar/.gitignore
rename to vendor/github.com/bmatcuk/doublestar/v4/.gitignore
diff --git a/vendor/github.com/bmatcuk/doublestar/LICENSE b/vendor/github.com/bmatcuk/doublestar/v4/LICENSE
similarity index 100%
rename from vendor/github.com/bmatcuk/doublestar/LICENSE
rename to vendor/github.com/bmatcuk/doublestar/v4/LICENSE
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/README.md b/vendor/github.com/bmatcuk/doublestar/v4/README.md
new file mode 100644
index 000000000000..21929a95450c
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/README.md
@@ -0,0 +1,431 @@
+# doublestar
+
+Path pattern matching and globbing supporting `doublestar` (`**`) patterns.
+
+[![PkgGoDev](https://pkg.go.dev/badge/github.com/bmatcuk/doublestar)](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4)
+[![Release](https://img.shields.io/github/release/bmatcuk/doublestar.svg?branch=master)](https://github.com/bmatcuk/doublestar/releases)
+[![Build Status](https://github.com/bmatcuk/doublestar/actions/workflows/test.yml/badge.svg)](https://github.com/bmatcuk/doublestar/actions)
+[![codecov.io](https://img.shields.io/codecov/c/github/bmatcuk/doublestar.svg?branch=master)](https://codecov.io/github/bmatcuk/doublestar?branch=master)
+[![Sponsor](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/bmatcuk)
+
+## About
+
+#### [Upgrading?](UPGRADING.md)
+
+**doublestar** is a [golang] implementation of path pattern matching and
+globbing with support for "doublestar" (aka globstar: `**`) patterns.
+
+doublestar patterns match files and directories recursively. For example, if
+you had the following directory structure:
+
+```bash
+grandparent
+`-- parent
+    |-- child1
+    `-- child2
+```
+
+You could find the children with patterns such as: `**/child*`,
+`grandparent/**/child?`, `**/parent/*`, or even just `**` by itself (which will
+return all files and directories recursively).
+
+Bash's globstar is doublestar's inspiration and, as such, works similarly.
+Note that the doublestar must appear as a path component by itself. A pattern
+such as `/path**` is invalid and will be treated the same as `/path*`, but
+`/path*/**` should achieve the desired result. Additionally, `/path/**` will
+match all directories and files under the path directory, but `/path/**/` will
+only match directories.
+
+v4 is a complete rewrite with a focus on performance. Additionally,
+[doublestar] has been updated to use the new [io/fs] package for filesystem
+access. As a result, it is only supported by [golang] v1.16+.
+
+## Installation
+
+**doublestar** can be installed via `go get`:
+
+```bash
+go get github.com/bmatcuk/doublestar/v4
+```
+
+To use it in your code, you must import it:
+
+```go
+import "github.com/bmatcuk/doublestar/v4"
+```
+
+## Usage
+
+### ErrBadPattern
+
+```go
+doublestar.ErrBadPattern
+```
+
+Returned by various functions to report that the pattern is malformed. At the
+moment, this value is equal to `path.ErrBadPattern`, but, for portability, this
+equivalence should probably not be relied upon.
+
+### Match
+
+```go
+func Match(pattern, name string) (bool, error)
+```
+
+Match returns true if `name` matches the file name `pattern` ([see
+"patterns"]). `name` and `pattern` are split on forward slash (`/`) characters
+and may be relative or absolute.
+
+Match requires pattern to match all of name, not just a substring. The only
+possible returned error is `ErrBadPattern`, when pattern is malformed.
+
+Note: this is meant as a drop-in replacement for `path.Match()` which always
+uses `'/'` as the path separator. If you want to support systems which use a
+different path separator (such as Windows), what you want is `PathMatch()`.
+Alternatively, you can run `filepath.ToSlash()` on both pattern and name and
+then use this function.
+
+Note: users should _not_ count on the returned error,
+`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`.
+
+
+### MatchUnvalidated
+
+```go
+func MatchUnvalidated(pattern, name string) bool
+```
+
+MatchUnvalidated can provide a small performance improvement if you don't care
+about whether or not the pattern is valid (perhaps because you already ran
+`ValidatePattern`). Note that there's really only one case where this
+performance improvement is realized: when pattern matching reaches the end of
+`name` before reaching the end of `pattern`, such as `Match("a/b/c", "a")`.
+
+
+### PathMatch
+
+```go
+func PathMatch(pattern, name string) (bool, error)
+```
+
+PathMatch returns true if `name` matches the file name `pattern` ([see
+"patterns"]). The difference between Match and PathMatch is that PathMatch will
+automatically use your system's path separator to split `name` and `pattern`.
+On systems where the path separator is `'\'`, escaping will be disabled.
+
+Note: this is meant as a drop-in replacement for `filepath.Match()`. It assumes
+that both `pattern` and `name` are using the system's path separator. If you
+can't be sure of that, use `filepath.ToSlash()` on both `pattern` and `name`,
+and then use the `Match()` function instead.
+
+
+### PathMatchUnvalidated
+
+```go
+func PathMatchUnvalidated(pattern, name string) bool
+```
+
+PathMatchUnvalidated can provide a small performance improvement if you don't
+care about whether or not the pattern is valid (perhaps because you already ran
+`ValidatePattern`). Note that there's really only one case where this
+performance improvement is realized: when pattern matching reaches the end of
+`name` before reaching the end of `pattern`, such as `Match("a/b/c", "a")`.
+
+
+### GlobOption
+
+Options that may be passed to `Glob`, `GlobWalk`, or `FilepathGlob`. Any number
+of options may be passed to these functions, and in any order, as the last
+argument(s).
+
+```go
+WithFailOnIOErrors()
+```
+
+If passed, doublestar will abort and return IO errors when encountered. Note
+that if the glob pattern references a path that does not exist (such as
+`nonexistent/path/*`), this is _not_ considered an IO error: it is considered a
+pattern with no matches.
+
+```go
+WithFailOnPatternNotExist()
+```
+
+If passed, doublestar will abort and return `doublestar.ErrPatternNotExist` if
+the pattern references a path that does not exist before any meta characters
+such as `nonexistent/path/*`. Note that alts (ie, `{...}`) are expanded before
+this check. In other words, a pattern such as `{a,b}/*` may fail if either `a`
+or `b` do not exist but `*/{a,b}` will never fail because the star may match
+nothing.
+
+```go
+WithFilesOnly()
+```
+
+If passed, doublestar will only return "files" from `Glob`, `GlobWalk`, or
+`FilepathGlob`. In this context, "files" are anything that is not a directory
+or a symlink to a directory.
+
+Note: if combined with the WithNoFollow option, symlinks to directories _will_
+be included in the result since no attempt is made to follow the symlink.
+
+```go
+WithNoFollow()
+```
+
+If passed, doublestar will not follow symlinks while traversing the filesystem.
+However, due to io/fs's _very_ poor support for querying the filesystem about
+symlinks, there's a caveat here: if part of the pattern before any meta
+characters contains a reference to a symlink, it will be followed. For example,
+a pattern such as `path/to/symlink/*` will be followed assuming it is a valid
+symlink to a directory. However, from this same example, a pattern such as
+`path/to/**` will not traverse the `symlink`, nor would `path/*/symlink/*`
+
+Note: if combined with the WithFilesOnly option, symlinks to directories _will_
+be included in the result since no attempt is made to follow the symlink.
+
+### Glob
+
+```go
+func Glob(fsys fs.FS, pattern string, opts ...GlobOption) ([]string, error)
+```
+
+Glob returns the names of all files matching pattern or nil if there is no
+matching file. The syntax of patterns is the same as in `Match()`. The pattern
+may describe hierarchical names such as `usr/*/bin/ed`.
+
+Glob ignores file system errors such as I/O errors reading directories by
+default. The only possible returned error is `ErrBadPattern`, reporting that
+the pattern is malformed.
+
+To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be
+passed.
+
+Note: this is meant as a drop-in replacement for `io/fs.Glob()`. Like
+`io/fs.Glob()`, this function assumes that your pattern uses `/` as the path
+separator even if that's not correct for your OS (like Windows). If you aren't
+sure if that's the case, you can use `filepath.ToSlash()` on your pattern
+before calling `Glob()`.
+
+Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/`
+will return no results and no errors. This seems to be a [conscious
+decision](https://github.com/golang/go/issues/44092#issuecomment-774132549),
+even if counter-intuitive. You can use [SplitPattern] to divide a pattern into
+a base path (to initialize an `FS` object) and pattern.
+
+Note: users should _not_ count on the returned error,
+`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`.
+
+### GlobWalk
+
+```go
+type GlobWalkFunc func(path string, d fs.DirEntry) error
+
+func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc, opts ...GlobOption) error
+```
+
+GlobWalk calls the callback function `fn` for every file matching pattern.  The
+syntax of pattern is the same as in Match() and the behavior is the same as
+Glob(), with regard to limitations (such as patterns containing `/./`, `/../`,
+or starting with `/`). The pattern may describe hierarchical names such as
+usr/*/bin/ed.
+
+GlobWalk may have a small performance benefit over Glob if you do not need a
+slice of matches because it can avoid allocating memory for the matches.
+Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for each
+match, and lets you quit early by returning a non-nil error from your callback
+function. Like `io/fs.WalkDir`, if your callback returns `SkipDir`, GlobWalk
+will skip the current directory. This means that if the current path _is_ a
+directory, GlobWalk will not recurse into it. If the current path is not a
+directory, the rest of the parent directory will be skipped.
+
+GlobWalk ignores file system errors such as I/O errors reading directories by
+default. GlobWalk may return `ErrBadPattern`, reporting that the pattern is
+malformed.
+
+To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be
+passed.
+
+Additionally, if the callback function `fn` returns an error, GlobWalk will
+exit immediately and return that error.
+
+Like Glob(), this function assumes that your pattern uses `/` as the path
+separator even if that's not correct for your OS (like Windows). If you aren't
+sure if that's the case, you can use filepath.ToSlash() on your pattern before
+calling GlobWalk().
+
+Note: users should _not_ count on the returned error,
+`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`.
+
+### FilepathGlob
+
+```go
+func FilepathGlob(pattern string, opts ...GlobOption) (matches []string, err error)
+```
+
+FilepathGlob returns the names of all files matching pattern or nil if there is
+no matching file. The syntax of pattern is the same as in Match(). The pattern
+may describe hierarchical names such as usr/*/bin/ed.
+
+FilepathGlob ignores file system errors such as I/O errors reading directories
+by default. The only possible returned error is `ErrBadPattern`, reporting that
+the pattern is malformed.
+
+To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be
+passed.
+
+Note: FilepathGlob is a convenience function that is meant as a drop-in
+replacement for `path/filepath.Glob()` for users who don't need the
+complication of io/fs. Basically, it:
+
+* Runs `filepath.Clean()` and `ToSlash()` on the pattern
+* Runs `SplitPattern()` to get a base path and a pattern to Glob
+* Creates an FS object from the base path and `Glob()s` on the pattern
+* Joins the base path with all of the matches from `Glob()`
+
+Returned paths will use the system's path separator, just like
+`filepath.Glob()`.
+
+Note: the returned error `doublestar.ErrBadPattern` is not equal to
+`filepath.ErrBadPattern`.
+
+### SplitPattern
+
+```go
+func SplitPattern(p string) (base, pattern string)
+```
+
+SplitPattern is a utility function. Given a pattern, SplitPattern will return
+two strings: the first string is everything up to the last slash (`/`) that
+appears _before_ any unescaped "meta" characters (ie, `*?[{`).  The second
+string is everything after that slash. For example, given the pattern:
+
+```
+../../path/to/meta*/**
+             ^----------- split here
+```
+
+SplitPattern returns "../../path/to" and "meta*/**". This is useful for
+initializing os.DirFS() to call Glob() because Glob() will silently fail if
+your pattern includes `/./` or `/../`. For example:
+
+```go
+base, pattern := SplitPattern("../../path/to/meta*/**")
+fsys := os.DirFS(base)
+matches, err := Glob(fsys, pattern)
+```
+
+If SplitPattern cannot find somewhere to split the pattern (for example,
+`meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in this
+example).
+
+Of course, it is your responsibility to decide if the returned base path is
+"safe" in the context of your application. Perhaps you could use Match() to
+validate against a list of approved base directories?
+
+### ValidatePattern
+
+```go
+func ValidatePattern(s string) bool
+```
+
+Validate a pattern. Patterns are validated while they run in Match(),
+PathMatch(), and Glob(), so, you normally wouldn't need to call this.  However,
+there are cases where this might be useful: for example, if your program allows
+a user to enter a pattern that you'll run at a later time, you might want to
+validate it.
+
+ValidatePattern assumes your pattern uses '/' as the path separator.
+
+### ValidatePathPattern
+
+```go
+func ValidatePathPattern(s string) bool
+```
+
+Like ValidatePattern, only uses your OS path separator. In other words, use
+ValidatePattern if you would normally use Match() or Glob(). Use
+ValidatePathPattern if you would normally use PathMatch(). Keep in mind, Glob()
+requires '/' separators, even if your OS uses something else.
+
+### Patterns
+
+**doublestar** supports the following special terms in the patterns:
+
+Special Terms | Meaning
+------------- | -------
+`*`           | matches any sequence of non-path-separators
+`/**/`        | matches zero or more directories
+`?`           | matches any single non-path-separator character
+`[class]`     | matches any single non-path-separator character against a class of characters ([see "character classes"])
+`{alt1,...}`  | matches a sequence of characters if one of the comma-separated alternatives matches
+
+Any character with a special meaning can be escaped with a backslash (`\`).
+
+A doublestar (`**`) should appear surrounded by path separators such as `/**/`.
+A mid-pattern doublestar (`**`) behaves like bash's globstar option: a pattern
+such as `path/to/**.txt` would return the same results as `path/to/*.txt`. The
+pattern you're looking for is `path/to/**/*.txt`.
+
+#### Character Classes
+
+Character classes support the following:
+
+Class      | Meaning
+---------- | -------
+`[abc]`    | matches any single character within the set
+`[a-z]`    | matches any single character in the range
+`[^class]` | matches any single character which does *not* match the class
+`[!class]` | same as `^`: negates the class
+
+## Performance
+
+```
+goos: darwin
+goarch: amd64
+pkg: github.com/bmatcuk/doublestar/v4
+cpu: Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz
+BenchmarkMatch-8                  285639              3868 ns/op               0 B/op          0 allocs/op
+BenchmarkGoMatch-8                286945              3726 ns/op               0 B/op          0 allocs/op
+BenchmarkPathMatch-8              320511              3493 ns/op               0 B/op          0 allocs/op
+BenchmarkGoPathMatch-8            304236              3434 ns/op               0 B/op          0 allocs/op
+BenchmarkGlob-8                      466           2501123 ns/op          190225 B/op       2849 allocs/op
+BenchmarkGlobWalk-8                  476           2536293 ns/op          184017 B/op       2750 allocs/op
+BenchmarkGoGlob-8                    463           2574836 ns/op          194249 B/op       2929 allocs/op
+```
+
+These benchmarks (in `doublestar_test.go`) compare Match() to path.Match(),
+PathMath() to filepath.Match(), and Glob() + GlobWalk() to io/fs.Glob(). They
+only run patterns that the standard go packages can understand as well (so, no
+`{alts}` or `**`) for a fair comparison. Of course, alts and doublestars will
+be less performant than the other pattern meta characters.
+
+Alts are essentially like running multiple patterns, the number of which can
+get large if your pattern has alts nested inside alts. This affects both
+matching (ie, Match()) and globbing (Glob()).
+
+`**` performance in matching is actually pretty similar to a regular `*`, but
+can cause a large number of reads when globbing as it will need to recursively
+traverse your filesystem.
+
+## Sponsors
+I started this project in 2014 in my spare time and have been maintaining it
+ever since. In that time, it has grown into one of the most popular globbing
+libraries in the Go ecosystem. So, if **doublestar** is a useful library in
+your project, consider [sponsoring] my work! I'd really appreciate it!
+
+[![MASV](../sponsors/MASV.png?raw=true)](https://massive.io/)
+
+Thanks for sponsoring me!
+
+## License
+
+[MIT License](LICENSE)
+
+[SplitPattern]: #splitpattern
+[doublestar]: https://github.com/bmatcuk/doublestar
+[golang]: http://golang.org/
+[io/fs]: https://pkg.go.dev/io/fs
+[see "character classes"]: #character-classes
+[see "patterns"]: #patterns
+[sponsoring]: https://github.com/sponsors/bmatcuk
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md b/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md
new file mode 100644
index 000000000000..25aace3db015
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md
@@ -0,0 +1,63 @@
+# Upgrading from v3 to v4
+
+v4 is a complete rewrite with a focus on performance. Additionally,
+[doublestar] has been updated to use the new [io/fs] package for filesystem
+access. As a result, it is only supported by [golang] v1.16+.
+
+`Match()` and `PathMatch()` mostly did not change, besides big performance
+improvements. Their API is the same. However, note the following corner cases:
+
+* In previous versions of [doublestar], `PathMatch()` could accept patterns
+  that used either platform-specific path separators, or `/`. This was
+  undocumented and didn't match `filepath.Match()`. In v4, both `pattern` and
+  `name` must be using appropriate path separators for the platform. You can
+  use `filepath.FromSlash()` to change `/` to platform-specific separators if
+  you aren't sure.
+* In previous versions of [doublestar], a pattern such as `path/to/a/**` would
+  _not_ match `path/to/a`. In v4, this pattern _will_ match because if `a` was
+  a directory, `Glob()` would return it. In other words, the following returns
+  true: `Match("path/to/a/**", "path/to/a")`
+
+`Glob()` changed from using a [doublestar]-specific filesystem abstraction (the
+`OS` interface) to the [io/fs] package. As a result, it now takes a `fs.FS` as
+its first argument. This change has a couple ramifications:
+
+* Like `io/fs.Glob`, `pattern` must use a `/` as path separator, even on
+  platforms that use something else. You can use `filepath.ToSlash()` on your
+  patterns if you aren't sure.
+* Patterns that contain `/./` or `/../` are invalid. The [io/fs] package
+  rejects them, returning an IO error. Since `Glob()` ignores IO errors, it'll
+  end up being silently rejected. You can run `path.Clean()` to ensure they are
+  removed from the pattern.
+
+v4 also added a `GlobWalk()` function that is slightly more performant than
+`Glob()` if you just need to iterate over the results and don't need a string
+slice. You also get `fs.DirEntry` objects for each result, and can quit early
+if your callback returns an error.
+
+# Upgrading from v2 to v3
+
+v3 introduced using `!` to negate character classes, in addition to `^`. If any
+of your patterns include a character class that starts with an exclamation mark
+(ie, `[!...]`), you'll need to update the pattern to escape or move the
+exclamation mark. Note that, like the caret (`^`), it only negates the
+character class if it is the first character in the character class.
+
+# Upgrading from v1 to v2
+
+The change from v1 to v2 was fairly minor: the return type of the `Open` method
+on the `OS` interface was changed from `*os.File` to `File`, a new interface
+exported by doublestar. The new `File` interface only defines the functionality
+doublestar actually needs (`io.Closer` and `Readdir`), making it easier to use
+doublestar with [go-billy], [afero], or something similar. If you were using
+this functionality, updating should be as easy as updating `Open's` return
+type, since `os.File` already implements `doublestar.File`.
+
+If you weren't using this functionality, updating should be as easy as changing
+your dependencies to point to v2.
+
+[afero]: https://github.com/spf13/afero
+[doublestar]: https://github.com/bmatcuk/doublestar
+[go-billy]: https://github.com/src-d/go-billy
+[golang]: http://golang.org/
+[io/fs]: https://golang.org/pkg/io/fs/
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go b/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go
new file mode 100644
index 000000000000..210fd40ceb3e
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go
@@ -0,0 +1,13 @@
+package doublestar
+
+import (
+	"errors"
+	"path"
+)
+
+// ErrBadPattern indicates a pattern was malformed.
+var ErrBadPattern = path.ErrBadPattern
+
+// ErrPatternNotExist indicates that the pattern passed to Glob, GlobWalk, or
+// FilepathGlob references a path that does not exist.
+var ErrPatternNotExist = errors.New("pattern does not exist")
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/glob.go b/vendor/github.com/bmatcuk/doublestar/v4/glob.go
new file mode 100644
index 000000000000..519601b15c1b
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/glob.go
@@ -0,0 +1,473 @@
+package doublestar
+
+import (
+	"errors"
+	"io/fs"
+	"path"
+)
+
+// Glob returns the names of all files matching pattern or nil if there is no
+// matching file. The syntax of pattern is the same as in Match(). The pattern
+// may describe hierarchical names such as usr/*/bin/ed.
+//
+// Glob ignores file system errors such as I/O errors reading directories by
+// default. The only possible returned error is ErrBadPattern, reporting that
+// the pattern is malformed.
+//
+// To enable aborting on I/O errors, the WithFailOnIOErrors option can be
+// passed.
+//
+// Note: this is meant as a drop-in replacement for io/fs.Glob(). Like
+// io/fs.Glob(), this function assumes that your pattern uses `/` as the path
+// separator even if that's not correct for your OS (like Windows). If you
+// aren't sure if that's the case, you can use filepath.ToSlash() on your
+// pattern before calling Glob().
+//
+// Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/`
+// will return no results and no errors. You can use SplitPattern to divide a
+// pattern into a base path (to initialize an `FS` object) and pattern.
+//
+// Note: users should _not_ count on the returned error,
+// doublestar.ErrBadPattern, being equal to path.ErrBadPattern.
+//
+func Glob(fsys fs.FS, pattern string, opts ...GlobOption) ([]string, error) {
+	if !ValidatePattern(pattern) {
+		return nil, ErrBadPattern
+	}
+
+	g := newGlob(opts...)
+
+	if hasMidDoubleStar(pattern) {
+		// If the pattern has a `**` anywhere but the very end, GlobWalk is more
+		// performant because it can get away with less allocations. If the pattern
+		// ends in a `**`, both methods are pretty much the same, but Glob has a
+		// _very_ slight advantage because of lower function call overhead.
+		var matches []string
+		err := g.doGlobWalk(fsys, pattern, true, true, func(p string, d fs.DirEntry) error {
+			matches = append(matches, p)
+			return nil
+		})
+		return matches, err
+	}
+	return g.doGlob(fsys, pattern, nil, true, true)
+}
+
+// Does the actual globbin'
+//   - firstSegment is true if we're in the first segment of the pattern, ie,
+//     the right-most part where we can match files. If it's false, we're
+//     somewhere in the middle (or at the beginning) and can only match
+//     directories since there are path segments above us.
+//   - beforeMeta is true if we're exploring segments before any meta
+//     characters, ie, in a pattern such as `path/to/file*.txt`, the `path/to/`
+//     bit does not contain any meta characters.
+func (g *glob) doGlob(fsys fs.FS, pattern string, m []string, firstSegment, beforeMeta bool) (matches []string, err error) {
+	matches = m
+	patternStart := indexMeta(pattern)
+	if patternStart == -1 {
+		// pattern doesn't contain any meta characters - does a file matching the
+		// pattern exist?
+		// The pattern may contain escaped wildcard characters for an exact path match.
+		path := unescapeMeta(pattern)
+		pathInfo, pathExists, pathErr := g.exists(fsys, path, beforeMeta)
+		if pathErr != nil {
+			return nil, pathErr
+		}
+
+		if pathExists && (!firstSegment || !g.filesOnly || !pathInfo.IsDir()) {
+			matches = append(matches, path)
+		}
+
+		return
+	}
+
+	dir := "."
+	splitIdx := lastIndexSlashOrAlt(pattern)
+	if splitIdx != -1 {
+		if pattern[splitIdx] == '}' {
+			openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx])
+			if openingIdx == -1 {
+				// if there's no matching opening index, technically Match() will treat
+				// an unmatched `}` as nothing special, so... we will, too!
+				splitIdx = lastIndexSlash(pattern[:splitIdx])
+				if splitIdx != -1 {
+					dir = pattern[:splitIdx]
+					pattern = pattern[splitIdx+1:]
+				}
+			} else {
+				// otherwise, we have to handle the alts:
+				return g.globAlts(fsys, pattern, openingIdx, splitIdx, matches, firstSegment, beforeMeta)
+			}
+		} else {
+			dir = pattern[:splitIdx]
+			pattern = pattern[splitIdx+1:]
+		}
+	}
+
+	// if `splitIdx` is less than `patternStart`, we know `dir` has no meta
+	// characters. They would be equal if they are both -1, which means `dir`
+	// will be ".", and we know that doesn't have meta characters either.
+	if splitIdx <= patternStart {
+		return g.globDir(fsys, dir, pattern, matches, firstSegment, beforeMeta)
+	}
+
+	var dirs []string
+	dirs, err = g.doGlob(fsys, dir, matches, false, beforeMeta)
+	if err != nil {
+		return
+	}
+	for _, d := range dirs {
+		matches, err = g.globDir(fsys, d, pattern, matches, firstSegment, false)
+		if err != nil {
+			return
+		}
+	}
+
+	return
+}
+
+// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the
+// indexes of `{` and `}`, respectively
+func (g *glob) globAlts(fsys fs.FS, pattern string, openingIdx, closingIdx int, m []string, firstSegment, beforeMeta bool) (matches []string, err error) {
+	matches = m
+
+	var dirs []string
+	startIdx := 0
+	afterIdx := closingIdx + 1
+	splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx])
+	if splitIdx == -1 || pattern[splitIdx] == '}' {
+		// no common prefix
+		dirs = []string{""}
+	} else {
+		// our alts have a common prefix that we can process first
+		dirs, err = g.doGlob(fsys, pattern[:splitIdx], matches, false, beforeMeta)
+		if err != nil {
+			return
+		}
+
+		startIdx = splitIdx + 1
+	}
+
+	for _, d := range dirs {
+		patIdx := openingIdx + 1
+		altResultsStartIdx := len(matches)
+		thisResultStartIdx := altResultsStartIdx
+		for patIdx < closingIdx {
+			nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true)
+			if nextIdx == -1 {
+				nextIdx = closingIdx
+			} else {
+				nextIdx += patIdx
+			}
+
+			alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx)
+			matches, err = g.doGlob(fsys, alt, matches, firstSegment, beforeMeta)
+			if err != nil {
+				return
+			}
+
+			matchesLen := len(matches)
+			if altResultsStartIdx != thisResultStartIdx && thisResultStartIdx != matchesLen {
+				// Alts can result in matches that aren't sorted, or, worse, duplicates
+				// (consider the trivial pattern `path/to/{a,*}`). Since doGlob returns
+				// sorted results, we can do a sort of in-place merge and remove
+				// duplicates. But, we only need to do this if this isn't the first alt
+				// (ie, `altResultsStartIdx != thisResultsStartIdx`) and if the latest
+				// alt actually added some matches (`thisResultStartIdx !=
+				// len(matches)`)
+				matches = sortAndRemoveDups(matches, altResultsStartIdx, thisResultStartIdx, matchesLen)
+
+				// length of matches may have changed
+				thisResultStartIdx = len(matches)
+			} else {
+				thisResultStartIdx = matchesLen
+			}
+
+			patIdx = nextIdx + 1
+		}
+	}
+
+	return
+}
+
+// find files/subdirectories in the given `dir` that match `pattern`
+func (g *glob) globDir(fsys fs.FS, dir, pattern string, matches []string, canMatchFiles, beforeMeta bool) (m []string, e error) {
+	m = matches
+
+	if pattern == "" {
+		if !canMatchFiles || !g.filesOnly {
+			// pattern can be an empty string if the original pattern ended in a
+			// slash, in which case, we should just return dir, but only if it
+			// actually exists and it's a directory (or a symlink to a directory)
+			_, isDir, err := g.isPathDir(fsys, dir, beforeMeta)
+			if err != nil {
+				return nil, err
+			}
+			if isDir {
+				m = append(m, dir)
+			}
+		}
+		return
+	}
+
+	if pattern == "**" {
+		return g.globDoubleStar(fsys, dir, m, canMatchFiles, beforeMeta)
+	}
+
+	dirs, err := fs.ReadDir(fsys, dir)
+	if err != nil {
+		if errors.Is(err, fs.ErrNotExist) {
+			e = g.handlePatternNotExist(beforeMeta)
+		} else {
+			e = g.forwardErrIfFailOnIOErrors(err)
+		}
+		return
+	}
+
+	var matched bool
+	for _, info := range dirs {
+		name := info.Name()
+		matched, e = matchWithSeparator(pattern, name, '/', false)
+		if e != nil {
+			return
+		}
+		if matched {
+			matched = canMatchFiles
+			if !matched || g.filesOnly {
+				matched, e = g.isDir(fsys, dir, name, info)
+				if e != nil {
+					return
+				}
+				if canMatchFiles {
+					// if we're here, it's because g.filesOnly
+					// is set and we don't want directories
+					matched = !matched
+				}
+			}
+			if matched {
+				m = append(m, path.Join(dir, name))
+			}
+		}
+	}
+
+	return
+}
+
+func (g *glob) globDoubleStar(fsys fs.FS, dir string, matches []string, canMatchFiles, beforeMeta bool) ([]string, error) {
+	dirs, err := fs.ReadDir(fsys, dir)
+	if err != nil {
+		if errors.Is(err, fs.ErrNotExist) {
+			return matches, g.handlePatternNotExist(beforeMeta)
+		} else {
+			return matches, g.forwardErrIfFailOnIOErrors(err)
+		}
+	}
+
+	if !g.filesOnly {
+		// `**` can match *this* dir, so add it
+		matches = append(matches, dir)
+	}
+
+	for _, info := range dirs {
+		name := info.Name()
+		isDir, err := g.isDir(fsys, dir, name, info)
+		if err != nil {
+			return nil, err
+		}
+		if isDir {
+			matches, err = g.globDoubleStar(fsys, path.Join(dir, name), matches, canMatchFiles, false)
+			if err != nil {
+				return nil, err
+			}
+		} else if canMatchFiles {
+			matches = append(matches, path.Join(dir, name))
+		}
+	}
+
+	return matches, nil
+}
+
+// Returns true if the pattern has a doublestar in the middle of the pattern.
+// In this case, GlobWalk is faster because it can get away with less
+// allocations. However, Glob has a _very_ slight edge if the pattern ends in
+// `**`.
+func hasMidDoubleStar(p string) bool {
+	// subtract 3: 2 because we want to return false if the pattern ends in `**`
+	// (Glob is _very_ slightly faster in that case), and the extra 1 because our
+	// loop checks p[i] and p[i+1].
+	l := len(p) - 3
+	for i := 0; i < l; i++ {
+		if p[i] == '\\' {
+			// escape next byte
+			i++
+		} else if p[i] == '*' && p[i+1] == '*' {
+			return true
+		}
+	}
+	return false
+}
+
+// Returns the index of the first unescaped meta character, or negative 1.
+func indexMeta(s string) int {
+	var c byte
+	l := len(s)
+	for i := 0; i < l; i++ {
+		c = s[i]
+		if c == '*' || c == '?' || c == '[' || c == '{' {
+			return i
+		} else if c == '\\' {
+			// skip next byte
+			i++
+		}
+	}
+	return -1
+}
+
+// Returns the index of the last unescaped slash or closing alt (`}`) in the
+// string, or negative 1. Either character is considered escaped when it is
+// immediately preceded by a backslash.
+func lastIndexSlashOrAlt(s string) int {
+	for i := len(s) - 1; i >= 0; i-- {
+		if (s[i] == '/' || s[i] == '}') && (i == 0 || s[i-1] != '\\') {
+			return i
+		}
+	}
+	return -1
+}
+
+// Returns the index of the last unescaped slash in the string, or negative 1.
+// A slash immediately preceded by a backslash is treated as escaped and
+// skipped.
+func lastIndexSlash(s string) int {
+	for i := len(s) - 1; i >= 0; i-- {
+		if s[i] == '/' && (i == 0 || s[i-1] != '\\') {
+			return i
+		}
+	}
+	return -1
+}
+
+// Assuming the byte after the end of `s` is a closing `}`, this function will
+// find the index of the matching `{`. That is, it'll skip over any nested `{}`
+// and account for escaping. Returns -1 if no unescaped matching `{` exists.
+func indexMatchedOpeningAlt(s string) int {
+	alts := 1
+	for i := len(s) - 1; i >= 0; i-- {
+		if s[i] == '}' && (i == 0 || s[i-1] != '\\') {
+			alts++
+		} else if s[i] == '{' && (i == 0 || s[i-1] != '\\') {
+			if alts--; alts == 0 {
+				return i
+			}
+		}
+	}
+	return -1
+}
+
+// exists reports whether `name` exists in fsys. It returns the FileInfo for
+// the path (nil when Stat failed), whether the path exists, and any error:
+// a missing path goes through handlePatternNotExist (using beforeMeta),
+// while other Stat errors pass through forwardErrIfFailOnIOErrors.
+func (g *glob) exists(fsys fs.FS, name string, beforeMeta bool) (fs.FileInfo, bool, error) {
+	// name might end in a slash, but Stat doesn't like that
+	namelen := len(name)
+	if namelen > 1 && name[namelen-1] == '/' {
+		name = name[:namelen-1]
+	}
+
+	info, err := fs.Stat(fsys, name)
+	if errors.Is(err, fs.ErrNotExist) {
+		return nil, false, g.handlePatternNotExist(beforeMeta)
+	}
+	return info, err == nil, g.forwardErrIfFailOnIOErrors(err)
+}
+
+// Returns true if the path exists and is a directory or a symlink to a
+// directory (fs.Stat follows symlinks). Like exists, a missing path goes
+// through handlePatternNotExist and other Stat errors through
+// forwardErrIfFailOnIOErrors.
+func (g *glob) isPathDir(fsys fs.FS, name string, beforeMeta bool) (fs.FileInfo, bool, error) {
+	info, err := fs.Stat(fsys, name)
+	if errors.Is(err, fs.ErrNotExist) {
+		return nil, false, g.handlePatternNotExist(beforeMeta)
+	}
+	return info, err == nil && info.IsDir(), g.forwardErrIfFailOnIOErrors(err)
+}
+
+// Returns whether or not the given DirEntry is a directory. If the DirEntry
+// represents a symbolic link, the link is followed by running fs.Stat() on
+// `path.Join(dir, name)` (if dir is "", name will be used without joining).
+// When the WithNoFollow option is set, symlinks are not followed and the
+// entry's own type is reported instead.
+func (g *glob) isDir(fsys fs.FS, dir, name string, info fs.DirEntry) (bool, error) {
+	if !g.noFollow && (info.Type()&fs.ModeSymlink) > 0 {
+		p := name
+		if dir != "" {
+			p = path.Join(dir, name)
+		}
+		finfo, err := fs.Stat(fsys, p)
+		if err != nil {
+			if errors.Is(err, fs.ErrNotExist) {
+				// this function is only ever called while expanding a glob, so it can
+				// never return ErrPatternNotExist
+				return false, nil
+			}
+			return false, g.forwardErrIfFailOnIOErrors(err)
+		}
+		return finfo.IsDir(), nil
+	}
+	return info.IsDir(), nil
+}
+
+// Builds a string from an alt: the pattern with the currently selected
+// alternative (pattern[currentIdx:nextIdx]) spliced in place of the whole
+// `{...}` group, optionally prefixed by `prefix` and a slash.
+func buildAlt(prefix, pattern string, startIdx, openingIdx, currentIdx, nextIdx, afterIdx int) string {
+	// pattern:
+	//   ignored/start{alts,go,here}remaining - len = 36
+	//           |    |     | |     ^--- afterIdx   = 27
+	//           |    |     | \--------- nextIdx    = 21
+	//           |    |     \----------- currentIdx = 19
+	//           |    \----------------- openingIdx = 13
+	//           \---------------------- startIdx   = 8
+	//
+	// result:
+	//   prefix/startgoremaining - len = 7 + 5 + 2 + 9 = 23
+	var buf []byte
+	patLen := len(pattern)
+	size := (openingIdx - startIdx) + (nextIdx - currentIdx) + (patLen - afterIdx)
+	if prefix != "" && prefix != "." {
+		buf = make([]byte, 0, size+len(prefix)+1)
+		buf = append(buf, prefix...)
+		buf = append(buf, '/')
+	} else {
+		buf = make([]byte, 0, size)
+	}
+	buf = append(buf, pattern[startIdx:openingIdx]...)
+	buf = append(buf, pattern[currentIdx:nextIdx]...)
+	if afterIdx < patLen {
+		buf = append(buf, pattern[afterIdx:]...)
+	}
+	return string(buf)
+}
+
+// Running alts can produce results that are not sorted, and, worse, can cause
+// duplicates (consider the trivial pattern `path/to/{a,*}`). Since we know
+// each run of doGlob is sorted, we can basically do the "merge" step of a
+// merge sort in-place: the sorted run matches[idx1:idx2] is merged with the
+// sorted run matches[idx2:l], dropping duplicates, and the merged slice
+// (possibly shorter than l) is returned.
+func sortAndRemoveDups(matches []string, idx1, idx2, l int) []string {
+	var tmp string
+	for ; idx1 < idx2; idx1++ {
+		if matches[idx1] < matches[idx2] {
+			// order is correct
+			continue
+		} else if matches[idx1] > matches[idx2] {
+			// need to swap and then re-sort matches above idx2
+			tmp = matches[idx1]
+			matches[idx1] = matches[idx2]
+
+			shft := idx2 + 1
+			for ; shft < l && matches[shft] < tmp; shft++ {
+				matches[shft-1] = matches[shft]
+			}
+			matches[shft-1] = tmp
+		} else {
+			// duplicate - shift matches above idx2 down one and decrement l
+			for shft := idx2 + 1; shft < l; shft++ {
+				matches[shft-1] = matches[shft]
+			}
+			if l--; idx2 == l {
+				// nothing left to do... matches[idx2:] must have been full of dups
+				break
+			}
+		}
+	}
+	return matches[:l]
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go b/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go
new file mode 100644
index 000000000000..9483c4bb008b
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go
@@ -0,0 +1,144 @@
+package doublestar
+
+import "strings"
+
+// glob is an internal type to store options during globbing.
+type glob struct {
+	failOnIOErrors        bool // abort on I/O errors (WithFailOnIOErrors)
+	failOnPatternNotExist bool // error when a pre-meta path is missing (WithFailOnPatternNotExist)
+	filesOnly             bool // report only files, not directories (WithFilesOnly)
+	noFollow              bool // do not follow symlinks while traversing (WithNoFollow)
+}
+
+// GlobOption represents a setting that can be passed to Glob, GlobWalk, and
+// FilepathGlob.
+type GlobOption func(*glob)
+
+// Construct a new glob object with the given options. Options are applied
+// in the order they are passed.
+func newGlob(opts ...GlobOption) *glob {
+	g := &glob{}
+	for _, opt := range opts {
+		opt(g)
+	}
+	return g
+}
+
+// WithFailOnIOErrors is an option that can be passed to Glob, GlobWalk, or
+// FilepathGlob. If passed, doublestar will abort and return IO errors when
+// encountered. Note that if the glob pattern references a path that does not
+// exist (such as `nonexistent/path/*`), this is _not_ considered an IO error:
+// it is considered a pattern with no matches. Without this option, I/O errors
+// are silently ignored (see forwardErrIfFailOnIOErrors).
+//
+func WithFailOnIOErrors() GlobOption {
+	return func(g *glob) {
+		g.failOnIOErrors = true
+	}
+}
+
+// WithFailOnPatternNotExist is an option that can be passed to Glob, GlobWalk,
+// or FilepathGlob. If passed, doublestar will abort and return
+// ErrPatternNotExist if the pattern references a path that does not exist
+// before any meta characters, such as in `nonexistent/path/*`. Note that alts
+// (ie, `{...}`) are expanded before this check. In other words, a pattern such
+// as `{a,b}/*` may fail if either `a` or `b` do not exist but `*/{a,b}` will
+// never fail because the star may match nothing.
+//
+func WithFailOnPatternNotExist() GlobOption {
+	return func(g *glob) {
+		g.failOnPatternNotExist = true
+	}
+}
+
+// WithFilesOnly is an option that can be passed to Glob, GlobWalk, or
+// FilepathGlob. If passed, doublestar will only return files that match the
+// pattern, not directories (directories are still traversed; they are just
+// omitted from the results).
+//
+// Note: if combined with the WithNoFollow option, symlinks to directories
+// _will_ be included in the result since no attempt is made to follow the
+// symlink.
+//
+func WithFilesOnly() GlobOption {
+	return func(g *glob) {
+		g.filesOnly = true
+	}
+}
+
+// WithNoFollow is an option that can be passed to Glob, GlobWalk, or
+// FilepathGlob. If passed, doublestar will not follow symlinks while
+// traversing the filesystem. However, due to io/fs's _very_ poor support for
+// querying the filesystem about symlinks, there's a caveat here: if part of
+// the pattern before any meta characters contains a reference to a symlink, it
+// will be followed. For example, a pattern such as `path/to/symlink/*` will be
+// followed assuming it is a valid symlink to a directory. However, from this
+// same example, a pattern such as `path/to/**` will not traverse the
+// `symlink`, nor would `path/*/symlink/*`. The symlink check itself is
+// implemented by (*glob).isDir.
+//
+// Note: if combined with the WithFilesOnly option, symlinks to directories
+// _will_ be included in the result since no attempt is made to follow the
+// symlink.
+//
+func WithNoFollow() GlobOption {
+	return func(g *glob) {
+		g.noFollow = true
+	}
+}
+
+// forwardErrIfFailOnIOErrors is used to wrap the return values of I/O
+// functions. When failOnIOErrors is enabled, it will return err; otherwise, it
+// always returns nil.
+//
+func (g *glob) forwardErrIfFailOnIOErrors(err error) error {
+	if g.failOnIOErrors {
+		return err
+	}
+	// default behavior: suppress the error
+	return nil
+}
+
+// handlePatternNotExist handles fs.ErrNotExist errors. If
+// WithFailOnPatternNotExist has been enabled and canFail is true, this will
+// return ErrPatternNotExist. Otherwise, it will return nil.
+//
+func (g *glob) handlePatternNotExist(canFail bool) error {
+	if canFail && g.failOnPatternNotExist {
+		return ErrPatternNotExist
+	}
+	return nil
+}
+
+// Format options for debugging/testing purposes. Enabled options are listed
+// in a fixed order (matching the field declaration order); "nil" is printed
+// when no options are set.
+func (g *glob) GoString() string {
+	var b strings.Builder
+	b.WriteString("opts: ")
+
+	hasOpts := false
+	if g.failOnIOErrors {
+		b.WriteString("WithFailOnIOErrors")
+		hasOpts = true
+	}
+	if g.failOnPatternNotExist {
+		if hasOpts {
+			b.WriteString(", ")
+		}
+		b.WriteString("WithFailOnPatternNotExist")
+		hasOpts = true
+	}
+	if g.filesOnly {
+		if hasOpts {
+			b.WriteString(", ")
+		}
+		b.WriteString("WithFilesOnly")
+		hasOpts = true
+	}
+	if g.noFollow {
+		if hasOpts {
+			b.WriteString(", ")
+		}
+		b.WriteString("WithNoFollow")
+		hasOpts = true
+	}
+
+	if !hasOpts {
+		b.WriteString("nil")
+	}
+	return b.String()
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go b/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go
new file mode 100644
index 000000000000..84e764f0e26e
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go
@@ -0,0 +1,414 @@
+package doublestar
+
+import (
+	"errors"
+	"io/fs"
+	"path"
+	"path/filepath"
+	"strings"
+)
+
+// If returned from GlobWalkFunc, will cause GlobWalk to skip the current
+// directory. In other words, if the current path is a directory, GlobWalk will
+// not recurse into it. Otherwise, GlobWalk will skip the rest of the current
+// directory. SkipDir is an alias for io/fs.SkipDir.
+var SkipDir = fs.SkipDir
+
+// Callback function for GlobWalk(). If the function returns an error, GlobWalk
+// will end immediately and return the same error.
+type GlobWalkFunc func(path string, d fs.DirEntry) error
+
+// GlobWalk calls the callback function `fn` for every file matching pattern.
+// The syntax of pattern is the same as in Match() and the behavior is the same
+// as Glob(), with regard to limitations (such as patterns containing `/./`,
+// `/../`, or starting with `/`). The pattern may describe hierarchical names
+// such as usr/*/bin/ed.
+//
+// GlobWalk may have a small performance benefit over Glob if you do not need a
+// slice of matches because it can avoid allocating memory for the matches.
+// Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for
+// each match, and lets you quit early by returning a non-nil error from your
+// callback function. Like `io/fs.WalkDir`, if your callback returns `SkipDir`,
+// GlobWalk will skip the current directory. This means that if the current
+// path _is_ a directory, GlobWalk will not recurse into it. If the current
+// path is not a directory, the rest of the parent directory will be skipped.
+//
+// GlobWalk ignores file system errors such as I/O errors reading directories
+// by default. GlobWalk may return ErrBadPattern, reporting that the pattern is
+// malformed.
+//
+// To enable aborting on I/O errors, the WithFailOnIOErrors option can be
+// passed.
+//
+// Additionally, if the callback function `fn` returns an error, GlobWalk will
+// exit immediately and return that error.
+//
+// Like Glob(), this function assumes that your pattern uses `/` as the path
+// separator even if that's not correct for your OS (like Windows). If you
+// aren't sure if that's the case, you can use filepath.ToSlash() on your
+// pattern before calling GlobWalk().
+//
+// Note: users should _not_ count on the returned error,
+// doublestar.ErrBadPattern, being equal to path.ErrBadPattern.
+//
+func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc, opts ...GlobOption) error {
+	// the pattern is validated once up front; later matching runs unvalidated
+	if !ValidatePattern(pattern) {
+		return ErrBadPattern
+	}
+
+	g := newGlob(opts...)
+	return g.doGlobWalk(fsys, pattern, true, true, fn)
+}
+
+// Actually execute GlobWalk
+//   - firstSegment is true if we're in the first segment of the pattern, ie,
+//     the right-most part where we can match files. If it's false, we're
+//     somewhere in the middle (or at the beginning) and can only match
+//     directories since there are path segments above us.
+//   - beforeMeta is true if we're exploring segments before any meta
+//     characters, ie, in a pattern such as `path/to/file*.txt`, the `path/to/`
+//     bit does not contain any meta characters.
+func (g *glob) doGlobWalk(fsys fs.FS, pattern string, firstSegment, beforeMeta bool, fn GlobWalkFunc) error {
+	patternStart := indexMeta(pattern)
+	if patternStart == -1 {
+		// pattern doesn't contain any meta characters - does a file matching the
+		// pattern exist?
+		// The pattern may contain escaped wildcard characters for an exact path match.
+		path := unescapeMeta(pattern)
+		info, pathExists, err := g.exists(fsys, path, beforeMeta)
+		if pathExists && (!firstSegment || !g.filesOnly || !info.IsDir()) {
+			err = fn(path, dirEntryFromFileInfo(info))
+			if err == SkipDir {
+				err = nil
+			}
+		}
+		return err
+	}
+
+	// split the pattern into a leading directory part (`dir`) and the rest,
+	// cutting at the last unescaped slash or closing alt
+	dir := "."
+	splitIdx := lastIndexSlashOrAlt(pattern)
+	if splitIdx != -1 {
+		if pattern[splitIdx] == '}' {
+			openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx])
+			if openingIdx == -1 {
+				// if there's no matching opening index, technically Match() will treat
+				// an unmatched `}` as nothing special, so... we will, too!
+				splitIdx = lastIndexSlash(pattern[:splitIdx])
+				if splitIdx != -1 {
+					dir = pattern[:splitIdx]
+					pattern = pattern[splitIdx+1:]
+				}
+			} else {
+				// otherwise, we have to handle the alts:
+				return g.globAltsWalk(fsys, pattern, openingIdx, splitIdx, firstSegment, beforeMeta, fn)
+			}
+		} else {
+			dir = pattern[:splitIdx]
+			pattern = pattern[splitIdx+1:]
+		}
+	}
+
+	// if `splitIdx` is less than `patternStart`, we know `dir` has no meta
+	// characters. They would be equal if they are both -1, which means `dir`
+	// will be ".", and we know that doesn't have meta characters either.
+	if splitIdx <= patternStart {
+		return g.globDirWalk(fsys, dir, pattern, firstSegment, beforeMeta, fn)
+	}
+
+	// `dir` itself contains meta characters: expand it first, then walk the
+	// remaining pattern under each expansion
+	return g.doGlobWalk(fsys, dir, false, beforeMeta, func(p string, d fs.DirEntry) error {
+		if err := g.globDirWalk(fsys, p, pattern, firstSegment, false, fn); err != nil {
+			return err
+		}
+		return nil
+	})
+}
+
+// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the
+// indexes of `{` and `}`, respectively. Matches for all alternatives are
+// collected (sorted, de-duplicated) first and then fed to `fn`, honoring
+// SkipDir by skipping descendants of the skipped entry.
+func (g *glob) globAltsWalk(fsys fs.FS, pattern string, openingIdx, closingIdx int, firstSegment, beforeMeta bool, fn GlobWalkFunc) (err error) {
+	var matches []DirEntryWithFullPath
+	startIdx := 0
+	afterIdx := closingIdx + 1
+	splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx])
+	if splitIdx == -1 || pattern[splitIdx] == '}' {
+		// no common prefix
+		matches, err = g.doGlobAltsWalk(fsys, "", pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, beforeMeta, matches)
+		if err != nil {
+			return
+		}
+	} else {
+		// our alts have a common prefix that we can process first
+		startIdx = splitIdx + 1
+		innerBeforeMeta := beforeMeta && !hasMetaExceptAlts(pattern[:splitIdx])
+		err = g.doGlobWalk(fsys, pattern[:splitIdx], false, beforeMeta, func(p string, d fs.DirEntry) (e error) {
+			matches, e = g.doGlobAltsWalk(fsys, p, pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, innerBeforeMeta, matches)
+			return e
+		})
+		if err != nil {
+			return
+		}
+	}
+
+	skip := ""
+	for _, m := range matches {
+		if skip != "" {
+			// Because matches are sorted, we know that descendants of the skipped
+			// item must come immediately after the skipped item. If we find an item
+			// that does not have a prefix matching the skipped item, we know we're
+			// done skipping. I'm using strings.HasPrefix here because
+			// filepath.HasPrefix has been marked deprecated (and just calls
+			// strings.HasPrefix anyway). The reason it's deprecated is because it
+			// doesn't handle case-insensitive paths, nor does it guarantee that the
+			// prefix is actually a parent directory. Neither is an issue here: the
+			// paths come from the system so their cases will match, and we guarantee
+			// a parent directory by appending a slash to the prefix.
+			//
+			// NOTE: m.Path will always use slashes as path separators.
+			if strings.HasPrefix(m.Path, skip) {
+				continue
+			}
+			skip = ""
+		}
+		if err = fn(m.Path, m.Entry); err != nil {
+			if err == SkipDir {
+				// NOTE(review): this `:=` shadows the named return `err`; the
+				// `err = nil` below clears only the shadowed copy, so the outer
+				// err still holds SkipDir when the loop ends and the final naked
+				// return may leak it — confirm against upstream doublestar.
+				isDir, err := g.isDir(fsys, "", m.Path, m.Entry)
+				if err != nil {
+					return err
+				}
+				if isDir {
+					// append a slash to guarantee `skip` will be treated as a parent dir
+					skip = m.Path + "/"
+				} else {
+					// Dir() calls Clean() which calls FromSlash(), so we need to convert
+					// back to slashes
+					skip = filepath.ToSlash(filepath.Dir(m.Path)) + "/"
+				}
+				err = nil
+				continue
+			}
+			return
+		}
+	}
+
+	return
+}
+
+// runs actual matching for alts: each alternative between `{` and `}` is
+// spliced into the pattern (see buildAlt) and globbed; results are kept in
+// sorted order without duplicates via an in-place insertion sort.
+func (g *glob) doGlobAltsWalk(fsys fs.FS, d, pattern string, startIdx, openingIdx, closingIdx, afterIdx int, firstSegment, beforeMeta bool, m []DirEntryWithFullPath) (matches []DirEntryWithFullPath, err error) {
+	matches = m
+	matchesLen := len(m)
+	patIdx := openingIdx + 1
+	for patIdx < closingIdx {
+		// find the end of the current alternative (next unescaped `,`)
+		nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true)
+		if nextIdx == -1 {
+			nextIdx = closingIdx
+		} else {
+			nextIdx += patIdx
+		}
+
+		alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx)
+		err = g.doGlobWalk(fsys, alt, firstSegment, beforeMeta, func(p string, d fs.DirEntry) error {
+			// insertion sort, ignoring dups
+			insertIdx := matchesLen
+			for insertIdx > 0 && matches[insertIdx-1].Path > p {
+				insertIdx--
+			}
+			if insertIdx > 0 && matches[insertIdx-1].Path == p {
+				// dup
+				return nil
+			}
+
+			// append to grow the slice, then insert
+			entry := DirEntryWithFullPath{d, p}
+			matches = append(matches, entry)
+			for i := matchesLen; i > insertIdx; i-- {
+				matches[i] = matches[i-1]
+			}
+			matches[insertIdx] = entry
+			matchesLen++
+
+			return nil
+		})
+		if err != nil {
+			return
+		}
+
+		patIdx = nextIdx + 1
+	}
+
+	return
+}
+
+// globDirWalk matches `pattern` (a single, slash-free segment) against the
+// entries of `dir`, invoking `fn` for each match. canMatchFiles mirrors
+// firstSegment in doGlobWalk: only the last segment may match plain files.
+func (g *glob) globDirWalk(fsys fs.FS, dir, pattern string, canMatchFiles, beforeMeta bool, fn GlobWalkFunc) (e error) {
+	if pattern == "" {
+		if !canMatchFiles || !g.filesOnly {
+			// pattern can be an empty string if the original pattern ended in a
+			// slash, in which case, we should just return dir, but only if it
+			// actually exists and it's a directory (or a symlink to a directory)
+			info, isDir, err := g.isPathDir(fsys, dir, beforeMeta)
+			if err != nil {
+				return err
+			}
+			if isDir {
+				e = fn(dir, dirEntryFromFileInfo(info))
+				if e == SkipDir {
+					e = nil
+				}
+			}
+		}
+		return
+	}
+
+	if pattern == "**" {
+		// `**` can match *this* dir
+		info, dirExists, err := g.exists(fsys, dir, beforeMeta)
+		if err != nil {
+			return err
+		}
+		if !dirExists || !info.IsDir() {
+			return nil
+		}
+		if !canMatchFiles || !g.filesOnly {
+			if e = fn(dir, dirEntryFromFileInfo(info)); e != nil {
+				if e == SkipDir {
+					e = nil
+				}
+				return
+			}
+		}
+		return g.globDoubleStarWalk(fsys, dir, canMatchFiles, fn)
+	}
+
+	dirs, err := fs.ReadDir(fsys, dir)
+	if err != nil {
+		if errors.Is(err, fs.ErrNotExist) {
+			return g.handlePatternNotExist(beforeMeta)
+		}
+		return g.forwardErrIfFailOnIOErrors(err)
+	}
+
+	var matched bool
+	for _, info := range dirs {
+		name := info.Name()
+		// matching runs unvalidated: the full pattern was validated up front
+		matched, e = matchWithSeparator(pattern, name, '/', false)
+		if e != nil {
+			return
+		}
+		if matched {
+			// the name matched the pattern; now apply the canMatchFiles /
+			// filesOnly filters, which may require knowing if it's a directory
+			matched = canMatchFiles
+			if !matched || g.filesOnly {
+				matched, e = g.isDir(fsys, dir, name, info)
+				if e != nil {
+					return e
+				}
+				if canMatchFiles {
+					// if we're here, it's because g.filesOnly
+					// is set and we don't want directories
+					matched = !matched
+				}
+			}
+			if matched {
+				if e = fn(path.Join(dir, name), info); e != nil {
+					if e == SkipDir {
+						e = nil
+					}
+					return
+				}
+			}
+		}
+	}
+
+	return
+}
+
+// recursively walk files/directories in a directory. A SkipDir returned by
+// `fn` for a directory skips recursion into that directory only; a SkipDir
+// for a file stops walking the rest of its parent directory.
+func (g *glob) globDoubleStarWalk(fsys fs.FS, dir string, canMatchFiles bool, fn GlobWalkFunc) (e error) {
+	dirs, err := fs.ReadDir(fsys, dir)
+	if err != nil {
+		if errors.Is(err, fs.ErrNotExist) {
+			// This function is only ever called after we know the top-most directory
+			// exists, so, if we ever get here, we know we'll never return
+			// ErrPatternNotExist.
+			return nil
+		}
+		return g.forwardErrIfFailOnIOErrors(err)
+	}
+
+	for _, info := range dirs {
+		name := info.Name()
+		isDir, err := g.isDir(fsys, dir, name, info)
+		if err != nil {
+			return err
+		}
+
+		if isDir {
+			p := path.Join(dir, name)
+			if !canMatchFiles || !g.filesOnly {
+				// `**` can match *this* dir, so add it
+				if e = fn(p, info); e != nil {
+					if e == SkipDir {
+						e = nil
+						continue
+					}
+					return
+				}
+			}
+			if e = g.globDoubleStarWalk(fsys, p, canMatchFiles, fn); e != nil {
+				return
+			}
+		} else if canMatchFiles {
+			if e = fn(path.Join(dir, name), info); e != nil {
+				if e == SkipDir {
+					e = nil
+				}
+				return
+			}
+		}
+	}
+
+	return
+}
+
+// DirEntryFromFileInfo adapts an fs.FileInfo into an fs.DirEntry.
+type DirEntryFromFileInfo struct {
+	fi fs.FileInfo
+}
+
+// Name returns the base name of the underlying FileInfo.
+func (d *DirEntryFromFileInfo) Name() string {
+	return d.fi.Name()
+}
+
+// IsDir reports whether the underlying FileInfo describes a directory.
+func (d *DirEntryFromFileInfo) IsDir() bool {
+	return d.fi.IsDir()
+}
+
+// Type returns the type bits of the underlying FileInfo's mode.
+func (d *DirEntryFromFileInfo) Type() fs.FileMode {
+	return d.fi.Mode().Type()
+}
+
+// Info returns the wrapped FileInfo; it never returns an error.
+func (d *DirEntryFromFileInfo) Info() (fs.FileInfo, error) {
+	return d.fi, nil
+}
+
+// dirEntryFromFileInfo wraps fi so it satisfies fs.DirEntry.
+func dirEntryFromFileInfo(fi fs.FileInfo) fs.DirEntry {
+	return &DirEntryFromFileInfo{fi}
+}
+
+// DirEntryWithFullPath pairs an fs.DirEntry with the full path of the entry.
+type DirEntryWithFullPath struct {
+	Entry fs.DirEntry
+	Path  string
+}
+
+// hasMetaExceptAlts is like indexMeta but deliberately ignores `{`; it is
+// used to decide whether an alt's common prefix is still free of meta
+// characters. A backslash escapes the byte that follows it.
+func hasMetaExceptAlts(s string) bool {
+	var c byte
+	l := len(s)
+	for i := 0; i < l; i++ {
+		c = s[i]
+		if c == '*' || c == '?' || c == '[' {
+			return true
+		} else if c == '\\' {
+			// skip next byte
+			i++
+		}
+	}
+	return false
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/match.go b/vendor/github.com/bmatcuk/doublestar/v4/match.go
new file mode 100644
index 000000000000..c0f20afa438d
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/match.go
@@ -0,0 +1,403 @@
+package doublestar
+
+import (
+	"path/filepath"
+	"unicode/utf8"
+)
+
+// Match reports whether name matches the shell pattern.
+// The pattern syntax is:
+//
+//  pattern:
+//    { term }
+//  term:
+//    '*'         matches any sequence of non-path-separators
+//    '/**/'      matches zero or more directories
+//    '?'         matches any single non-path-separator character
+//    '[' [ '^' '!' ] { character-range } ']'
+//                character class (must be non-empty)
+//                starting with `^` or `!` negates the class
+//    '{' { term } [ ',' { term } ... ] '}'
+//                alternatives
+//    c           matches character c (c != '*', '?', '\\', '[')
+//    '\\' c      matches character c
+//
+//  character-range:
+//    c           matches character c (c != '\\', '-', ']')
+//    '\\' c      matches character c
+//    lo '-' hi   matches character c for lo <= c <= hi
+//
+// Match returns true if `name` matches the file name `pattern`. `name` and
+// `pattern` are split on forward slash (`/`) characters and may be relative or
+// absolute.
+//
+// Match requires pattern to match all of name, not just a substring.
+// The only possible returned error is ErrBadPattern, when pattern
+// is malformed.
+//
+// A doublestar (`**`) should appear surrounded by path separators such as
+// `/**/`.  A mid-pattern doublestar (`**`) behaves like bash's globstar
+// option: a pattern such as `path/to/**.txt` would return the same results as
+// `path/to/*.txt`. The pattern you're looking for is `path/to/**/*.txt`.
+//
+// Note: this is meant as a drop-in replacement for path.Match() which
+// always uses '/' as the path separator. If you want to support systems
+// which use a different path separator (such as Windows), what you want
+// is PathMatch(). Alternatively, you can run filepath.ToSlash() on both
+// pattern and name and then use this function.
+//
+// Note: users should _not_ count on the returned error,
+// doublestar.ErrBadPattern, being equal to path.ErrBadPattern.
+//
+func Match(pattern, name string) (bool, error) {
+	// equivalent to matchWithSeparator with '/' and validation enabled
+	return matchWithSeparator(pattern, name, '/', true)
+}
+
+// MatchUnvalidated can provide a small performance improvement if you don't
+// care about whether or not the pattern is valid (perhaps because you already
+// ran `ValidatePattern`). Note that there's really only one case where this
+// performance improvement is realized: when pattern matching reaches the end
+// of `name` before reaching the end of `pattern`, such as `Match("a/b/c",
+// "a")`. Any error (including ErrBadPattern) is discarded.
+func MatchUnvalidated(pattern, name string) bool {
+	matched, _ := matchWithSeparator(pattern, name, '/', false)
+	return matched
+}
+
+// PathMatch returns true if `name` matches the file name `pattern`. The
+// difference between Match and PathMatch is that PathMatch will automatically
+// use your system's path separator to split `name` and `pattern`. On systems
+// where the path separator is `'\'`, escaping will be disabled.
+//
+// Note: this is meant as a drop-in replacement for filepath.Match(). It
+// assumes that both `pattern` and `name` are using the system's path
+// separator. If you can't be sure of that, use filepath.ToSlash() on both
+// `pattern` and `name`, and then use the Match() function instead.
+//
+func PathMatch(pattern, name string) (bool, error) {
+	// filepath.Separator is the OS-specific separator ('/' or '\\')
+	return matchWithSeparator(pattern, name, filepath.Separator, true)
+}
+
+// PathMatchUnvalidated can provide a small performance improvement if you
+// don't care about whether or not the pattern is valid (perhaps because you
+// already ran `ValidatePattern`). Note that there's really only one case where
+// this performance improvement is realized: when pattern matching reaches the
+// end of `name` before reaching the end of `pattern`, such as `Match("a/b/c",
+// "a")`. Any error (including ErrBadPattern) is discarded.
+func PathMatchUnvalidated(pattern, name string) bool {
+	matched, _ := matchWithSeparator(pattern, name, filepath.Separator, false)
+	return matched
+}
+
+// matchWithSeparator kicks off doMatchWithSeparator with all four backtrack
+// indexes unset (-1) and both the pattern and name cursors at 0.
+func matchWithSeparator(pattern, name string, separator rune, validate bool) (matched bool, err error) {
+	return doMatchWithSeparator(pattern, name, separator, validate, -1, -1, -1, -1, 0, 0)
+}
+
+// doMatchWithSeparator is the core matching loop. patIdx and nameIdx are byte
+// cursors into pattern and name. The four *Backtrack indexes (-1 when unset)
+// record where to resume after a failed literal match: starPattern/Name for
+// the most recent `*`, doublestarPattern/Name for the most recent `/**/`.
+// When validate is true, the unconsumed pattern is syntax-checked on failure
+// so ErrBadPattern can still be reported.
+func doMatchWithSeparator(pattern, name string, separator rune, validate bool, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, patIdx, nameIdx int) (matched bool, err error) {
+	patLen := len(pattern)
+	nameLen := len(name)
+	startOfSegment := true
+MATCH:
+	for nameIdx < nameLen {
+		if patIdx < patLen {
+			switch pattern[patIdx] {
+			case '*':
+				if patIdx++; patIdx < patLen && pattern[patIdx] == '*' {
+					// doublestar - must begin with a path separator, otherwise we'll
+					// treat it like a single star like bash
+					patIdx++
+					if startOfSegment {
+						if patIdx >= patLen {
+							// pattern ends in `/**`: return true
+							return true, nil
+						}
+
+						// doublestar must also end with a path separator, otherwise we're
+						// just going to treat the doublestar as a single star like bash
+						patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:])
+						if patRune == separator {
+							patIdx += patRuneLen
+
+							doublestarPatternBacktrack = patIdx
+							doublestarNameBacktrack = nameIdx
+							starPatternBacktrack = -1
+							starNameBacktrack = -1
+							continue
+						}
+					}
+				}
+				startOfSegment = false
+
+				starPatternBacktrack = patIdx
+				starNameBacktrack = nameIdx
+				continue
+
+			case '?':
+				startOfSegment = false
+				nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:])
+				if nameRune == separator {
+					// `?` cannot match the separator
+					break
+				}
+
+				patIdx++
+				nameIdx += nameRuneLen
+				continue
+
+			case '[':
+				startOfSegment = false
+				if patIdx++; patIdx >= patLen {
+					// class didn't end
+					return false, ErrBadPattern
+				}
+				nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:])
+
+				matched := false
+				negate := pattern[patIdx] == '!' || pattern[patIdx] == '^'
+				if negate {
+					patIdx++
+				}
+
+				if patIdx >= patLen || pattern[patIdx] == ']' {
+					// class didn't end or empty character class
+					return false, ErrBadPattern
+				}
+
+				// `last` holds the low end of a possible range; utf8.MaxRune
+				// marks "no candidate yet"
+				last := utf8.MaxRune
+				for patIdx < patLen && pattern[patIdx] != ']' {
+					patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:])
+					patIdx += patRuneLen
+
+					// match a range
+					if last < utf8.MaxRune && patRune == '-' && patIdx < patLen && pattern[patIdx] != ']' {
+						if pattern[patIdx] == '\\' {
+							// next character is escaped
+							patIdx++
+						}
+						patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:])
+						patIdx += patRuneLen
+
+						if last <= nameRune && nameRune <= patRune {
+							matched = true
+							break
+						}
+
+						// didn't match range - reset `last`
+						last = utf8.MaxRune
+						continue
+					}
+
+					// not a range - check if the next rune is escaped
+					if patRune == '\\' {
+						patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:])
+						patIdx += patRuneLen
+					}
+
+					// check if the rune matches
+					if patRune == nameRune {
+						matched = true
+						break
+					}
+
+					// no matches yet
+					last = patRune
+				}
+
+				if matched == negate {
+					// failed to match - if we reached the end of the pattern, that means
+					// we never found a closing `]`
+					if patIdx >= patLen {
+						return false, ErrBadPattern
+					}
+					break
+				}
+
+				closingIdx := indexUnescapedByte(pattern[patIdx:], ']', true)
+				if closingIdx == -1 {
+					// no closing `]`
+					return false, ErrBadPattern
+				}
+
+				patIdx += closingIdx + 1
+				nameIdx += nameRuneLen
+				continue
+
+			case '{':
+				startOfSegment = false
+				beforeIdx := patIdx
+				patIdx++
+				closingIdx := indexMatchedClosingAlt(pattern[patIdx:], separator != '\\')
+				if closingIdx == -1 {
+					// no closing `}`
+					return false, ErrBadPattern
+				}
+				closingIdx += patIdx
+
+				// try each alternative in turn by splicing it into the pattern
+				// and recursing
+				for {
+					commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\')
+					if commaIdx == -1 {
+						break
+					}
+					commaIdx += patIdx
+
+					result, err := doMatchWithSeparator(pattern[:beforeIdx]+pattern[patIdx:commaIdx]+pattern[closingIdx+1:], name, separator, validate, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, beforeIdx, nameIdx)
+					if result || err != nil {
+						return result, err
+					}
+
+					patIdx = commaIdx + 1
+				}
+				// last (or only) alternative
+				return doMatchWithSeparator(pattern[:beforeIdx]+pattern[patIdx:closingIdx]+pattern[closingIdx+1:], name, separator, validate, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, beforeIdx, nameIdx)
+
+			case '\\':
+				if separator != '\\' {
+					// next rune is "escaped" in the pattern - literal match
+					if patIdx++; patIdx >= patLen {
+						// pattern ended
+						return false, ErrBadPattern
+					}
+				}
+				fallthrough
+
+			default:
+				patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:])
+				nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:])
+				if patRune != nameRune {
+					if separator != '\\' && patIdx > 0 && pattern[patIdx-1] == '\\' {
+						// if this rune was meant to be escaped, we need to move patIdx
+						// back to the backslash before backtracking or validating below
+						patIdx--
+					}
+					break
+				}
+
+				patIdx += patRuneLen
+				nameIdx += nameRuneLen
+				startOfSegment = patRune == separator
+				continue
+			}
+		}
+
+		if starPatternBacktrack >= 0 {
+			// `*` backtrack, but only if the `name` rune isn't the separator
+			nameRune, nameRuneLen := utf8.DecodeRuneInString(name[starNameBacktrack:])
+			if nameRune != separator {
+				starNameBacktrack += nameRuneLen
+				patIdx = starPatternBacktrack
+				nameIdx = starNameBacktrack
+				startOfSegment = false
+				continue
+			}
+		}
+
+		if doublestarPatternBacktrack >= 0 {
+			// `**` backtrack, advance `name` past next separator
+			nameIdx = doublestarNameBacktrack
+			for nameIdx < nameLen {
+				nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:])
+				nameIdx += nameRuneLen
+				if nameRune == separator {
+					doublestarNameBacktrack = nameIdx
+					patIdx = doublestarPatternBacktrack
+					startOfSegment = true
+					continue MATCH
+				}
+			}
+		}
+
+		if validate && patIdx < patLen && !doValidatePattern(pattern[patIdx:], separator) {
+			return false, ErrBadPattern
+		}
+		return false, nil
+	}
+
+	if nameIdx < nameLen {
+		// we reached the end of `pattern` before the end of `name`
+		return false, nil
+	}
+
+	// we've reached the end of `name`; we've successfully matched if we've also
+	// reached the end of `pattern`, or if the rest of `pattern` can match a
+	// zero-length string
+	return isZeroLengthPattern(pattern[patIdx:], separator)
+}
+
+// isZeroLengthPattern reports whether `pattern` can match the empty string.
+// Alts are expanded recursively; any failure to find a closing `}` reports
+// ErrBadPattern, and on a negative result the remaining pattern is still
+// syntax-validated.
+func isZeroLengthPattern(pattern string, separator rune) (ret bool, err error) {
+	// `/**`, `**/`, and `/**/` are special cases - a pattern such as `path/to/a/**` or `path/to/a/**/`
+	// *should* match `path/to/a` because `a` might be a directory
+	if pattern == "" ||
+		pattern == "*" ||
+		pattern == "**" ||
+		pattern == string(separator)+"**" ||
+		pattern == "**"+string(separator) ||
+		pattern == string(separator)+"**"+string(separator) {
+		return true, nil
+	}
+
+	if pattern[0] == '{' {
+		closingIdx := indexMatchedClosingAlt(pattern[1:], separator != '\\')
+		if closingIdx == -1 {
+			// no closing '}'
+			return false, ErrBadPattern
+		}
+		closingIdx += 1
+
+		// try each alternative: the alt group is zero-length if any branch is
+		patIdx := 1
+		for {
+			commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\')
+			if commaIdx == -1 {
+				break
+			}
+			commaIdx += patIdx
+
+			ret, err = isZeroLengthPattern(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], separator)
+			if ret || err != nil {
+				return
+			}
+
+			patIdx = commaIdx + 1
+		}
+		return isZeroLengthPattern(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], separator)
+	}
+
+	// no luck - validate the rest of the pattern
+	if !doValidatePattern(pattern, separator) {
+		return false, ErrBadPattern
+	}
+	return false, nil
+}
+
+// Finds the index of the first unescaped byte `c`, or negative 1. When
+// allowEscaping is true, a backslash escapes the byte that follows it.
+func indexUnescapedByte(s string, c byte, allowEscaping bool) int {
+	l := len(s)
+	for i := 0; i < l; i++ {
+		if allowEscaping && s[i] == '\\' {
+			// skip next byte
+			i++
+		} else if s[i] == c {
+			return i
+		}
+	}
+	return -1
+}
+
+// Assuming the byte before the beginning of `s` is an opening `{`, this
+// function will find the index of the matching `}`. That is, it'll skip over
+// any nested `{}` and account for escaping. Returns -1 if the alt is never
+// closed.
+func indexMatchedClosingAlt(s string, allowEscaping bool) int {
+	alts := 1
+	l := len(s)
+	for i := 0; i < l; i++ {
+		if allowEscaping && s[i] == '\\' {
+			// skip next byte
+			i++
+		} else if s[i] == '{' {
+			alts++
+		} else if s[i] == '}' {
+			if alts--; alts == 0 {
+				return i
+			}
+		}
+	}
+	return -1
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/utils.go b/vendor/github.com/bmatcuk/doublestar/v4/utils.go
new file mode 100644
index 000000000000..6b8df9a3895b
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/utils.go
@@ -0,0 +1,157 @@
+package doublestar
+
+import (
+	"errors"
+	"os"
+	"path"
+	"path/filepath"
+	"strings"
+)
+
+// SplitPattern is a utility function. Given a pattern, SplitPattern will
+// return two strings: the first string is everything up to the last slash
+// (`/`) that appears _before_ any unescaped "meta" characters (ie, `*?[{`).
+// The second string is everything after that slash. For example, given the
+// pattern:
+//
+//   ../../path/to/meta*/**
+//                ^----------- split here
+//
+// SplitPattern returns "../../path/to" and "meta*/**". This is useful for
+// initializing os.DirFS() to call Glob() because Glob() will silently fail if
+// your pattern includes `/./` or `/../`. For example:
+//
+//   base, pattern := SplitPattern("../../path/to/meta*/**")
+//   fsys := os.DirFS(base)
+//   matches, err := Glob(fsys, pattern)
+//
+// If SplitPattern cannot find somewhere to split the pattern (for example,
+// `meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in
+// this example).
+//
+// Of course, it is your responsibility to decide if the returned base path is
+// "safe" in the context of your application. Perhaps you could use Match() to
+// validate against a list of approved base directories?
+//
+func SplitPattern(p string) (base, pattern string) {
+	base = "."
+	pattern = p
+
+	splitIdx := -1
+	for i := 0; i < len(p); i++ {
+		c := p[i]
+		if c == '\\' {
+			i++
+		} else if c == '/' {
+			splitIdx = i
+		} else if c == '*' || c == '?' || c == '[' || c == '{' {
+			break
+		}
+	}
+
+	if splitIdx == 0 {
+		return "/", p[1:]
+	} else if splitIdx > 0 {
+		return p[:splitIdx], p[splitIdx+1:]
+	}
+
+	return
+}
+
+// FilepathGlob returns the names of all files matching pattern or nil if there
+// is no matching file. The syntax of pattern is the same as in Match(). The
+// pattern may describe hierarchical names such as usr/*/bin/ed.
+//
+// FilepathGlob ignores file system errors such as I/O errors reading
+// directories by default. The only possible returned error is ErrBadPattern,
+// reporting that the pattern is malformed.
+//
+// To enable aborting on I/O errors, the WithFailOnIOErrors option can be
+// passed.
+//
+// Note: FilepathGlob is a convenience function that is meant as a drop-in
+// replacement for `path/filepath.Glob()` for users who don't need the
+// complication of io/fs. Basically, it:
+//   - Runs `filepath.Clean()` and `ToSlash()` on the pattern
+//   - Runs `SplitPattern()` to get a base path and a pattern to Glob
+//   - Creates an FS object from the base path and `Glob()s` on the pattern
+//   - Joins the base path with all of the matches from `Glob()`
+//
+// Returned paths will use the system's path separator, just like
+// `filepath.Glob()`.
+//
+// Note: the returned error doublestar.ErrBadPattern is not equal to
+// filepath.ErrBadPattern.
+//
+func FilepathGlob(pattern string, opts ...GlobOption) (matches []string, err error) {
+	if pattern == "" {
+		// special case to match filepath.Glob behavior
+		g := newGlob(opts...)
+		if g.failOnIOErrors {
+			// match doublestar.Glob behavior here
+			return nil, os.ErrInvalid
+		}
+		return nil, nil
+	}
+
+	pattern = filepath.Clean(pattern)
+	pattern = filepath.ToSlash(pattern)
+	base, f := SplitPattern(pattern)
+	if f == "" || f == "." || f == ".." {
+		// some special cases to match filepath.Glob behavior
+		if !ValidatePathPattern(pattern) {
+			return nil, ErrBadPattern
+		}
+
+		if filepath.Separator != '\\' {
+			pattern = unescapeMeta(pattern)
+		}
+
+		if _, err = os.Lstat(pattern); err != nil {
+			g := newGlob(opts...)
+			if errors.Is(err, os.ErrNotExist) {
+				return nil, g.handlePatternNotExist(true)
+			}
+			return nil, g.forwardErrIfFailOnIOErrors(err)
+		}
+		return []string{filepath.FromSlash(pattern)}, nil
+	}
+
+	fs := os.DirFS(base)
+	if matches, err = Glob(fs, f, opts...); err != nil {
+		return nil, err
+	}
+	for i := range matches {
+		// use path.Join because we used ToSlash above to ensure our paths are made
+		// of forward slashes, no matter what the system uses
+		matches[i] = filepath.FromSlash(path.Join(base, matches[i]))
+	}
+	return
+}
+
+// Finds the next comma, but ignores any commas that appear inside nested `{}`.
+// Assumes that each opening bracket has a corresponding closing bracket.
+func indexNextAlt(s string, allowEscaping bool) int {
+	alts := 1
+	l := len(s)
+	for i := 0; i < l; i++ {
+		if allowEscaping && s[i] == '\\' {
+			// skip next byte
+			i++
+		} else if s[i] == '{' {
+			alts++
+		} else if s[i] == '}' {
+			alts--
+		} else if s[i] == ',' && alts == 1 {
+			return i
+		}
+	}
+	return -1
+}
+
+var metaReplacer = strings.NewReplacer("\\*", "*", "\\?", "?", "\\[", "[", "\\]", "]", "\\{", "{", "\\}", "}")
+
+// Unescapes meta characters (*?[]{})
+func unescapeMeta(pattern string) string {
+	return metaReplacer.Replace(pattern)
+}
diff --git a/vendor/github.com/bmatcuk/doublestar/v4/validate.go b/vendor/github.com/bmatcuk/doublestar/v4/validate.go
new file mode 100644
index 000000000000..c689b9ebab81
--- /dev/null
+++ b/vendor/github.com/bmatcuk/doublestar/v4/validate.go
@@ -0,0 +1,82 @@
+package doublestar
+
+import "path/filepath"
+
+// Validate a pattern. Patterns are validated while they run in Match(),
+// PathMatch(), and Glob(), so, you normally wouldn't need to call this.
+// However, there are cases where this might be useful: for example, if your
+// program allows a user to enter a pattern that you'll run at a later time,
+// you might want to validate it.
+//
+// ValidatePattern assumes your pattern uses '/' as the path separator.
+//
+func ValidatePattern(s string) bool {
+	return doValidatePattern(s, '/')
+}
+
+// Like ValidatePattern, only uses your OS path separator. In other words, use
+// ValidatePattern if you would normally use Match() or Glob(). Use
+// ValidatePathPattern if you would normally use PathMatch(). Keep in mind,
+// Glob() requires '/' separators, even if your OS uses something else.
+//
+func ValidatePathPattern(s string) bool {
+	return doValidatePattern(s, filepath.Separator)
+}
+
+func doValidatePattern(s string, separator rune) bool {
+	altDepth := 0
+	l := len(s)
+VALIDATE:
+	for i := 0; i < l; i++ {
+		switch s[i] {
+		case '\\':
+			if separator != '\\' {
+				// skip the next byte - return false if there is no next byte
+				if i++; i >= l {
+					return false
+				}
+			}
+			continue
+
+		case '[':
+			if i++; i >= l {
+				// class didn't end
+				return false
+			}
+			if s[i] == '^' || s[i] == '!' {
+				i++
+			}
+			if i >= l || s[i] == ']' {
+				// class didn't end or empty character class
+				return false
+			}
+
+			for ; i < l; i++ {
+				if separator != '\\' && s[i] == '\\' {
+					i++
+				} else if s[i] == ']' {
+					// looks good
+					continue VALIDATE
+				}
+			}
+
+			// class didn't end
+			return false
+
+		case '{':
+			altDepth++
+			continue
+
+		case '}':
+			if altDepth == 0 {
+				// alt end without a corresponding start
+				return false
+			}
+			altDepth--
+			continue
+		}
+	}
+
+	// valid as long as all alts are closed
+	return altDepth == 0
+}
diff --git a/vendor/github.com/coder/quartz/.gitignore b/vendor/github.com/coder/quartz/.gitignore
deleted file mode 100644
index 62c893550adb..000000000000
--- a/vendor/github.com/coder/quartz/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.idea/
\ No newline at end of file
diff --git a/vendor/github.com/coder/quartz/LICENSE b/vendor/github.com/coder/quartz/LICENSE
deleted file mode 100644
index f7c5d7fee65c..000000000000
--- a/vendor/github.com/coder/quartz/LICENSE
+++ /dev/null
@@ -1,121 +0,0 @@
-Creative Commons Legal Code
-
-CC0 1.0 Universal
-
-    CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
-    LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
-    ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
-    INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
-    REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
-    PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
-    THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
-    HEREUNDER.
-
-Statement of Purpose
-
-The laws of most jurisdictions throughout the world automatically confer
-exclusive Copyright and Related Rights (defined below) upon the creator
-and subsequent owner(s) (each and all, an "owner") of an original work of
-authorship and/or a database (each, a "Work").
-
-Certain owners wish to permanently relinquish those rights to a Work for
-the purpose of contributing to a commons of creative, cultural and
-scientific works ("Commons") that the public can reliably and without fear
-of later claims of infringement build upon, modify, incorporate in other
-works, reuse and redistribute as freely as possible in any form whatsoever
-and for any purposes, including without limitation commercial purposes.
-These owners may contribute to the Commons to promote the ideal of a free
-culture and the further production of creative, cultural and scientific
-works, or to gain reputation or greater distribution for their Work in
-part through the use and efforts of others.
-
-For these and/or other purposes and motivations, and without any
-expectation of additional consideration or compensation, the person
-associating CC0 with a Work (the "Affirmer"), to the extent that he or she
-is an owner of Copyright and Related Rights in the Work, voluntarily
-elects to apply CC0 to the Work and publicly distribute the Work under its
-terms, with knowledge of his or her Copyright and Related Rights in the
-Work and the meaning and intended legal effect of CC0 on those rights.
-
-1. Copyright and Related Rights. A Work made available under CC0 may be
-   protected by copyright and related or neighboring rights ("Copyright and
-   Related Rights"). Copyright and Related Rights include, but are not
-   limited to, the following:
-
-i. the right to reproduce, adapt, distribute, perform, display,
-communicate, and translate a Work;
-ii. moral rights retained by the original author(s) and/or performer(s);
-iii. publicity and privacy rights pertaining to a person's image or
-likeness depicted in a Work;
-iv. rights protecting against unfair competition in regards to a Work,
-subject to the limitations in paragraph 4(a), below;
-v. rights protecting the extraction, dissemination, use and reuse of data
-in a Work;
-vi. database rights (such as those arising under Directive 96/9/EC of the
-European Parliament and of the Council of 11 March 1996 on the legal
-protection of databases, and under any national implementation
-thereof, including any amended or successor version of such
-directive); and
-vii. other similar, equivalent or corresponding rights throughout the
-world based on applicable law or treaty, and any national
-implementations thereof.
-
-2. Waiver. To the greatest extent permitted by, but not in contravention
-   of, applicable law, Affirmer hereby overtly, fully, permanently,
-   irrevocably and unconditionally waives, abandons, and surrenders all of
-   Affirmer's Copyright and Related Rights and associated claims and causes
-   of action, whether now known or unknown (including existing as well as
-   future claims and causes of action), in the Work (i) in all territories
-   worldwide, (ii) for the maximum duration provided by applicable law or
-   treaty (including future time extensions), (iii) in any current or future
-   medium and for any number of copies, and (iv) for any purpose whatsoever,
-   including without limitation commercial, advertising or promotional
-   purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
-   member of the public at large and to the detriment of Affirmer's heirs and
-   successors, fully intending that such Waiver shall not be subject to
-   revocation, rescission, cancellation, termination, or any other legal or
-   equitable action to disrupt the quiet enjoyment of the Work by the public
-   as contemplated by Affirmer's express Statement of Purpose.
-
-3. Public License Fallback. Should any part of the Waiver for any reason
-   be judged legally invalid or ineffective under applicable law, then the
-   Waiver shall be preserved to the maximum extent permitted taking into
-   account Affirmer's express Statement of Purpose. In addition, to the
-   extent the Waiver is so judged Affirmer hereby grants to each affected
-   person a royalty-free, non transferable, non sublicensable, non exclusive,
-   irrevocable and unconditional license to exercise Affirmer's Copyright and
-   Related Rights in the Work (i) in all territories worldwide, (ii) for the
-   maximum duration provided by applicable law or treaty (including future
-   time extensions), (iii) in any current or future medium and for any number
-   of copies, and (iv) for any purpose whatsoever, including without
-   limitation commercial, advertising or promotional purposes (the
-   "License"). The License shall be deemed effective as of the date CC0 was
-   applied by Affirmer to the Work. Should any part of the License for any
-   reason be judged legally invalid or ineffective under applicable law, such
-   partial invalidity or ineffectiveness shall not invalidate the remainder
-   of the License, and in such case Affirmer hereby affirms that he or she
-   will not (i) exercise any of his or her remaining Copyright and Related
-   Rights in the Work or (ii) assert any associated claims and causes of
-   action with respect to the Work, in either case contrary to Affirmer's
-   express Statement of Purpose.
-
-4. Limitations and Disclaimers.
-
-a. No trademark or patent rights held by Affirmer are waived, abandoned,
-surrendered, licensed or otherwise affected by this document.
-b. Affirmer offers the Work as-is and makes no representations or
-warranties of any kind concerning the Work, express, implied,
-statutory or otherwise, including without limitation warranties of
-title, merchantability, fitness for a particular purpose, non
-infringement, or the absence of latent or other defects, accuracy, or
-the present or absence of errors, whether or not discoverable, all to
-the greatest extent permissible under applicable law.
-c. Affirmer disclaims responsibility for clearing rights of other persons
-that may apply to the Work or any use thereof, including without
-limitation any person's Copyright and Related Rights in the Work.
-Further, Affirmer disclaims responsibility for obtaining any necessary
-consents, permissions or other rights required for any use of the
-Work.
-d. Affirmer understands and acknowledges that Creative Commons is not a
-party to this document and has no duty or obligation with respect to
-this CC0 or use of the Work.
\ No newline at end of file
diff --git a/vendor/github.com/coder/quartz/README.md b/vendor/github.com/coder/quartz/README.md
deleted file mode 100644
index f0703c4d7456..000000000000
--- a/vendor/github.com/coder/quartz/README.md
+++ /dev/null
@@ -1,632 +0,0 @@
-# Quartz
-
-A Go time testing library for writing deterministic unit tests
-
-Our high level goal is to write unit tests that
-
-1. execute quickly
-2. don't flake
-3. are straightforward to write and understand
-
-For tests to execute quickly without flakes, we want to focus on _determinism_: the test should run
-the same each time, and it should be easy to force the system into a known state (no races) before
-executing test assertions. `time.Sleep`, `runtime.Gosched()`, and
-polling/[Eventually](https://pkg.go.dev/github.com/stretchr/testify/assert#Eventually) are all
-symptoms of an inability to do this easily.
-
-## Usage
-
-### `Clock` interface
-
-In your application code, maintain a reference to a `quartz.Clock` instance to start timers and
-tickers, instead of the bare `time` standard library.
-
-```go
-import "github.com/coder/quartz"
-
-type Component struct {
-	...
-
-	// for testing
-	clock quartz.Clock
-}
-```
-
-Whenever you would call into `time` to start a timer or ticker, call `Component`'s `clock` instead.
-
-In production, set this clock to `quartz.NewReal()` to create a clock that just transparently passes
-through to the standard `time` library.
-
-### Mocking
-
-In your tests, you can use a `*Mock` to control the tickers and timers your code under test gets.
-
-```go
-import (
-	"testing"
-	"github.com/coder/quartz"
-)
-
-func TestComponent(t *testing.T) {
-	mClock := quartz.NewMock(t)
-	comp := &Component{
-		...
-		clock: mClock,
-	}
-}
-```
-
-The `*Mock` clock starts at Jan 1, 2024, 00:00 UTC by default, but you can set any start time you'd like prior to your test.
-
-```go
-mClock := quartz.NewMock(t)
-mClock.Set(time.Date(2021, 6, 18, 12, 0, 0, 0, time.UTC)) // June 18, 2021 @ 12pm UTC
-```
-
-#### Advancing the clock
-
-Once you begin setting timers or tickers, you cannot change the time backward, only advance it
-forward. You may continue to use `Set()`, but it is often easier and clearer to use `Advance()`.
-
-For example, with a timer:
-
-```go
-fired := false
-
-tmr := mClock.AfterFunc(time.Second, func() {
-  fired = true
-})
-mClock.Advance(time.Second)
-```
-
-When you call `Advance()` it immediately moves the clock forward the given amount, and triggers any
-tickers or timers that are scheduled to happen at that time. Any triggered events happen on separate
-goroutines, so _do not_ immediately assert the results:
-
-```go
-fired := false
-
-tmr := mClock.AfterFunc(time.Second, func() {
-  fired = true
-})
-mClock.Advance(time.Second)
-
-// RACE CONDITION, DO NOT DO THIS!
-if !fired {
-  t.Fatal("didn't fire")
-}
-```
-
-`Advance()` (and `Set()` for that matter) return an `AdvanceWaiter` object you can use to wait for
-all triggered events to complete.
-
-```go
-fired := false
-// set a test timeout so we don't wait the default `go test` timeout for a failure
-ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
-
-tmr := mClock.AfterFunc(time.Second, func() {
-  fired = true
-})
-
-w := mClock.Advance(time.Second)
-err := w.Wait(ctx)
-if err != nil {
-  t.Fatal("AfterFunc f never completed")
-}
-if !fired {
-  t.Fatal("didn't fire")
-}
-```
-
-The construction of waiting for the triggered events and failing the test if they don't complete is
-very common, so there is a shorthand:
-
-```go
-w := mClock.Advance(time.Second)
-err := w.Wait(ctx)
-if err != nil {
-  t.Fatal("AfterFunc f never completed")
-}
-```
-
-is equivalent to:
-
-```go
-w := mClock.Advance(time.Second)
-w.MustWait(ctx)
-```
-
-or even more briefly:
-
-```go
-mClock.Advance(time.Second).MustWait(ctx)
-```
-
-### Advance only to the next event
-
-One important restriction on advancing the clock is that you may only advance forward to the next
-timer or ticker event and no further. The following will result in a test failure:
-
-```go
-func TestAdvanceTooFar(t *testing.T) {
-	ctx, cancel := context.WithTimeout(10*time.Second)
-	defer cancel()
-	mClock := quartz.NewMock(t)
-	var firedAt time.Time
-	mClock.AfterFunc(time.Second, func() {
-		firedAt := mClock.Now()
-	})
-	mClock.Advance(2*time.Second).MustWait(ctx)
-}
-```
-
-This is a deliberate design decision to allow `Advance()` to immediately and synchronously move the
-clock forward (even without calling `Wait()` on returned waiter). This helps meet Quartz's design
-goals of writing deterministic and easy to understand unit tests. It also allows the clock to be
-advanced, deterministically _during_ the execution of a tick or timer function, as explained in the
-next sections on Traps.
-
-Advancing multiple events can be accomplished via looping. E.g. if you have a 1-second ticker
-
-```go
-for i := 0; i < 10; i++ {
-	mClock.Advance(time.Second).MustWait(ctx)
-}
-```
-
-will advance 10 ticks.
-
-If you don't know or don't want to compute the time to the next event, you can use `AdvanceNext()`.
-
-```go
-d, w := mClock.AdvanceNext()
-w.MustWait(ctx)
-// d contains the duration we advanced
-```
-
-`d, ok := Peek()` returns the duration until the next event, if any (`ok` is `true`). You can use
-this to advance a specific time, regardless of the tickers and timer events:
-
-```go
-desired := time.Minute // time to advance
-for desired > 0 {
-	p, ok := mClock.Peek()
-	if !ok || p > desired {
-		mClock.Advance(desired).MustWait(ctx)
-		break
-	}
-	mClock.Advance(p).MustWait(ctx)
-	desired -= p
-}
-```
-
-### Traps
-
-A trap allows you to match specific calls into the library while mocking, block their return,
-inspect their arguments, then release them to allow them to return. They help you write
-deterministic unit tests even when the code under test executes asynchronously from the test.
-
-You set your traps prior to executing code under test, and then wait for them to be triggered.
-
-```go
-func TestTrap(t *testing.T) {
-	ctx, cancel := context.WithTimeout(10*time.Second)
-	defer cancel()
-	mClock := quartz.NewMock(t)
-	trap := mClock.Trap().AfterFunc()
-	defer trap.Close() // stop trapping AfterFunc calls
-
-	count := 0
-	go mClock.AfterFunc(time.Hour, func(){
-		count++
-	})
-	call := trap.MustWait(ctx)
-	call.Release()
-	if call.Duration != time.Hour {
-		t.Fatal("wrong duration")
-	}
-
-	// Now that the async call to AfterFunc has occurred, we can advance the clock to trigger it
-	mClock.Advance(call.Duration).MustWait(ctx)
-	if count != 1 {
-		t.Fatal("wrong count")
-	}
-}
-```
-
-In this test, the trap serves 2 purposes. Firstly, it allows us to capture and assert the duration
-passed to the `AfterFunc` call. Secondly, it prevents a race between setting the timer and advancing
-it. Since these things happen on different goroutines, if `Advance()` completes before
-`AfterFunc()` is called, then the timer never pops in this test.
-
-Any untrapped calls immediately complete using the current time, and calling `Close()` on a trap
-causes the mock clock to stop trapping those calls.
-
-You may also `Advance()` the clock between trapping a call and releasing it. The call uses the
-current (mocked) time at the moment it is released.
-
-```go
-func TestTrap2(t *testing.T) {
-	ctx, cancel := context.WithTimeout(10*time.Second)
-	defer cancel()
-	mClock := quartz.NewMock(t)
-	trap := mClock.Trap().Now()
-	defer trap.Close() // stop trapping AfterFunc calls
-
-	var logs []string
-	done := make(chan struct{})
-	go func(clk quartz.Clock){
-		defer close(done)
-		start := clk.Now()
-		phase1()
-		p1end := clk.Now()
-		logs = append(fmt.Sprintf("Phase 1 took %s", p1end.Sub(start).String()))
-		phase2()
-		p2end := clk.Now()
-		logs = append(fmt.Sprintf("Phase 2 took %s", p2end.Sub(p1end).String()))
-	}(mClock)
-
-	// start
-	trap.MustWait(ctx).Release()
-	// phase 1
-	call := trap.MustWait(ctx)
-	mClock.Advance(3*time.Second).MustWait(ctx)
-	call.Release()
-	// phase 2
-	call = trap.MustWait(ctx)
-	mClock.Advance(5*time.Second).MustWait(ctx)
-	call.Release()
-
-	<-done
-	// Now logs contains []string{"Phase 1 took 3s", "Phase 2 took 5s"}
-}
-```
-
-### Tags
-
-When multiple goroutines in the code under test call into the Clock, you can use `tags` to
-distinguish them in your traps.
-
-```go
-trap := mClock.Trap.Now("foo") // traps any calls that contain "foo"
-defer trap.Close()
-
-foo := make(chan time.Time)
-go func(){
-	foo <- mClock.Now("foo", "bar")
-}()
-baz := make(chan time.Time)
-go func(){
-	baz <- mClock.Now("baz")
-}()
-call := trap.MustWait(ctx)
-mClock.Advance(time.Second).MustWait(ctx)
-call.Release()
-// call.Tags contains []string{"foo", "bar"}
-
-gotFoo := <-foo // 1s after start
-gotBaz := <-baz // ?? never trapped, so races with Advance()
-```
-
-Tags appear as an optional suffix on all `Clock` methods (type `...string`) and are ignored entirely
-by the real clock. They also appear on all methods on returned timers and tickers.
-
-## Recommended Patterns
-
-### Options
-
-We use the Option pattern to inject the mock clock for testing, keeping the call signature in
-production clean. The option pattern is compatible with other optional fields as well.
-
-```go
-type Option func(*Thing)
-
-// WithTestClock is used in tests to inject a mock Clock
-func WithTestClock(clk quartz.Clock) Option {
-	return func(t *Thing) {
-		t.clock = clk
-	}
-}
-
-func NewThing(, opts ...Option) *Thing {
-	t := &Thing{
-		...
-		clock: quartz.NewReal()
-	}
-	for _, o := range opts {
-	  o(t)
-	}
-	return t
-}
-```
-
-In tests, this becomes
-
-```go
-func TestThing(t *testing.T) {
-	mClock := quartz.NewMock(t)
-	thing := NewThing(, WithTestClock(mClock))
-	...
-}
-```
-
-### Tagging convention
-
-Tag your `Clock` method calls as:
-
-```go
-func (c *Component) Method() {
-	now := c.clock.Now("Component", "Method")
-}
-```
-
-or
-
-```go
-func (c *Component) Method() {
-	start := c.clock.Now("Component", "Method", "start")
-	...
-	end := c.clock.Now("Component", "Method", "end")
-}
-```
-
-This makes it much less likely that code changes that introduce new components or methods will spoil
-existing unit tests.
-
-## Why another time testing library?
-
-Writing good unit tests for components and functions that use the `time` package is difficult, even
-though several open source libraries exist. In building Quartz, we took some inspiration from
-
-- [github.com/benbjohnson/clock](https://github.com/benbjohnson/clock)
-- Tailscale's [tstest.Clock](https://github.com/coder/tailscale/blob/main/tstest/clock.go)
-- [github.com/aspenmesh/tock](https://github.com/aspenmesh/tock)
-
-Quartz shares the high level design of a `Clock` interface that closely resembles the functions in
-the `time` standard library, and a "real" clock passes thru to the standard library in production,
-while a mock clock gives precise control in testing.
-
-As mentioned in our introduction, our high level goal is to write unit tests that
-
-1. execute quickly
-2. don't flake
-3. are straightforward to write and understand
-
-For several reasons, this is a tall order when it comes to code that depends on time, and we found
-the existing libraries insufficient for our goals.
-
-### Preventing test flakes
-
-The following example comes from the README from benbjohnson/clock:
-
-```go
-mock := clock.NewMock()
-count := 0
-
-// Kick off a timer to increment every 1 mock second.
-go func() {
-	ticker := mock.Ticker(1 * time.Second)
-	for {
-		<-ticker.C
-		count++
-	}
-}()
-runtime.Gosched()
-
-// Move the clock forward 10 seconds.
-mock.Add(10 * time.Second)
-
-// This prints 10.
-fmt.Println(count)
-```
-
-The first race condition is fairly obvious: moving the clock forward 10 seconds may generate 10
-ticks on the `ticker.C` channel, but there is no guarantee that `count++` executes before
-`fmt.Println(count)`.
-
-The second race condition is more subtle, but `runtime.Gosched()` is the tell. Since the ticker
-is started on a separate goroutine, there is no guarantee that `mock.Ticker()` executes before
-`mock.Add()`. `runtime.Gosched()` is an attempt to get this to happen, but it makes no hard
-promises. On a busy system, especially when running tests in parallel, this can flake, advance the
-time 10 seconds first, then start the ticker and never generate a tick.
-
-Let's talk about how Quartz tackles these problems.
-
-In our experience, an extremely common use case is creating a ticker then doing a 2-arm `select`
-with ticks in one and context expiring in another, i.e.
-
-```go
-t := time.NewTicker(duration)
-for {
-	select {
-	case <-ctx.Done():
-		return ctx.Err()
-	case <-t.C:
-		err := do()
-		if err != nil {
-			return err
-		}
-	}
-}
-```
-
-In Quartz, we refactor this to be more compact and testing friendly:
-
-```go
-t := clock.TickerFunc(ctx, duration, do)
-return t.Wait()
-```
-
-This affords the mock `Clock` the ability to explicitly know when processing of a tick is finished
-because it's wrapped in the function passed to `TickerFunc` (`do()` in this example).
-
-In Quartz, when you advance the clock, you are returned an object you can `Wait()` on to ensure all
-ticks and timers triggered are finished. This solves the first race condition in the example.
-
-(As an aside, we still support a traditional standard library-style `Ticker`. You may find it useful
-if you want to keep your code as close as possible to the standard library, or if you need to use
-the channel in a larger `select` block. In that case, you'll have to find some other mechanism to
-sync tick processing to your test code.)
-
-To prevent race conditions related to the starting of the ticker, Quartz allows you to set "traps"
-for calls that access the clock.
-
-```go
-func TestTicker(t *testing.T) {
-	mClock := quartz.NewMock(t)
-	trap := mClock.Trap().TickerFunc()
-	defer trap.Close() // stop trapping at end
-	go runMyTicker(mClock) // async calls TickerFunc()
-	call := trap.Wait(context.Background()) // waits for a call and blocks its return
-	call.Release() // allow the TickerFunc() call to return
-	// optionally check the duration using call.Duration
-	// Move the clock forward 1 tick
-	mClock.Advance(time.Second).MustWait(context.Background())
-	// assert results of the tick
-}
-```
-
-Trapping and then releasing the call to `TickerFunc()` ensures the ticker is started at a
-deterministic time, so our calls to `Advance()` will have a predictable effect.
-
-Take a look at `TestExampleTickerFunc` in `example_test.go` for a complete worked example.
-
-### Complex time dependence
-
-Another difficult issue to handle when unit testing is when some code under test makes multiple
-calls that depend on the time, and you want to simulate some time passing between them.
-
-A very basic example is measuring how long something took:
-
-```go
-var measurement time.Duration
-go func(clock quartz.Clock) {
-	start := clock.Now()
-	doSomething()
-	measurement = clock.Since(start)
-}(mClock)
-
-// how to get measurement to be, say, 5 seconds?
-```
-
-The two calls into the clock happen asynchronously, so we need to be able to advance the clock after
-the first call to `Now()` but before the call to `Since()`. Doing this with the libraries we
-mentioned above means that you have to be able to mock out or otherwise block the completion of
-`doSomething()`.
-
-But, with the trap functionality we mentioned in the previous section, you can deterministically
-control the time each call sees.
-
-```go
-trap := mClock.Trap().Since()
-var measurement time.Duration
-go func(clock quartz.Clock) {
-	start := clock.Now()
-	doSomething()
-	measurement = clock.Since(start)
-}(mClock)
-
-c := trap.Wait(ctx)
-mClock.Advance(5*time.Second)
-c.Release()
-```
-
-We wait until we trap the `clock.Since()` call, which implies that `clock.Now()` has completed, then
-advance the mock clock 5 seconds. Finally, we release the `clock.Since()` call. Any changes to the
-clock that happen _before_ we release the call will be included in the time used for the
-`clock.Since()` call.
-
-As a more involved example, consider an inactivity timeout: we want something to happen if there is
-no activity recorded for some period, say 10 minutes in the following example:
-
-```go
-type InactivityTimer struct {
-	mu sync.Mutex
-	activity time.Time
-	clock quartz.Clock
-}
-
-func (i *InactivityTimer) Start() {
-	i.mu.Lock()
-	defer i.mu.Unlock()
-	next := i.clock.Until(i.activity.Add(10*time.Minute))
-	t := i.clock.AfterFunc(next, func() {
-		i.mu.Lock()
-		defer i.mu.Unlock()
-		next := i.clock.Until(i.activity.Add(10*time.Minute))
-		if next == 0 {
-			i.timeoutLocked()
-			return
-		}
-		t.Reset(next)
-	})
-}
-```
-
-The actual contents of `timeoutLocked()` doesn't matter for this example, and assume there are other
-functions that record the latest `activity`.
-
-We found that some time testing libraries hold a lock on the mock clock while calling the function
-passed to `AfterFunc`, resulting in a deadlock if you made clock calls from within.
-
-Others allow this sort of thing, but don't have the flexibility to test edge cases. There is a
-subtle bug in our `Start()` function. The timer may pop a little late, and/or some measurable real
-time may elapse before `Until()` gets called inside the `AfterFunc`. If there hasn't been activity,
-`next` might be negative.
-
-To test this in Quartz, we'll use a trap. We only want to trap the inner `Until()` call, not the
-initial one, so to make testing easier we can "tag" the call we want. Like this:
-
-```go
-func (i *InactivityTimer) Start() {
-	i.mu.Lock()
-	defer i.mu.Unlock()
-	next := i.clock.Until(i.activity.Add(10*time.Minute))
-	t := i.clock.AfterFunc(next, func() {
-		i.mu.Lock()
-		defer i.mu.Unlock()
-		next := i.clock.Until(i.activity.Add(10*time.Minute), "inner")
-		if next == 0 {
-			i.timeoutLocked()
-			return
-		}
-		t.Reset(next)
-	})
-}
-```
-
-All Quartz `Clock` functions, and functions on returned timers and tickers support zero or more
-string tags that allow traps to match on them.
-
-```go
-func TestInactivityTimer_Late(t *testing.T) {
-	// set a timeout on the test itself, so that if Wait functions get blocked, we don't have to
-	// wait for the default test timeout of 10 minutes.
-	ctx, cancel := context.WithTimeout(10*time.Second)
-	defer cancel()
-	mClock := quartz.NewMock(t)
-	trap := mClock.Trap.Until("inner")
-	defer trap.Close()
-
-	it := &InactivityTimer{
-		activity: mClock.Now(),
-		clock: mClock,
-	}
-	it.Start()
-
-	// Trigger the AfterFunc
-	w := mClock.Advance(10*time.Minute)
-	c := trap.Wait(ctx)
-	// Advance the clock a few ms to simulate a busy system
-	mClock.Advance(3*time.Millisecond)
-	c.Release() // Until() returns
-	w.MustWait(ctx) // Wait for the AfterFunc to wrap up
-
-	// Assert that the timeoutLocked() function was called
-}
-```
-
-This test case will fail with our bugged implementation, since the triggered AfterFunc won't call
-`timeoutLocked()` and instead will reset the timer with a negative number. The fix is easy, use
-`next <= 0` as the comparison.
diff --git a/vendor/github.com/coder/quartz/clock.go b/vendor/github.com/coder/quartz/clock.go
deleted file mode 100644
index 729edfa5623d..000000000000
--- a/vendor/github.com/coder/quartz/clock.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// Package quartz is a library for testing time related code.  It exports an interface Clock that
-// mimics the standard library time package functions.  In production, an implementation that calls
-// thru to the standard library is used.  In testing, a Mock clock is used to precisely control and
-// intercept time functions.
-package quartz
-
-import (
-	"context"
-	"time"
-)
-
-type Clock interface {
-	// NewTicker returns a new Ticker containing a channel that will send the current time on the
-	// channel after each tick. The period of the ticks is specified by the duration argument. The
-	// ticker will adjust the time interval or drop ticks to make up for slow receivers. The
-	// duration d must be greater than zero; if not, NewTicker will panic. Stop the ticker to
-	// release associated resources.
-	NewTicker(d time.Duration, tags ...string) *Ticker
-	// TickerFunc is a convenience function that calls f on the interval d until either the given
-	// context expires or f returns an error.  Callers may call Wait() on the returned Waiter to
-	// wait until this happens and obtain the error. The duration d must be greater than zero; if
-	// not, TickerFunc will panic.
-	TickerFunc(ctx context.Context, d time.Duration, f func() error, tags ...string) Waiter
-	// NewTimer creates a new Timer that will send the current time on its channel after at least
-	// duration d.
-	NewTimer(d time.Duration, tags ...string) *Timer
-	// AfterFunc waits for the duration to elapse and then calls f in its own goroutine. It returns
-	// a Timer that can be used to cancel the call using its Stop method. The returned Timer's C
-	// field is not used and will be nil.
-	AfterFunc(d time.Duration, f func(), tags ...string) *Timer
-
-	// Now returns the current local time.
-	Now(tags ...string) time.Time
-	// Since returns the time elapsed since t. It is shorthand for Clock.Now().Sub(t).
-	Since(t time.Time, tags ...string) time.Duration
-	// Until returns the duration until t. It is shorthand for t.Sub(Clock.Now()).
-	Until(t time.Time, tags ...string) time.Duration
-}
-
-// Waiter can be waited on for an error.
-type Waiter interface {
-	Wait(tags ...string) error
-}
diff --git a/vendor/github.com/coder/quartz/mock.go b/vendor/github.com/coder/quartz/mock.go
deleted file mode 100644
index cc8e064c2172..000000000000
--- a/vendor/github.com/coder/quartz/mock.go
+++ /dev/null
@@ -1,661 +0,0 @@
-package quartz
-
-import (
-	"context"
-	"errors"
-	"fmt"
-	"slices"
-	"sync"
-	"testing"
-	"time"
-)
-
-// Mock is the testing implementation of Clock.  It tracks a time that monotonically increases
-// during a test, triggering any timers or tickers automatically.
-type Mock struct {
-	tb testing.TB
-	mu sync.Mutex
-
-	// cur is the current time
-	cur time.Time
-
-	all        []event
-	nextTime   time.Time
-	nextEvents []event
-	traps      []*Trap
-}
-
-type event interface {
-	next() time.Time
-	fire(t time.Time)
-}
-
-func (m *Mock) TickerFunc(ctx context.Context, d time.Duration, f func() error, tags ...string) Waiter {
-	if d <= 0 {
-		panic("TickerFunc called with negative or zero duration")
-	}
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionTickerFunc, tags, withDuration(d))
-	m.matchCallLocked(c)
-	defer close(c.complete)
-	t := &mockTickerFunc{
-		ctx:  ctx,
-		d:    d,
-		f:    f,
-		nxt:  m.cur.Add(d),
-		mock: m,
-		cond: sync.NewCond(&m.mu),
-	}
-	m.all = append(m.all, t)
-	m.recomputeNextLocked()
-	go t.waitForCtx()
-	return t
-}
-
-func (m *Mock) NewTicker(d time.Duration, tags ...string) *Ticker {
-	if d <= 0 {
-		panic("NewTicker called with negative or zero duration")
-	}
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionNewTicker, tags, withDuration(d))
-	m.matchCallLocked(c)
-	defer close(c.complete)
-	// 1 element buffer follows standard library implementation
-	ticks := make(chan time.Time, 1)
-	t := &Ticker{
-		C:    ticks,
-		c:    ticks,
-		d:    d,
-		nxt:  m.cur.Add(d),
-		mock: m,
-	}
-	m.addEventLocked(t)
-	return t
-}
-
-func (m *Mock) NewTimer(d time.Duration, tags ...string) *Timer {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionNewTimer, tags, withDuration(d))
-	defer close(c.complete)
-	m.matchCallLocked(c)
-	ch := make(chan time.Time, 1)
-	t := &Timer{
-		C:    ch,
-		c:    ch,
-		nxt:  m.cur.Add(d),
-		mock: m,
-	}
-	if d <= 0 {
-		// zero or negative duration timer means we should immediately fire
-		// it, rather than add it.
-		go t.fire(t.mock.cur)
-		return t
-	}
-	m.addEventLocked(t)
-	return t
-}
-
-func (m *Mock) AfterFunc(d time.Duration, f func(), tags ...string) *Timer {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionAfterFunc, tags, withDuration(d))
-	defer close(c.complete)
-	m.matchCallLocked(c)
-	t := &Timer{
-		nxt:  m.cur.Add(d),
-		fn:   f,
-		mock: m,
-	}
-	if d <= 0 {
-		// zero or negative duration timer means we should immediately fire
-		// it, rather than add it.
-		go t.fire(t.mock.cur)
-		return t
-	}
-	m.addEventLocked(t)
-	return t
-}
-
-func (m *Mock) Now(tags ...string) time.Time {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionNow, tags)
-	defer close(c.complete)
-	m.matchCallLocked(c)
-	return m.cur
-}
-
-func (m *Mock) Since(t time.Time, tags ...string) time.Duration {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionSince, tags, withTime(t))
-	defer close(c.complete)
-	m.matchCallLocked(c)
-	return m.cur.Sub(t)
-}
-
-func (m *Mock) Until(t time.Time, tags ...string) time.Duration {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	c := newCall(clockFunctionUntil, tags, withTime(t))
-	defer close(c.complete)
-	m.matchCallLocked(c)
-	return t.Sub(m.cur)
-}
-
-func (m *Mock) addEventLocked(e event) {
-	m.all = append(m.all, e)
-	m.recomputeNextLocked()
-}
-
-func (m *Mock) recomputeNextLocked() {
-	var best time.Time
-	var events []event
-	for _, e := range m.all {
-		if best.IsZero() || e.next().Before(best) {
-			best = e.next()
-			events = []event{e}
-			continue
-		}
-		if e.next().Equal(best) {
-			events = append(events, e)
-			continue
-		}
-	}
-	m.nextTime = best
-	m.nextEvents = events
-}
-
-func (m *Mock) removeTimer(t *Timer) {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	m.removeTimerLocked(t)
-}
-
-func (m *Mock) removeTimerLocked(t *Timer) {
-	t.stopped = true
-	m.removeEventLocked(t)
-}
-
-func (m *Mock) removeEventLocked(e event) {
-	defer m.recomputeNextLocked()
-	for i := range m.all {
-		if m.all[i] == e {
-			m.all = append(m.all[:i], m.all[i+1:]...)
-			return
-		}
-	}
-}
-
-func (m *Mock) matchCallLocked(c *Call) {
-	var traps []*Trap
-	for _, t := range m.traps {
-		if t.matches(c) {
-			traps = append(traps, t)
-		}
-	}
-	if len(traps) == 0 {
-		return
-	}
-	c.releases.Add(len(traps))
-	m.mu.Unlock()
-	for _, t := range traps {
-		go t.catch(c)
-	}
-	c.releases.Wait()
-	m.mu.Lock()
-}
-
-// AdvanceWaiter is returned from Advance and Set calls and allows you to wait for ticks and timers
-// to complete.  In the case of functions passed to AfterFunc or TickerFunc, it waits for the
-// functions to return.  For other ticks & timers, it just waits for the tick to be delivered to
-// the channel.
-//
-// If multiple timers or tickers trigger simultaneously, they are all run on separate
-// go routines.
-type AdvanceWaiter struct {
-	tb testing.TB
-	ch chan struct{}
-}
-
-// Wait for all timers and ticks to complete, or until context expires.
-func (w AdvanceWaiter) Wait(ctx context.Context) error {
-	select {
-	case <-w.ch:
-		return nil
-	case <-ctx.Done():
-		return ctx.Err()
-	}
-}
-
-// MustWait waits for all timers and ticks to complete, and fails the test immediately if the
-// context completes first.  MustWait must be called from the goroutine running the test or
-// benchmark, similar to `t.FailNow()`.
-func (w AdvanceWaiter) MustWait(ctx context.Context) {
-	w.tb.Helper()
-	select {
-	case <-w.ch:
-		return
-	case <-ctx.Done():
-		w.tb.Fatalf("context expired while waiting for clock to advance: %s", ctx.Err())
-	}
-}
-
-// Done returns a channel that is closed when all timers and ticks complete.
-func (w AdvanceWaiter) Done() <-chan struct{} {
-	return w.ch
-}
-
-// Advance moves the clock forward by d, triggering any timers or tickers.  The returned value can
-// be used to wait for all timers and ticks to complete.  Advance sets the clock forward before
-// returning, and can only advance up to the next timer or tick event. It will fail the test if you
-// attempt to advance beyond.
-//
-// If you need to advance exactly to the next event, and don't know or don't wish to calculate it,
-// consider AdvanceNext().
-func (m *Mock) Advance(d time.Duration) AdvanceWaiter {
-	m.tb.Helper()
-	w := AdvanceWaiter{tb: m.tb, ch: make(chan struct{})}
-	m.mu.Lock()
-	fin := m.cur.Add(d)
-	// nextTime.IsZero implies no events scheduled.
-	if m.nextTime.IsZero() || fin.Before(m.nextTime) {
-		m.cur = fin
-		m.mu.Unlock()
-		close(w.ch)
-		return w
-	}
-	if fin.After(m.nextTime) {
-		m.tb.Errorf(fmt.Sprintf("cannot advance %s which is beyond next timer/ticker event in %s",
-			d.String(), m.nextTime.Sub(m.cur)))
-		m.mu.Unlock()
-		close(w.ch)
-		return w
-	}
-
-	m.cur = m.nextTime
-	go m.advanceLocked(w)
-	return w
-}
-
-func (m *Mock) advanceLocked(w AdvanceWaiter) {
-	defer close(w.ch)
-	wg := sync.WaitGroup{}
-	for i := range m.nextEvents {
-		e := m.nextEvents[i]
-		t := m.cur
-		wg.Add(1)
-		go func() {
-			e.fire(t)
-			wg.Done()
-		}()
-	}
-	// release the lock and let the events resolve.  This allows them to call back into the
-	// Mock to query the time or set new timers.  Each event should remove or reschedule
-	// itself from nextEvents.
-	m.mu.Unlock()
-	wg.Wait()
-}
-
-// Set the time to t.  If the time is after the current mocked time, then this is equivalent to
-// Advance() with the difference.  You may only Set the time earlier than the current time before
-// starting tickers and timers (e.g. at the start of your test case).
-func (m *Mock) Set(t time.Time) AdvanceWaiter {
-	m.tb.Helper()
-	w := AdvanceWaiter{tb: m.tb, ch: make(chan struct{})}
-	m.mu.Lock()
-	if t.Before(m.cur) {
-		defer close(w.ch)
-		defer m.mu.Unlock()
-		// past
-		if !m.nextTime.IsZero() {
-			m.tb.Error("Set mock clock to the past after timers/tickers started")
-		}
-		m.cur = t
-		return w
-	}
-	// future
-	// nextTime.IsZero implies no events scheduled.
-	if m.nextTime.IsZero() || t.Before(m.nextTime) {
-		defer close(w.ch)
-		defer m.mu.Unlock()
-		m.cur = t
-		return w
-	}
-	if t.After(m.nextTime) {
-		defer close(w.ch)
-		defer m.mu.Unlock()
-		m.tb.Errorf("cannot Set time to %s which is beyond next timer/ticker event at %s",
-			t.String(), m.nextTime)
-		return w
-	}
-
-	m.cur = m.nextTime
-	go m.advanceLocked(w)
-	return w
-}
-
-// AdvanceNext advances the clock to the next timer or tick event.  It fails the test if there are
-// none scheduled.  It returns the duration the clock was advanced and a waiter that can be used to
-// wait for the timer/tick event(s) to finish.
-func (m *Mock) AdvanceNext() (time.Duration, AdvanceWaiter) {
-	m.mu.Lock()
-	m.tb.Helper()
-	w := AdvanceWaiter{tb: m.tb, ch: make(chan struct{})}
-	if m.nextTime.IsZero() {
-		defer close(w.ch)
-		defer m.mu.Unlock()
-		m.tb.Error("cannot AdvanceNext because there are no timers or tickers running")
-	}
-	d := m.nextTime.Sub(m.cur)
-	m.cur = m.nextTime
-	go m.advanceLocked(w)
-	return d, w
-}
-
-// Peek returns the duration until the next ticker or timer event and the value
-// true, or, if there are no running tickers or timers, it returns zero and
-// false.
-func (m *Mock) Peek() (d time.Duration, ok bool) {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	if m.nextTime.IsZero() {
-		return 0, false
-	}
-	return m.nextTime.Sub(m.cur), true
-}
-
-// Trapper allows the creation of Traps
-type Trapper struct {
-	// mock is the underlying Mock.  This is a thin wrapper around Mock so that
-	// we can have our interface look like mClock.Trap().NewTimer("foo")
-	mock *Mock
-}
-
-func (t Trapper) NewTimer(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionNewTimer, tags)
-}
-
-func (t Trapper) AfterFunc(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionAfterFunc, tags)
-}
-
-func (t Trapper) TimerStop(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionTimerStop, tags)
-}
-
-func (t Trapper) TimerReset(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionTimerReset, tags)
-}
-
-func (t Trapper) TickerFunc(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionTickerFunc, tags)
-}
-
-func (t Trapper) TickerFuncWait(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionTickerFuncWait, tags)
-}
-
-func (t Trapper) NewTicker(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionNewTicker, tags)
-}
-
-func (t Trapper) TickerStop(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionTickerStop, tags)
-}
-
-func (t Trapper) TickerReset(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionTickerReset, tags)
-}
-
-func (t Trapper) Now(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionNow, tags)
-}
-
-func (t Trapper) Since(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionSince, tags)
-}
-
-func (t Trapper) Until(tags ...string) *Trap {
-	return t.mock.newTrap(clockFunctionUntil, tags)
-}
-
-func (m *Mock) Trap() Trapper {
-	return Trapper{m}
-}
-
-func (m *Mock) newTrap(fn clockFunction, tags []string) *Trap {
-	m.mu.Lock()
-	defer m.mu.Unlock()
-	tr := &Trap{
-		fn:    fn,
-		tags:  tags,
-		mock:  m,
-		calls: make(chan *Call),
-		done:  make(chan struct{}),
-	}
-	m.traps = append(m.traps, tr)
-	return tr
-}
-
-// NewMock creates a new Mock with the time set to midnight UTC on Jan 1, 2024.
-// You may re-set the time earlier than this, but only before timers or tickers
-// are created.
-func NewMock(tb testing.TB) *Mock {
-	cur, err := time.Parse(time.RFC3339, "2024-01-01T00:00:00Z")
-	if err != nil {
-		panic(err)
-	}
-	return &Mock{
-		tb:  tb,
-		cur: cur,
-	}
-}
-
-var _ Clock = &Mock{}
-
-type mockTickerFunc struct {
-	ctx  context.Context
-	d    time.Duration
-	f    func() error
-	nxt  time.Time
-	mock *Mock
-
-	// cond is a condition Locked on the main Mock.mu
-	cond *sync.Cond
-	// inProgress is true when we are actively calling f
-	inProgress bool
-	// done is true when the ticker exits
-	done bool
-	// err holds the error when the ticker exits
-	err error
-}
-
-func (m *mockTickerFunc) next() time.Time {
-	return m.nxt
-}
-
-func (m *mockTickerFunc) fire(_ time.Time) {
-	m.mock.mu.Lock()
-	if m.done {
-		m.mock.mu.Unlock()
-		return
-	}
-	m.nxt = m.nxt.Add(m.d)
-	m.mock.recomputeNextLocked()
-	// we need this check to happen after we've computed the next tick,
-	// otherwise it will be immediately rescheduled.
-	if m.inProgress {
-		m.mock.mu.Unlock()
-		return
-	}
-
-	m.inProgress = true
-	m.mock.mu.Unlock()
-	err := m.f()
-	m.mock.mu.Lock()
-	defer m.mock.mu.Unlock()
-	m.inProgress = false
-	m.cond.Broadcast() // wake up anything waiting for f to finish
-	if err != nil {
-		m.exitLocked(err)
-	}
-}
-
-func (m *mockTickerFunc) exitLocked(err error) {
-	if m.done {
-		return
-	}
-	m.done = true
-	m.err = err
-	m.mock.removeEventLocked(m)
-	m.cond.Broadcast()
-}
-
-func (m *mockTickerFunc) waitForCtx() {
-	<-m.ctx.Done()
-	m.mock.mu.Lock()
-	defer m.mock.mu.Unlock()
-	for m.inProgress {
-		m.cond.Wait()
-	}
-	m.exitLocked(m.ctx.Err())
-}
-
-func (m *mockTickerFunc) Wait(tags ...string) error {
-	m.mock.mu.Lock()
-	defer m.mock.mu.Unlock()
-	c := newCall(clockFunctionTickerFuncWait, tags)
-	m.mock.matchCallLocked(c)
-	defer close(c.complete)
-	for !m.done {
-		m.cond.Wait()
-	}
-	return m.err
-}
-
-var _ Waiter = &mockTickerFunc{}
-
-type clockFunction int
-
-const (
-	clockFunctionNewTimer clockFunction = iota
-	clockFunctionAfterFunc
-	clockFunctionTimerStop
-	clockFunctionTimerReset
-	clockFunctionTickerFunc
-	clockFunctionTickerFuncWait
-	clockFunctionNewTicker
-	clockFunctionTickerReset
-	clockFunctionTickerStop
-	clockFunctionNow
-	clockFunctionSince
-	clockFunctionUntil
-)
-
-type callArg func(c *Call)
-
-type Call struct {
-	Time     time.Time
-	Duration time.Duration
-	Tags     []string
-
-	fn       clockFunction
-	releases sync.WaitGroup
-	complete chan struct{}
-}
-
-func (c *Call) Release() {
-	c.releases.Done()
-	<-c.complete
-}
-
-func withTime(t time.Time) callArg {
-	return func(c *Call) {
-		c.Time = t
-	}
-}
-
-func withDuration(d time.Duration) callArg {
-	return func(c *Call) {
-		c.Duration = d
-	}
-}
-
-func newCall(fn clockFunction, tags []string, args ...callArg) *Call {
-	c := &Call{
-		fn:       fn,
-		Tags:     tags,
-		complete: make(chan struct{}),
-	}
-	for _, a := range args {
-		a(c)
-	}
-	return c
-}
-
-type Trap struct {
-	fn    clockFunction
-	tags  []string
-	mock  *Mock
-	calls chan *Call
-	done  chan struct{}
-}
-
-func (t *Trap) catch(c *Call) {
-	select {
-	case t.calls <- c:
-	case <-t.done:
-		c.Release()
-	}
-}
-
-func (t *Trap) matches(c *Call) bool {
-	if t.fn != c.fn {
-		return false
-	}
-	for _, tag := range t.tags {
-		if !slices.Contains(c.Tags, tag) {
-			return false
-		}
-	}
-	return true
-}
-
-func (t *Trap) Close() {
-	t.mock.mu.Lock()
-	defer t.mock.mu.Unlock()
-	for i, tr := range t.mock.traps {
-		if t == tr {
-			t.mock.traps = append(t.mock.traps[:i], t.mock.traps[i+1:]...)
-		}
-	}
-	close(t.done)
-}
-
-var ErrTrapClosed = errors.New("trap closed")
-
-func (t *Trap) Wait(ctx context.Context) (*Call, error) {
-	select {
-	case <-ctx.Done():
-		return nil, ctx.Err()
-	case <-t.done:
-		return nil, ErrTrapClosed
-	case c := <-t.calls:
-		return c, nil
-	}
-}
-
-// MustWait calls Wait() and then if there is an error, immediately fails the
-// test via tb.Fatalf()
-func (t *Trap) MustWait(ctx context.Context) *Call {
-	t.mock.tb.Helper()
-	c, err := t.Wait(ctx)
-	if err != nil {
-		t.mock.tb.Fatalf("context expired while waiting for trap: %s", err.Error())
-	}
-	return c
-}
diff --git a/vendor/github.com/coder/quartz/real.go b/vendor/github.com/coder/quartz/real.go
deleted file mode 100644
index f39fb1163e82..000000000000
--- a/vendor/github.com/coder/quartz/real.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package quartz
-
-import (
-	"context"
-	"time"
-)
-
-type realClock struct{}
-
-func NewReal() Clock {
-	return realClock{}
-}
-
-func (realClock) NewTicker(d time.Duration, _ ...string) *Ticker {
-	tkr := time.NewTicker(d)
-	return &Ticker{ticker: tkr, C: tkr.C}
-}
-
-func (realClock) TickerFunc(ctx context.Context, d time.Duration, f func() error, _ ...string) Waiter {
-	ct := &realContextTicker{
-		ctx: ctx,
-		tkr: time.NewTicker(d),
-		f:   f,
-		err: make(chan error, 1),
-	}
-	go ct.run()
-	return ct
-}
-
-type realContextTicker struct {
-	ctx context.Context
-	tkr *time.Ticker
-	f   func() error
-	err chan error
-}
-
-func (t *realContextTicker) Wait(_ ...string) error {
-	return <-t.err
-}
-
-func (t *realContextTicker) run() {
-	defer t.tkr.Stop()
-	for {
-		select {
-		case <-t.ctx.Done():
-			t.err <- t.ctx.Err()
-			return
-		case <-t.tkr.C:
-			err := t.f()
-			if err != nil {
-				t.err <- err
-				return
-			}
-		}
-	}
-}
-
-func (realClock) NewTimer(d time.Duration, _ ...string) *Timer {
-	rt := time.NewTimer(d)
-	return &Timer{C: rt.C, timer: rt}
-}
-
-func (realClock) AfterFunc(d time.Duration, f func(), _ ...string) *Timer {
-	rt := time.AfterFunc(d, f)
-	return &Timer{C: rt.C, timer: rt}
-}
-
-func (realClock) Now(_ ...string) time.Time {
-	return time.Now()
-}
-
-func (realClock) Since(t time.Time, _ ...string) time.Duration {
-	return time.Since(t)
-}
-
-func (realClock) Until(t time.Time, _ ...string) time.Duration {
-	return time.Until(t)
-}
-
-var _ Clock = realClock{}
diff --git a/vendor/github.com/coder/quartz/ticker.go b/vendor/github.com/coder/quartz/ticker.go
deleted file mode 100644
index 5506750d5d1f..000000000000
--- a/vendor/github.com/coder/quartz/ticker.go
+++ /dev/null
@@ -1,75 +0,0 @@
-package quartz
-
-import "time"
-
-// A Ticker holds a channel that delivers “ticks” of a clock at intervals.
-type Ticker struct {
-	C <-chan time.Time
-	//nolint: revive
-	c       chan time.Time
-	ticker  *time.Ticker  // realtime impl, if set
-	d       time.Duration // period, if set
-	nxt     time.Time     // next tick time
-	mock    *Mock         // mock clock, if set
-	stopped bool          // true if the ticker is not running
-}
-
-func (t *Ticker) fire(tt time.Time) {
-	t.mock.mu.Lock()
-	defer t.mock.mu.Unlock()
-	if t.stopped {
-		return
-	}
-	for !t.nxt.After(t.mock.cur) {
-		t.nxt = t.nxt.Add(t.d)
-	}
-	t.mock.recomputeNextLocked()
-	select {
-	case t.c <- tt:
-	default:
-	}
-}
-
-func (t *Ticker) next() time.Time {
-	return t.nxt
-}
-
-// Stop turns off a ticker. After Stop, no more ticks will be sent. Stop does
-// not close the channel, to prevent a concurrent goroutine reading from the
-// channel from seeing an erroneous "tick".
-func (t *Ticker) Stop(tags ...string) {
-	if t.ticker != nil {
-		t.ticker.Stop()
-		return
-	}
-	t.mock.mu.Lock()
-	defer t.mock.mu.Unlock()
-	c := newCall(clockFunctionTickerStop, tags)
-	t.mock.matchCallLocked(c)
-	defer close(c.complete)
-	t.mock.removeEventLocked(t)
-	t.stopped = true
-}
-
-// Reset stops a ticker and resets its period to the specified duration. The
-// next tick will arrive after the new period elapses. The duration d must be
-// greater than zero; if not, Reset will panic.
-func (t *Ticker) Reset(d time.Duration, tags ...string) {
-	if t.ticker != nil {
-		t.ticker.Reset(d)
-		return
-	}
-	t.mock.mu.Lock()
-	defer t.mock.mu.Unlock()
-	c := newCall(clockFunctionTickerReset, tags, withDuration(d))
-	t.mock.matchCallLocked(c)
-	defer close(c.complete)
-	t.nxt = t.mock.cur.Add(d)
-	t.d = d
-	if t.stopped {
-		t.stopped = false
-		t.mock.addEventLocked(t)
-	} else {
-		t.mock.recomputeNextLocked()
-	}
-}
diff --git a/vendor/github.com/coder/quartz/timer.go b/vendor/github.com/coder/quartz/timer.go
deleted file mode 100644
index 8d928ac6f609..000000000000
--- a/vendor/github.com/coder/quartz/timer.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package quartz
-
-import "time"
-
-// The Timer type represents a single event. When the Timer expires, the current time will be sent
-// on C, unless the Timer was created by AfterFunc. A Timer must be created with NewTimer or
-// AfterFunc.
-type Timer struct {
-	C <-chan time.Time
-	//nolint: revive
-	c       chan time.Time
-	timer   *time.Timer // realtime impl, if set
-	nxt     time.Time   // next tick time
-	mock    *Mock       // mock clock, if set
-	fn      func()      // AfterFunc function, if set
-	stopped bool        // True if stopped, false if running
-}
-
-func (t *Timer) fire(tt time.Time) {
-	t.mock.removeTimer(t)
-	if t.fn != nil {
-		t.fn()
-	} else {
-		t.c <- tt
-	}
-}
-
-func (t *Timer) next() time.Time {
-	return t.nxt
-}
-
-// Stop prevents the Timer from firing. It returns true if the call stops the timer, false if the
-// timer has already expired or been stopped. Stop does not close the channel, to prevent a read
-// from the channel succeeding incorrectly.
-//
-// See https://pkg.go.dev/time#Timer.Stop for more information.
-func (t *Timer) Stop(tags ...string) bool {
-	if t.timer != nil {
-		return t.timer.Stop()
-	}
-	t.mock.mu.Lock()
-	defer t.mock.mu.Unlock()
-	c := newCall(clockFunctionTimerStop, tags)
-	t.mock.matchCallLocked(c)
-	defer close(c.complete)
-	result := !t.stopped
-	t.mock.removeTimerLocked(t)
-	return result
-}
-
-// Reset changes the timer to expire after duration d. It returns true if the timer had been active,
-// false if the timer had expired or been stopped.
-//
-// See https://pkg.go.dev/time#Timer.Reset for more information.
-func (t *Timer) Reset(d time.Duration, tags ...string) bool {
-	if t.timer != nil {
-		return t.timer.Reset(d)
-	}
-	t.mock.mu.Lock()
-	defer t.mock.mu.Unlock()
-	c := newCall(clockFunctionTimerReset, tags, withDuration(d))
-	t.mock.matchCallLocked(c)
-	defer close(c.complete)
-	result := !t.stopped
-	select {
-	case <-t.c:
-	default:
-	}
-	if d <= 0 {
-		// zero or negative duration timer means we should immediately re-fire
-		// it, rather than remove and re-add it.
-		t.stopped = false
-		go t.fire(t.mock.cur)
-		return result
-	}
-	t.mock.removeTimerLocked(t)
-	t.stopped = false
-	t.nxt = t.mock.cur.Add(d)
-	t.mock.addEventLocked(t)
-	return result
-}
diff --git a/vendor/github.com/dlclark/regexp2/.travis.yml b/vendor/github.com/dlclark/regexp2/.travis.yml
index 2aa5ea1eefb7..a2da6be473c8 100644
--- a/vendor/github.com/dlclark/regexp2/.travis.yml
+++ b/vendor/github.com/dlclark/regexp2/.travis.yml
@@ -1,5 +1,7 @@
 language: go
-
+arch:
+  - AMD64
+  - ppc64le
 go:
   - 1.9
-  - tip
\ No newline at end of file
+  - tip
diff --git a/vendor/github.com/dlclark/regexp2/README.md b/vendor/github.com/dlclark/regexp2/README.md
index 4e4abb4c6f5e..f3d1bd92b63d 100644
--- a/vendor/github.com/dlclark/regexp2/README.md
+++ b/vendor/github.com/dlclark/regexp2/README.md
@@ -39,6 +39,24 @@ Group 0 is embedded in the Match.  Group 0 is an automatically-assigned group th
 
 The __last__ capture is embedded in each group, so `g.String()` will return the same thing as `g.Capture.String()` and  `g.Captures[len(g.Captures)-1].String()`.
 
+If you want to find multiple matches from a single input string you should use the `FindNextMatch` method.  For example, to implement a function similar to `regexp.FindAllString`:
+
+```go
+func regexp2FindAllString(re *regexp2.Regexp, s string) []string {
+	var matches []string
+	m, _ := re.FindStringMatch(s)
+	for m != nil {
+		matches = append(matches, m.String())
+		m, _ = re.FindNextMatch(m)
+	}
+	return matches
+}
+```
+
+`FindNextMatch` is optmized so that it re-uses the underlying string/rune slice.
+
+The internals of `regexp2` always operate on `[]rune` so `Index` and `Length` data in a `Match` always reference a position in `rune`s rather than `byte`s (even if the input was given as a string). This is a dramatic difference between `regexp` and `regexp2`.  It's advisable to use the provided `String()` methods to avoid having to work with indices.
+
 ## Compare `regexp` and `regexp2`
 | Category | regexp | regexp2 |
 | --- | --- | --- |
@@ -62,6 +80,8 @@ The default behavior of `regexp2` is to match the .NET regexp engine, however th
 * add support for named ascii character classes (e.g. `[[:foo:]]`)
 * add support for python-style capture groups (e.g. `(Pre)`)
 * change singleline behavior for `$` to only match end of string (like RE2) (see [#24](https://github.com/dlclark/regexp2/issues/24))
+* change the character classes `\d` `\s` and `\w` to match the same characters as RE2. NOTE: if you also use the `ECMAScript` option then this will change the `\s` character class to match ECMAScript instead of RE2.  ECMAScript allows more whitespace characters in `\s` than RE2 (but still fewer than the the default behavior).
+* allow character escape sequences to have defaults. For example, by default `\_` isn't a known character escape and will fail to compile, but in RE2 mode it will match the literal character `_`
  
 ```go
 re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2)
@@ -72,6 +92,70 @@ if isMatch, _ := re.MatchString(`Something to match`); isMatch {
 
 This feature is a work in progress and I'm open to ideas for more things to put here (maybe more relaxed character escaping rules?).
 
+## Catastrophic Backtracking and Timeouts
+
+`regexp2` supports features that can lead to catastrophic backtracking.
+`Regexp.MatchTimeout` can be set to to limit the impact of such behavior; the
+match will fail with an error after approximately MatchTimeout. No timeout
+checks are done by default.
+
+Timeout checking is not free. The current timeout checking implementation starts
+a background worker that updates a clock value approximately once every 100
+milliseconds. The matching code compares this value against the precomputed
+deadline for the match. The performance impact is as follows.
+
+1.  A match with a timeout runs almost as fast as a match without a timeout.
+2.  If any live matches have a timeout, there will be a background CPU load
+    (`~0.15%` currently on a modern machine). This load will remain constant
+    regardless of the number of matches done including matches done in parallel.
+3.  If no live matches are using a timeout, the background load will remain
+    until the longest deadline (match timeout + the time when the match started)
+    is reached. E.g., if you set a timeout of one minute the load will persist
+    for approximately a minute even if the match finishes quickly.
+
+See [PR #58](https://github.com/dlclark/regexp2/pull/58) for more details and 
+alternatives considered.
+
+## Goroutine leak error
+If you're using a library during unit tests (e.g. https://github.com/uber-go/goleak) that validates all goroutines are exited then you'll likely get an error if you or any of your dependencies use regex's with a MatchTimeout. 
+To remedy the problem you'll need to tell the unit test to wait until the backgroup timeout goroutine is exited.
+
+```go
+func TestSomething(t *testing.T) {
+    defer goleak.VerifyNone(t)
+    defer regexp2.StopTimeoutClock()
+
+    // ... test
+}
+
+//or
+
+func TestMain(m *testing.M) {
+    // setup
+    // ...
+
+    // run 
+    m.Run()
+
+    //tear down
+    regexp2.StopTimeoutClock()
+    goleak.VerifyNone(t)
+}
+```
+
+This will add ~100ms runtime to each test (or TestMain). If that's too much time you can set the clock cycle rate of the timeout goroutine in an init function in a test file. `regexp2.SetTimeoutCheckPeriod` isn't threadsafe so it must be setup before starting any regex's with Timeouts.
+
+```go
+func init() {
+	//speed up testing by making the timeout clock 1ms
+	regexp2.SetTimeoutCheckPeriod(time.Millisecond)
+}
+```
+
+## ECMAScript compatibility mode
+In this mode the engine provides compatibility with the [regex engine](https://tc39.es/ecma262/multipage/text-processing.html#sec-regexp-regular-expression-objects) described in the ECMAScript specification.
+
+Additionally a Unicode mode is provided which allows parsing of `\u{CodePoint}` syntax that is only when both are provided.
 
 ## Library features that I'm still working on
 - Regex split
diff --git a/vendor/github.com/dlclark/regexp2/fastclock.go b/vendor/github.com/dlclark/regexp2/fastclock.go
new file mode 100644
index 000000000000..caf2c9d882a8
--- /dev/null
+++ b/vendor/github.com/dlclark/regexp2/fastclock.go
@@ -0,0 +1,129 @@
+package regexp2
+
+import (
+	"sync"
+	"sync/atomic"
+	"time"
+)
+
+// fasttime holds a time value (ticks since clock initialization)
+type fasttime int64
+
+// fastclock provides a fast clock implementation.
+//
+// A background goroutine periodically stores the current time
+// into an atomic variable.
+//
+// A deadline can be quickly checked for expiration by comparing
+// its value to the clock stored in the atomic variable.
+//
+// The goroutine automatically stops once clockEnd is reached.
+// (clockEnd covers the largest deadline seen so far + some
+// extra time). This ensures that if regexp2 with timeouts
+// stops being used we will stop background work.
+type fastclock struct {
+	// instances of atomicTime must be at the start of the struct (or at least 64-bit aligned)
+	// otherwise 32-bit architectures will panic
+
+	current  atomicTime // Current time (approximate)
+	clockEnd atomicTime // When clock updater is supposed to stop (>= any existing deadline)
+
+	// current and clockEnd can be read via atomic loads.
+	// Reads and writes of other fields require mu to be held.
+	mu      sync.Mutex
+	start   time.Time // Time corresponding to fasttime(0)
+	running bool      // Is a clock updater running?
+}
+
+var fast fastclock
+
+// reached returns true if current time is at or past t.
+func (t fasttime) reached() bool {
+	return fast.current.read() >= t
+}
+
+// makeDeadline returns a time that is approximately time.Now().Add(d)
+func makeDeadline(d time.Duration) fasttime {
+	// Increase the deadline since the clock we are reading may be
+	// just about to tick forwards.
+	end := fast.current.read() + durationToTicks(d+clockPeriod)
+
+	// Start or extend clock if necessary.
+	if end > fast.clockEnd.read() {
+		extendClock(end)
+	}
+	return end
+}
+
+// extendClock ensures that clock is live and will run until at least end.
+func extendClock(end fasttime) {
+	fast.mu.Lock()
+	defer fast.mu.Unlock()
+
+	if fast.start.IsZero() {
+		fast.start = time.Now()
+	}
+
+	// Extend the running time to cover end as well as a bit of slop.
+	if shutdown := end + durationToTicks(time.Second); shutdown > fast.clockEnd.read() {
+		fast.clockEnd.write(shutdown)
+	}
+
+	// Start clock if necessary
+	if !fast.running {
+		fast.running = true
+		go runClock()
+	}
+}
+
+// stop the timeout clock in the background
+// should only be used for unit tests to abandon the background goroutine
+func stopClock() {
+	fast.mu.Lock()
+	if fast.running {
+		fast.clockEnd.write(fasttime(0))
+	}
+	fast.mu.Unlock()
+
+	// pause until not running
+	// get and release the lock
+	isRunning := true
+	for isRunning {
+		time.Sleep(clockPeriod / 2)
+		fast.mu.Lock()
+		isRunning = fast.running
+		fast.mu.Unlock()
+	}
+}
+
+func durationToTicks(d time.Duration) fasttime {
+	// Downscale nanoseconds to approximately a millisecond so that we can avoid
+	// overflow even if the caller passes in math.MaxInt64.
+	return fasttime(d) >> 20
+}
+
+const DefaultClockPeriod = 100 * time.Millisecond
+
+// clockPeriod is the approximate interval between updates of fast.current.
+var clockPeriod = DefaultClockPeriod
+
+func runClock() {
+	fast.mu.Lock()
+	defer fast.mu.Unlock()
+
+	for fast.current.read() <= fast.clockEnd.read() {
+		// Unlock while sleeping.
+		fast.mu.Unlock()
+		time.Sleep(clockPeriod)
+		fast.mu.Lock()
+
+		newTime := durationToTicks(time.Since(fast.start))
+		fast.current.write(newTime)
+	}
+	fast.running = false
+}
+
+type atomicTime struct{ v int64 } // Should change to atomic.Int64 when we can use go 1.19
+
+func (t *atomicTime) read() fasttime   { return fasttime(atomic.LoadInt64(&t.v)) }
+func (t *atomicTime) write(v fasttime) { atomic.StoreInt64(&t.v, int64(v)) }
diff --git a/vendor/github.com/dlclark/regexp2/regexp.go b/vendor/github.com/dlclark/regexp2/regexp.go
index 7c7b01d879a5..a7ddbaf35877 100644
--- a/vendor/github.com/dlclark/regexp2/regexp.go
+++ b/vendor/github.com/dlclark/regexp2/regexp.go
@@ -18,13 +18,21 @@ import (
 	"github.com/dlclark/regexp2/syntax"
 )
 
-// Default timeout used when running regexp matches -- "forever"
-var DefaultMatchTimeout = time.Duration(math.MaxInt64)
+var (
+	// DefaultMatchTimeout used when running regexp matches -- "forever"
+	DefaultMatchTimeout = time.Duration(math.MaxInt64)
+	// DefaultUnmarshalOptions used when unmarshaling a regex from text
+	DefaultUnmarshalOptions = None
+)
 
 // Regexp is the representation of a compiled regular expression.
 // A Regexp is safe for concurrent use by multiple goroutines.
 type Regexp struct {
-	//timeout when trying to find matches
+	// A match will time out if it takes (approximately) more than
+	// MatchTimeout. This is a safety check in case the match
+	// encounters catastrophic backtracking.  The default value
+	// (DefaultMatchTimeout) causes all time out checking to be
+	// suppressed.
 	MatchTimeout time.Duration
 
 	// read-only after Compile
@@ -39,7 +47,7 @@ type Regexp struct {
 	code *syntax.Code // compiled program
 
 	// cache of machines for running regexp
-	muRun  sync.Mutex
+	muRun  *sync.Mutex
 	runner []*runner
 }
 
@@ -68,6 +76,7 @@ func Compile(expr string, opt RegexOptions) (*Regexp, error) {
 		capsize:      code.Capsize,
 		code:         code,
 		MatchTimeout: DefaultMatchTimeout,
+		muRun:        &sync.Mutex{},
 	}, nil
 }
 
@@ -92,6 +101,19 @@ func Unescape(input string) (string, error) {
 	return syntax.Unescape(input)
 }
 
+// SetTimeoutCheckPeriod is a debug function that sets the frequency of the timeout goroutine's sleep cycle.
+// Defaults to 100ms. The only benefit of setting this lower is that the 1 background goroutine that manages
+// timeouts may exit slightly sooner after all the timeouts have expired. See Github issue #63
+func SetTimeoutCheckPeriod(d time.Duration) {
+	clockPeriod = d
+}
+
+// StopTimeoutClock should only be used in unit tests to prevent the timeout clock goroutine
+// from appearing like a leaking goroutine
+func StopTimeoutClock() {
+	stopClock()
+}
+
 // String returns the source text used to compile the regular expression.
 func (re *Regexp) String() string {
 	return re.pattern
@@ -121,6 +143,7 @@ const (
 	Debug                                = 0x0080 // "d"
 	ECMAScript                           = 0x0100 // "e"
 	RE2                                  = 0x0200 // RE2 (regexp package) compatibility mode
+	Unicode                              = 0x0400 // "u"
 )
 
 func (re *Regexp) RightToLeft() bool {
@@ -353,3 +376,20 @@ func (re *Regexp) GroupNumberFromName(name string) int {
 
 	return -1
 }
+
+// MarshalText implements [encoding.TextMarshaler]. The output
+// matches that of calling the [Regexp.String] method.
+func (re *Regexp) MarshalText() ([]byte, error) {
+	return []byte(re.String()), nil
+}
+
+// UnmarshalText implements [encoding.TextUnmarshaler] by calling
+// [Compile] on the encoded value.
+func (re *Regexp) UnmarshalText(text []byte) error {
+	newRE, err := Compile(string(text), DefaultUnmarshalOptions)
+	if err != nil {
+		return err
+	}
+	*re = *newRE
+	return nil
+}
diff --git a/vendor/github.com/dlclark/regexp2/runner.go b/vendor/github.com/dlclark/regexp2/runner.go
index 4d7f9b061124..494dcef9f382 100644
--- a/vendor/github.com/dlclark/regexp2/runner.go
+++ b/vendor/github.com/dlclark/regexp2/runner.go
@@ -58,10 +58,9 @@ type runner struct {
 
 	runmatch *Match // result object
 
-	ignoreTimeout       bool
-	timeout             time.Duration // timeout in milliseconds (needed for actual)
-	timeoutChecksToSkip int
-	timeoutAt           time.Time
+	ignoreTimeout bool
+	timeout       time.Duration // timeout in milliseconds (needed for actual)
+	deadline      fasttime
 
 	operator        syntax.InstOp
 	codepos         int
@@ -1551,39 +1550,15 @@ func (r *runner) isECMABoundary(index, startpos, endpos int) bool {
 		(index < endpos && syntax.IsECMAWordChar(r.runtext[index]))
 }
 
-// this seems like a comment to justify randomly picking 1000 :-P
-// We have determined this value in a series of experiments where x86 retail
-// builds (ono-lab-optimized) were run on different pattern/input pairs. Larger values
-// of TimeoutCheckFrequency did not tend to increase performance; smaller values
-// of TimeoutCheckFrequency tended to slow down the execution.
-const timeoutCheckFrequency int = 1000
-
 func (r *runner) startTimeoutWatch() {
 	if r.ignoreTimeout {
 		return
 	}
-
-	r.timeoutChecksToSkip = timeoutCheckFrequency
-	r.timeoutAt = time.Now().Add(r.timeout)
+	r.deadline = makeDeadline(r.timeout)
 }
 
 func (r *runner) checkTimeout() error {
-	if r.ignoreTimeout {
-		return nil
-	}
-	r.timeoutChecksToSkip--
-	if r.timeoutChecksToSkip != 0 {
-		return nil
-	}
-
-	r.timeoutChecksToSkip = timeoutCheckFrequency
-	return r.doCheckTimeout()
-}
-
-func (r *runner) doCheckTimeout() error {
-	current := time.Now()
-
-	if current.Before(r.timeoutAt) {
+	if r.ignoreTimeout || !r.deadline.reached() {
 		return nil
 	}
 
diff --git a/vendor/github.com/dlclark/regexp2/syntax/charclass.go b/vendor/github.com/dlclark/regexp2/syntax/charclass.go
index 53974d10131a..6881a0e29774 100644
--- a/vendor/github.com/dlclark/regexp2/syntax/charclass.go
+++ b/vendor/github.com/dlclark/regexp2/syntax/charclass.go
@@ -37,6 +37,8 @@ var (
 	ecmaSpace = []rune{0x0009, 0x000e, 0x0020, 0x0021, 0x00a0, 0x00a1, 0x1680, 0x1681, 0x2000, 0x200b, 0x2028, 0x202a, 0x202f, 0x2030, 0x205f, 0x2060, 0x3000, 0x3001, 0xfeff, 0xff00}
 	ecmaWord  = []rune{0x0030, 0x003a, 0x0041, 0x005b, 0x005f, 0x0060, 0x0061, 0x007b}
 	ecmaDigit = []rune{0x0030, 0x003a}
+
+	re2Space = []rune{0x0009, 0x000b, 0x000c, 0x000e, 0x0020, 0x0021}
 )
 
 var (
@@ -56,6 +58,9 @@ var (
 	NotSpaceClass = getCharSetFromCategoryString(true, false, spaceCategoryText)
 	DigitClass    = getCharSetFromCategoryString(false, false, "Nd")
 	NotDigitClass = getCharSetFromCategoryString(false, true, "Nd")
+
+	RE2SpaceClass    = getCharSetFromOldString(re2Space, false)
+	NotRE2SpaceClass = getCharSetFromOldString(re2Space, true)
 )
 
 var unicodeCategories = func() map[string]*unicode.RangeTable {
@@ -401,13 +406,19 @@ func (c *CharSet) addChar(ch rune) {
 	c.addRange(ch, ch)
 }
 
-func (c *CharSet) addSpace(ecma, negate bool) {
+func (c *CharSet) addSpace(ecma, re2, negate bool) {
 	if ecma {
 		if negate {
 			c.addRanges(NotECMASpaceClass().ranges)
 		} else {
 			c.addRanges(ECMASpaceClass().ranges)
 		}
+	} else if re2 {
+		if negate {
+			c.addRanges(NotRE2SpaceClass().ranges)
+		} else {
+			c.addRanges(RE2SpaceClass().ranges)
+		}
 	} else {
 		c.addCategories(category{cat: spaceCategoryText, negate: negate})
 	}
@@ -563,7 +574,7 @@ func (c *CharSet) addNamedASCII(name string, negate bool) bool {
 	case "punct": //[!-/:-@[-`{-~]
 		rs = []singleRange{singleRange{'!', '/'}, singleRange{':', '@'}, singleRange{'[', '`'}, singleRange{'{', '~'}}
 	case "space":
-		c.addSpace(true, negate)
+		c.addSpace(true, false, negate)
 	case "upper":
 		rs = []singleRange{singleRange{'A', 'Z'}}
 	case "word":
diff --git a/vendor/github.com/dlclark/regexp2/syntax/parser.go b/vendor/github.com/dlclark/regexp2/syntax/parser.go
index da14f98e3814..b6c3670c036b 100644
--- a/vendor/github.com/dlclark/regexp2/syntax/parser.go
+++ b/vendor/github.com/dlclark/regexp2/syntax/parser.go
@@ -22,6 +22,7 @@ const (
 	Debug                                = 0x0080 // "d"
 	ECMAScript                           = 0x0100 // "e"
 	RE2                                  = 0x0200 // RE2 compat mode
+	Unicode                              = 0x0400 // "u"
 )
 
 func optionFromCode(ch rune) RegexOptions {
@@ -43,6 +44,8 @@ func optionFromCode(ch rune) RegexOptions {
 		return Debug
 	case 'e', 'E':
 		return ECMAScript
+	case 'u', 'U':
+		return Unicode
 	default:
 		return 0
 	}
@@ -104,7 +107,7 @@ const (
 	ErrBadClassInCharRange        = "cannot include class \\%v in character range"
 	ErrUnterminatedBracket        = "unterminated [] set"
 	ErrSubtractionMustBeLast      = "a subtraction must be the last element in a character class"
-	ErrReversedCharRange          = "[x-y] range in reverse order"
+	ErrReversedCharRange          = "[%c-%c] range in reverse order"
 )
 
 func (e ErrorCode) String() string {
@@ -1121,14 +1124,14 @@ func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) {
 
 	case 'w':
 		p.moveRight(1)
-		if p.useOptionE() {
+		if p.useOptionE() || p.useRE2() {
 			return newRegexNodeSet(ntSet, p.options, ECMAWordClass()), nil
 		}
 		return newRegexNodeSet(ntSet, p.options, WordClass()), nil
 
 	case 'W':
 		p.moveRight(1)
-		if p.useOptionE() {
+		if p.useOptionE() || p.useRE2() {
 			return newRegexNodeSet(ntSet, p.options, NotECMAWordClass()), nil
 		}
 		return newRegexNodeSet(ntSet, p.options, NotWordClass()), nil
@@ -1137,6 +1140,8 @@ func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) {
 		p.moveRight(1)
 		if p.useOptionE() {
 			return newRegexNodeSet(ntSet, p.options, ECMASpaceClass()), nil
+		} else if p.useRE2() {
+			return newRegexNodeSet(ntSet, p.options, RE2SpaceClass()), nil
 		}
 		return newRegexNodeSet(ntSet, p.options, SpaceClass()), nil
 
@@ -1144,19 +1149,21 @@ func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) {
 		p.moveRight(1)
 		if p.useOptionE() {
 			return newRegexNodeSet(ntSet, p.options, NotECMASpaceClass()), nil
+		} else if p.useRE2() {
+			return newRegexNodeSet(ntSet, p.options, NotRE2SpaceClass()), nil
 		}
 		return newRegexNodeSet(ntSet, p.options, NotSpaceClass()), nil
 
 	case 'd':
 		p.moveRight(1)
-		if p.useOptionE() {
+		if p.useOptionE() || p.useRE2() {
 			return newRegexNodeSet(ntSet, p.options, ECMADigitClass()), nil
 		}
 		return newRegexNodeSet(ntSet, p.options, DigitClass()), nil
 
 	case 'D':
 		p.moveRight(1)
-		if p.useOptionE() {
+		if p.useOptionE() || p.useRE2() {
 			return newRegexNodeSet(ntSet, p.options, NotECMADigitClass()), nil
 		}
 		return newRegexNodeSet(ntSet, p.options, NotDigitClass()), nil
@@ -1186,19 +1193,24 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
 		return nil, p.getErr(ErrIllegalEndEscape)
 	}
 	angled := false
+	k := false
 	close := '\x00'
 
 	backpos := p.textpos()
 	ch := p.rightChar(0)
 
-	// allow \k instead of \, which is now deprecated
+	// Allow \k instead of \, which is now deprecated.
 
-	if ch == 'k' {
+	// According to ECMAScript specification, \k is only parsed as a named group reference if
+	// there is at least one group name in the regexp.
+	// See https://www.ecma-international.org/ecma-262/#sec-isvalidregularexpressionliteral, step 7.
+	// Note, during the first (scanOnly) run we may not have all group names scanned, but that's ok.
+	if ch == 'k' && (!p.useOptionE() || len(p.capnames) > 0) {
 		if p.charsRight() >= 2 {
 			p.moveRight(1)
 			ch = p.moveRightGetChar()
 
-			if ch == '<' || ch == '\'' {
+			if ch == '<' || (!p.useOptionE() && ch == '\'') { // No support for \k'name' in ECMAScript
 				angled = true
 				if ch == '\'' {
 					close = '\''
@@ -1213,8 +1225,9 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
 		}
 
 		ch = p.rightChar(0)
+		k = true
 
-	} else if (ch == '<' || ch == '\'') && p.charsRight() > 1 { // Note angle without \g
+	} else if !p.useOptionE() && (ch == '<' || ch == '\'') && p.charsRight() > 1 { // Note angle without \g
 		angled = true
 		if ch == '\'' {
 			close = '\''
@@ -1257,14 +1270,23 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
 			return nil, p.getErr(ErrUndefinedBackRef, capnum)
 		}
 
-	} else if angled && IsWordChar(ch) {
+	} else if angled {
 		capname := p.scanCapname()
 
-		if p.charsRight() > 0 && p.moveRightGetChar() == close {
+		if capname != "" && p.charsRight() > 0 && p.moveRightGetChar() == close {
+
+			if scanOnly {
+				return nil, nil
+			}
+
 			if p.isCaptureName(capname) {
 				return newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil
 			}
 			return nil, p.getErr(ErrUndefinedNameRef, capname)
+		} else {
+			if k {
+				return nil, p.getErr(ErrMalformedNameRef)
+			}
 		}
 	}
 
@@ -1276,6 +1298,10 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
 		return nil, err
 	}
 
+	if scanOnly {
+		return nil, nil
+	}
+
 	if p.useOptionI() {
 		ch = unicode.ToLower(ch)
 	}
@@ -1285,6 +1311,17 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
 
 // Scans X for \p{X} or \P{X}
 func (p *parser) parseProperty() (string, error) {
+	// RE2 and PCRE supports \pX syntax (no {} and only 1 letter unicode cats supported)
+	// since this is purely additive syntax it's not behind a flag
+	if p.charsRight() >= 1 && p.rightChar(0) != '{' {
+		ch := string(p.moveRightGetChar())
+		// check if it's a valid cat
+		if !isValidUnicodeCat(ch) {
+			return "", p.getErr(ErrUnknownSlashP, ch)
+		}
+		return ch, nil
+	}
+
 	if p.charsRight() < 3 {
 		return "", p.getErr(ErrIncompleteSlashP)
 	}
@@ -1401,7 +1438,7 @@ func (p *parser) scanCapname() string {
 	return string(p.pattern[startpos:p.textpos()])
 }
 
-//Scans contents of [] (not including []'s), and converts to a set.
+// Scans contents of [] (not including []'s), and converts to a set.
 func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
 	ch := '\x00'
 	chPrev := '\x00'
@@ -1443,7 +1480,7 @@ func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
 					if inRange {
 						return nil, p.getErr(ErrBadClassInCharRange, ch)
 					}
-					cc.addDigit(p.useOptionE(), ch == 'D', p.patternRaw)
+					cc.addDigit(p.useOptionE() || p.useRE2(), ch == 'D', p.patternRaw)
 				}
 				continue
 
@@ -1452,7 +1489,7 @@ func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
 					if inRange {
 						return nil, p.getErr(ErrBadClassInCharRange, ch)
 					}
-					cc.addSpace(p.useOptionE(), ch == 'S')
+					cc.addSpace(p.useOptionE(), p.useRE2(), ch == 'S')
 				}
 				continue
 
@@ -1462,7 +1499,7 @@ func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
 						return nil, p.getErr(ErrBadClassInCharRange, ch)
 					}
 
-					cc.addWord(p.useOptionE(), ch == 'W')
+					cc.addWord(p.useOptionE() || p.useRE2(), ch == 'W')
 				}
 				continue
 
@@ -1548,7 +1585,7 @@ func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
 				} else {
 					// a regular range, like a-z
 					if chPrev > ch {
-						return nil, p.getErr(ErrReversedCharRange)
+						return nil, p.getErr(ErrReversedCharRange, chPrev, ch)
 					}
 					cc.addRange(chPrev, ch)
 				}
@@ -1672,7 +1709,13 @@ func (p *parser) scanCharEscape() (r rune, err error) {
 			r, err = p.scanHex(2)
 		}
 	case 'u':
-		r, err = p.scanHex(4)
+		// ECMAScript supports \u{HEX} only if `u` is also set
+		if p.useOptionE() && p.useOptionU() && p.charsRight() > 0 && p.rightChar(0) == '{' {
+			p.moveRight(1)
+			return p.scanHexUntilBrace()
+		} else {
+			r, err = p.scanHex(4)
+		}
 	case 'a':
 		return '\u0007', nil
 	case 'b':
@@ -1692,7 +1735,7 @@ func (p *parser) scanCharEscape() (r rune, err error) {
 	case 'c':
 		r, err = p.scanControl()
 	default:
-		if !p.useOptionE() && IsWordChar(ch) {
+		if !p.useOptionE() && !p.useRE2() && IsWordChar(ch) {
 			return 0, p.getErr(ErrUnrecognizedEscape, string(ch))
 		}
 		return ch, nil
@@ -1949,6 +1992,11 @@ func (p *parser) useRE2() bool {
 	return (p.options & RE2) != 0
 }
 
+// True if U option enabling ECMAScript's Unicode behavior on.
+func (p *parser) useOptionU() bool {
+	return (p.options & Unicode) != 0
+}
+
 // True if options stack is empty.
 func (p *parser) emptyOptionsStack() bool {
 	return len(p.optionsStack) == 0
@@ -2044,7 +2092,8 @@ func (p *parser) addToConcatenate(pos, cch int, isReplacement bool) {
 	}
 
 	if cch > 1 {
-		str := p.pattern[pos : pos+cch]
+		str := make([]rune, cch)
+		copy(str, p.pattern[pos:pos+cch])
 
 		if p.useOptionI() && !isReplacement {
 			// We do the ToLower character by character for consistency.  With surrogate chars, doing
diff --git a/vendor/github.com/dlclark/regexp2/syntax/prefix.go b/vendor/github.com/dlclark/regexp2/syntax/prefix.go
index 011ef0b41683..f67168862932 100644
--- a/vendor/github.com/dlclark/regexp2/syntax/prefix.go
+++ b/vendor/github.com/dlclark/regexp2/syntax/prefix.go
@@ -712,7 +712,7 @@ func (b *BmPrefix) Scan(text []rune, index, beglimit, endlimit int) int {
 
 				if chTest != b.pattern[match] {
 					advance = b.positive[match]
-					if (chTest & 0xFF80) == 0 {
+					if chTest < 128 {
 						test2 = (match - startmatch) + b.negativeASCII[chTest]
 					} else if chTest < 0xffff && len(b.negativeUnicode) > 0 {
 						unicodeLookup = b.negativeUnicode[chTest>>8]
diff --git a/vendor/github.com/go-redis/redis/v8/.gitignore b/vendor/github.com/go-redis/redis/v8/.gitignore
deleted file mode 100644
index b975a7b4c326..000000000000
--- a/vendor/github.com/go-redis/redis/v8/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*.rdb
-testdata/*/
-.idea/
diff --git a/vendor/github.com/go-redis/redis/v8/CHANGELOG.md b/vendor/github.com/go-redis/redis/v8/CHANGELOG.md
deleted file mode 100644
index 195e51933866..000000000000
--- a/vendor/github.com/go-redis/redis/v8/CHANGELOG.md
+++ /dev/null
@@ -1,177 +0,0 @@
-## [8.11.5](https://github.com/go-redis/redis/compare/v8.11.4...v8.11.5) (2022-03-17)
-
-
-### Bug Fixes
-
-* add missing Expire methods to Cmdable ([17e3b43](https://github.com/go-redis/redis/commit/17e3b43879d516437ada71cf9c0deac6a382ed9a))
-* add whitespace for avoid unlikely colisions ([7f7c181](https://github.com/go-redis/redis/commit/7f7c1817617cfec909efb13d14ad22ef05a6ad4c))
-* example/otel compile error ([#2028](https://github.com/go-redis/redis/issues/2028)) ([187c07c](https://github.com/go-redis/redis/commit/187c07c41bf68dc3ab280bc3a925e960bbef6475))
-* **extra/redisotel:** set span.kind attribute to client ([065b200](https://github.com/go-redis/redis/commit/065b200070b41e6e949710b4f9e01b50ccc60ab2))
-* format ([96f53a0](https://github.com/go-redis/redis/commit/96f53a0159a28affa94beec1543a62234e7f8b32))
-* invalid type assert in stringArg ([de6c131](https://github.com/go-redis/redis/commit/de6c131865b8263400c8491777b295035f2408e4))
-* rename Golang to Go ([#2030](https://github.com/go-redis/redis/issues/2030)) ([b82a2d9](https://github.com/go-redis/redis/commit/b82a2d9d4d2de7b7cbe8fcd4895be62dbcacacbc))
-* set timeout for WAIT command. Fixes [#1963](https://github.com/go-redis/redis/issues/1963) ([333fee1](https://github.com/go-redis/redis/commit/333fee1a8fd98a2fbff1ab187c1b03246a7eb01f))
-* update some argument counts in pre-allocs ([f6974eb](https://github.com/go-redis/redis/commit/f6974ebb5c40a8adf90d2cacab6dc297f4eba4c2))
-
-
-### Features
-
-* Add redis v7's NX, XX, GT, LT expire variants ([e19bbb2](https://github.com/go-redis/redis/commit/e19bbb26e2e395c6e077b48d80d79e99f729a8b8))
-* add support for acl sentinel auth in universal client ([ab0ccc4](https://github.com/go-redis/redis/commit/ab0ccc47413f9b2a6eabc852fed5005a3ee1af6e))
-* add support for COPY command ([#2016](https://github.com/go-redis/redis/issues/2016)) ([730afbc](https://github.com/go-redis/redis/commit/730afbcffb93760e8a36cc06cfe55ab102b693a7))
-* add support for passing extra attributes added to spans ([39faaa1](https://github.com/go-redis/redis/commit/39faaa171523834ba527c9789710c4fde87f5a2e))
-* add support for time.Duration write and scan ([2f1b74e](https://github.com/go-redis/redis/commit/2f1b74e20cdd7719b2aecf0768d3e3ae7c3e781b))
-* **redisotel:** ability to override TracerProvider ([#1998](https://github.com/go-redis/redis/issues/1998)) ([bf8d4aa](https://github.com/go-redis/redis/commit/bf8d4aa60c00366cda2e98c3ddddc8cf68507417))
-* set net.peer.name and net.peer.port in otel example ([69bf454](https://github.com/go-redis/redis/commit/69bf454f706204211cd34835f76b2e8192d3766d))
-
-
-
-## [8.11.4](https://github.com/go-redis/redis/compare/v8.11.3...v8.11.4) (2021-10-04)
-
-
-### Features
-
-* add acl auth support for sentinels ([f66582f](https://github.com/go-redis/redis/commit/f66582f44f3dc3a4705a5260f982043fde4aa634))
-* add Cmd.{String,Int,Float,Bool}Slice helpers and an example ([5d3d293](https://github.com/go-redis/redis/commit/5d3d293cc9c60b90871e2420602001463708ce24))
-* add SetVal method for each command ([168981d](https://github.com/go-redis/redis/commit/168981da2d84ee9e07d15d3e74d738c162e264c4))
-
-
-
-## v8.11
-
-- Remove OpenTelemetry metrics.
-- Supports more redis commands and options.
-
-## v8.10
-
-- Removed extra OpenTelemetry spans from go-redis core. Now go-redis instrumentation only adds a
-  single span with a Redis command (instead of 4 spans). There are multiple reasons behind this
-  decision:
-
-  - Traces become smaller and less noisy.
-  - It may be costly to process those 3 extra spans for each query.
-  - go-redis no longer depends on OpenTelemetry.
-
-  Eventually we hope to replace the information that we no longer collect with OpenTelemetry
-  Metrics.
-
-## v8.9
-
-- Changed `PubSub.Channel` to only rely on `Ping` result. You can now use `WithChannelSize`,
-  `WithChannelHealthCheckInterval`, and `WithChannelSendTimeout` to override default settings.
-
-## v8.8
-
-- To make updating easier, extra modules now have the same version as go-redis does. That means that
-  you need to update your imports:
-
-```
-github.com/go-redis/redis/extra/redisotel -> github.com/go-redis/redis/extra/redisotel/v8
-github.com/go-redis/redis/extra/rediscensus -> github.com/go-redis/redis/extra/rediscensus/v8
-```
-
-## v8.5
-
-- [knadh](https://github.com/knadh) contributed long-awaited ability to scan Redis Hash into a
-  struct:
-
-```go
-err := rdb.HGetAll(ctx, "hash").Scan(&data)
-
-err := rdb.MGet(ctx, "key1", "key2").Scan(&data)
-```
-
-- Please check [redismock](https://github.com/go-redis/redismock) by
-  [monkey92t](https://github.com/monkey92t) if you are looking for mocking Redis Client.
-
-## v8
-
-- All commands require `context.Context` as a first argument, e.g. `rdb.Ping(ctx)`. If you are not
-  using `context.Context` yet, the simplest option is to define global package variable
-  `var ctx = context.TODO()` and use it when `ctx` is required.
-
-- Full support for `context.Context` canceling.
-
-- Added `redis.NewFailoverClusterClient` that supports routing read-only commands to a slave node.
-
-- Added `redisext.OpenTemetryHook` that adds
-  [Redis OpenTelemetry instrumentation](https://redis.uptrace.dev/tracing/).
-
-- Redis slow log support.
-
-- Ring uses Rendezvous Hashing by default which provides better distribution. You need to move
-  existing keys to a new location or keys will be inaccessible / lost. To use old hashing scheme:
-
-```go
-import "github.com/golang/groupcache/consistenthash"
-
-ring := redis.NewRing(&redis.RingOptions{
-    NewConsistentHash: func() {
-        return consistenthash.New(100, crc32.ChecksumIEEE)
-    },
-})
-```
-
-- `ClusterOptions.MaxRedirects` default value is changed from 8 to 3.
-- `Options.MaxRetries` default value is changed from 0 to 3.
-
-- `Cluster.ForEachNode` is renamed to `ForEachShard` for consistency with `Ring`.
-
-## v7.3
-
-- New option `Options.Username` which causes client to use `AuthACL`. Be aware if your connection
-  URL contains username.
-
-## v7.2
-
-- Existing `HMSet` is renamed to `HSet` and old deprecated `HMSet` is restored for Redis 3 users.
-
-## v7.1
-
-- Existing `Cmd.String` is renamed to `Cmd.Text`. New `Cmd.String` implements `fmt.Stringer`
-  interface.
-
-## v7
-
-- _Important_. Tx.Pipeline now returns a non-transactional pipeline. Use Tx.TxPipeline for a
-  transactional pipeline.
-- WrapProcess is replaced with more convenient AddHook that has access to context.Context.
-- WithContext now can not be used to create a shallow copy of the client.
-- New methods ProcessContext, DoContext, and ExecContext.
-- Client respects Context.Deadline when setting net.Conn deadline.
-- Client listens on Context.Done while waiting for a connection from the pool and returns an error
-  when context context is cancelled.
-- Add PubSub.ChannelWithSubscriptions that sends `*Subscription` in addition to `*Message` to allow
-  detecting reconnections.
-- `time.Time` is now marshalled in RFC3339 format. `rdb.Get("foo").Time()` helper is added to parse
-  the time.
-- `SetLimiter` is removed and added `Options.Limiter` instead.
-- `HMSet` is deprecated as of Redis v4.
-
-## v6.15
-
-- Cluster and Ring pipelines process commands for each node in its own goroutine.
-
-## 6.14
-
-- Added Options.MinIdleConns.
-- Added Options.MaxConnAge.
-- PoolStats.FreeConns is renamed to PoolStats.IdleConns.
-- Add Client.Do to simplify creating custom commands.
-- Add Cmd.String, Cmd.Int, Cmd.Int64, Cmd.Uint64, Cmd.Float64, and Cmd.Bool helpers.
-- Lower memory usage.
-
-## v6.13
-
-- Ring got new options called `HashReplicas` and `Hash`. It is recommended to set
-  `HashReplicas = 1000` for better keys distribution between shards.
-- Cluster client was optimized to use much less memory when reloading cluster state.
-- PubSub.ReceiveMessage is re-worked to not use ReceiveTimeout so it does not lose data when timeout
-  occurres. In most cases it is recommended to use PubSub.Channel instead.
-- Dialer.KeepAlive is set to 5 minutes by default.
-
-## v6.12
-
-- ClusterClient got new option called `ClusterSlots` which allows to build cluster of normal Redis
-  Servers that don't have cluster mode enabled. See
-  https://godoc.org/github.com/go-redis/redis#example-NewClusterClient--ManualSetup
diff --git a/vendor/github.com/go-redis/redis/v8/Makefile b/vendor/github.com/go-redis/redis/v8/Makefile
deleted file mode 100644
index a4cfe0576e68..000000000000
--- a/vendor/github.com/go-redis/redis/v8/Makefile
+++ /dev/null
@@ -1,35 +0,0 @@
-PACKAGE_DIRS := $(shell find . -mindepth 2 -type f -name 'go.mod' -exec dirname {} \; | sort)
-
-test: testdeps
-	go test ./...
-	go test ./... -short -race
-	go test ./... -run=NONE -bench=. -benchmem
-	env GOOS=linux GOARCH=386 go test ./...
-	go vet
-
-testdeps: testdata/redis/src/redis-server
-
-bench: testdeps
-	go test ./... -test.run=NONE -test.bench=. -test.benchmem
-
-.PHONY: all test testdeps bench
-
-testdata/redis:
-	mkdir -p $@
-	wget -qO- https://download.redis.io/releases/redis-6.2.5.tar.gz | tar xvz --strip-components=1 -C $@
-
-testdata/redis/src/redis-server: testdata/redis
-	cd $< && make all
-
-fmt:
-	gofmt -w -s ./
-	goimports -w  -local github.com/go-redis/redis ./
-
-go_mod_tidy:
-	go get -u && go mod tidy
-	set -e; for dir in $(PACKAGE_DIRS); do \
-	  echo "go mod tidy in $${dir}"; \
-	  (cd "$${dir}" && \
-	    go get -u && \
-	    go mod tidy); \
-	done
diff --git a/vendor/github.com/go-redis/redis/v8/README.md b/vendor/github.com/go-redis/redis/v8/README.md
deleted file mode 100644
index f3b6a018cb54..000000000000
--- a/vendor/github.com/go-redis/redis/v8/README.md
+++ /dev/null
@@ -1,175 +0,0 @@
-# Redis client for Go
-
-![build workflow](https://github.com/go-redis/redis/actions/workflows/build.yml/badge.svg)
-[![PkgGoDev](https://pkg.go.dev/badge/github.com/go-redis/redis/v8)](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc)
-[![Documentation](https://img.shields.io/badge/redis-documentation-informational)](https://redis.uptrace.dev/)
-
-go-redis is brought to you by :star: [**uptrace/uptrace**](https://github.com/uptrace/uptrace).
-Uptrace is an open source and blazingly fast **distributed tracing** backend powered by
-OpenTelemetry and ClickHouse. Give it a star as well!
-
-## Resources
-
-- [Discussions](https://github.com/go-redis/redis/discussions)
-- [Documentation](https://redis.uptrace.dev)
-- [Reference](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc)
-- [Examples](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#pkg-examples)
-- [RealWorld example app](https://github.com/uptrace/go-treemux-realworld-example-app)
-
-Other projects you may like:
-
-- [Bun](https://bun.uptrace.dev) - fast and simple SQL client for PostgreSQL, MySQL, and SQLite.
-- [BunRouter](https://bunrouter.uptrace.dev/) - fast and flexible HTTP router for Go.
-
-## Ecosystem
-
-- [Redis Mock](https://github.com/go-redis/redismock)
-- [Distributed Locks](https://github.com/bsm/redislock)
-- [Redis Cache](https://github.com/go-redis/cache)
-- [Rate limiting](https://github.com/go-redis/redis_rate)
-
-## Features
-
-- Redis 3 commands except QUIT, MONITOR, and SYNC.
-- Automatic connection pooling with
-  [circuit breaker](https://en.wikipedia.org/wiki/Circuit_breaker_design_pattern) support.
-- [Pub/Sub](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#PubSub).
-- [Transactions](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-Client-TxPipeline).
-- [Pipeline](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-Client.Pipeline) and
-  [TxPipeline](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-Client.TxPipeline).
-- [Scripting](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#Script).
-- [Timeouts](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#Options).
-- [Redis Sentinel](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#NewFailoverClient).
-- [Redis Cluster](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#NewClusterClient).
-- [Cluster of Redis Servers](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-NewClusterClient-ManualSetup)
-  without using cluster mode and Redis Sentinel.
-- [Ring](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#NewRing).
-- [Instrumentation](https://pkg.go.dev/github.com/go-redis/redis/v8?tab=doc#example-package-Instrumentation).
-
-## Installation
-
-go-redis supports 2 last Go versions and requires a Go version with
-[modules](https://github.com/golang/go/wiki/Modules) support. So make sure to initialize a Go
-module:
-
-```shell
-go mod init github.com/my/repo
-```
-
-And then install go-redis/v8 (note _v8_ in the import; omitting it is a popular mistake):
-
-```shell
-go get github.com/go-redis/redis/v8
-```
-
-## Quickstart
-
-```go
-import (
-    "context"
-    "github.com/go-redis/redis/v8"
-    "fmt"
-)
-
-var ctx = context.Background()
-
-func ExampleClient() {
-    rdb := redis.NewClient(&redis.Options{
-        Addr:     "localhost:6379",
-        Password: "", // no password set
-        DB:       0,  // use default DB
-    })
-
-    err := rdb.Set(ctx, "key", "value", 0).Err()
-    if err != nil {
-        panic(err)
-    }
-
-    val, err := rdb.Get(ctx, "key").Result()
-    if err != nil {
-        panic(err)
-    }
-    fmt.Println("key", val)
-
-    val2, err := rdb.Get(ctx, "key2").Result()
-    if err == redis.Nil {
-        fmt.Println("key2 does not exist")
-    } else if err != nil {
-        panic(err)
-    } else {
-        fmt.Println("key2", val2)
-    }
-    // Output: key value
-    // key2 does not exist
-}
-```
-
-## Look and feel
-
-Some corner cases:
-
-```go
-// SET key value EX 10 NX
-set, err := rdb.SetNX(ctx, "key", "value", 10*time.Second).Result()
-
-// SET key value keepttl NX
-set, err := rdb.SetNX(ctx, "key", "value", redis.KeepTTL).Result()
-
-// SORT list LIMIT 0 2 ASC
-vals, err := rdb.Sort(ctx, "list", &redis.Sort{Offset: 0, Count: 2, Order: "ASC"}).Result()
-
-// ZRANGEBYSCORE zset -inf +inf WITHSCORES LIMIT 0 2
-vals, err := rdb.ZRangeByScoreWithScores(ctx, "zset", &redis.ZRangeBy{
-    Min: "-inf",
-    Max: "+inf",
-    Offset: 0,
-    Count: 2,
-}).Result()
-
-// ZINTERSTORE out 2 zset1 zset2 WEIGHTS 2 3 AGGREGATE SUM
-vals, err := rdb.ZInterStore(ctx, "out", &redis.ZStore{
-    Keys: []string{"zset1", "zset2"},
-    Weights: []int64{2, 3}
-}).Result()
-
-// EVAL "return {KEYS[1],ARGV[1]}" 1 "key" "hello"
-vals, err := rdb.Eval(ctx, "return {KEYS[1],ARGV[1]}", []string{"key"}, "hello").Result()
-
-// custom command
-res, err := rdb.Do(ctx, "set", "key", "value").Result()
-```
-
-## Run the test
-
-go-redis will start a redis-server and run the test cases.
-
-The paths of redis-server bin file and redis config file are defined in `main_test.go`:
-
-```
-var (
-	redisServerBin, _  = filepath.Abs(filepath.Join("testdata", "redis", "src", "redis-server"))
-	redisServerConf, _ = filepath.Abs(filepath.Join("testdata", "redis", "redis.conf"))
-)
-```
-
-For local testing, you can change the variables to refer to your local files, or create a soft link
-to the corresponding folder for redis-server and copy the config file to `testdata/redis/`:
-
-```
-ln -s /usr/bin/redis-server ./go-redis/testdata/redis/src
-cp ./go-redis/testdata/redis.conf ./go-redis/testdata/redis/
-```
-
-Lastly, run:
-
-```
-go test
-```
-
-## Contributors
-
-Thanks to all the people who already contributed!
-
-
-  
-
diff --git a/vendor/github.com/go-redis/redis/v8/command.go b/vendor/github.com/go-redis/redis/v8/command.go
deleted file mode 100644
index 4bb12a85be43..000000000000
--- a/vendor/github.com/go-redis/redis/v8/command.go
+++ /dev/null
@@ -1,3478 +0,0 @@
-package redis
-
-import (
-	"context"
-	"fmt"
-	"net"
-	"strconv"
-	"time"
-
-	"github.com/go-redis/redis/v8/internal"
-	"github.com/go-redis/redis/v8/internal/hscan"
-	"github.com/go-redis/redis/v8/internal/proto"
-	"github.com/go-redis/redis/v8/internal/util"
-)
-
-type Cmder interface {
-	Name() string
-	FullName() string
-	Args() []interface{}
-	String() string
-	stringArg(int) string
-	firstKeyPos() int8
-	SetFirstKeyPos(int8)
-
-	readTimeout() *time.Duration
-	readReply(rd *proto.Reader) error
-
-	SetErr(error)
-	Err() error
-}
-
-func setCmdsErr(cmds []Cmder, e error) {
-	for _, cmd := range cmds {
-		if cmd.Err() == nil {
-			cmd.SetErr(e)
-		}
-	}
-}
-
-func cmdsFirstErr(cmds []Cmder) error {
-	for _, cmd := range cmds {
-		if err := cmd.Err(); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-func writeCmds(wr *proto.Writer, cmds []Cmder) error {
-	for _, cmd := range cmds {
-		if err := writeCmd(wr, cmd); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-func writeCmd(wr *proto.Writer, cmd Cmder) error {
-	return wr.WriteArgs(cmd.Args())
-}
-
-func cmdFirstKeyPos(cmd Cmder, info *CommandInfo) int {
-	if pos := cmd.firstKeyPos(); pos != 0 {
-		return int(pos)
-	}
-
-	switch cmd.Name() {
-	case "eval", "evalsha":
-		if cmd.stringArg(2) != "0" {
-			return 3
-		}
-
-		return 0
-	case "publish":
-		return 1
-	case "memory":
-		// https://github.com/redis/redis/issues/7493
-		if cmd.stringArg(1) == "usage" {
-			return 2
-		}
-	}
-
-	if info != nil {
-		return int(info.FirstKeyPos)
-	}
-	return 0
-}
-
-func cmdString(cmd Cmder, val interface{}) string {
-	b := make([]byte, 0, 64)
-
-	for i, arg := range cmd.Args() {
-		if i > 0 {
-			b = append(b, ' ')
-		}
-		b = internal.AppendArg(b, arg)
-	}
-
-	if err := cmd.Err(); err != nil {
-		b = append(b, ": "...)
-		b = append(b, err.Error()...)
-	} else if val != nil {
-		b = append(b, ": "...)
-		b = internal.AppendArg(b, val)
-	}
-
-	return internal.String(b)
-}
-
-//------------------------------------------------------------------------------
-
-type baseCmd struct {
-	ctx    context.Context
-	args   []interface{}
-	err    error
-	keyPos int8
-
-	_readTimeout *time.Duration
-}
-
-var _ Cmder = (*Cmd)(nil)
-
-func (cmd *baseCmd) Name() string {
-	if len(cmd.args) == 0 {
-		return ""
-	}
-	// Cmd name must be lower cased.
-	return internal.ToLower(cmd.stringArg(0))
-}
-
-func (cmd *baseCmd) FullName() string {
-	switch name := cmd.Name(); name {
-	case "cluster", "command":
-		if len(cmd.args) == 1 {
-			return name
-		}
-		if s2, ok := cmd.args[1].(string); ok {
-			return name + " " + s2
-		}
-		return name
-	default:
-		return name
-	}
-}
-
-func (cmd *baseCmd) Args() []interface{} {
-	return cmd.args
-}
-
-func (cmd *baseCmd) stringArg(pos int) string {
-	if pos < 0 || pos >= len(cmd.args) {
-		return ""
-	}
-	arg := cmd.args[pos]
-	switch v := arg.(type) {
-	case string:
-		return v
-	default:
-		// TODO: consider using appendArg
-		return fmt.Sprint(v)
-	}
-}
-
-func (cmd *baseCmd) firstKeyPos() int8 {
-	return cmd.keyPos
-}
-
-func (cmd *baseCmd) SetFirstKeyPos(keyPos int8) {
-	cmd.keyPos = keyPos
-}
-
-func (cmd *baseCmd) SetErr(e error) {
-	cmd.err = e
-}
-
-func (cmd *baseCmd) Err() error {
-	return cmd.err
-}
-
-func (cmd *baseCmd) readTimeout() *time.Duration {
-	return cmd._readTimeout
-}
-
-func (cmd *baseCmd) setReadTimeout(d time.Duration) {
-	cmd._readTimeout = &d
-}
-
-//------------------------------------------------------------------------------
-
-type Cmd struct {
-	baseCmd
-
-	val interface{}
-}
-
-func NewCmd(ctx context.Context, args ...interface{}) *Cmd {
-	return &Cmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *Cmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *Cmd) SetVal(val interface{}) {
-	cmd.val = val
-}
-
-func (cmd *Cmd) Val() interface{} {
-	return cmd.val
-}
-
-func (cmd *Cmd) Result() (interface{}, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *Cmd) Text() (string, error) {
-	if cmd.err != nil {
-		return "", cmd.err
-	}
-	return toString(cmd.val)
-}
-
-func toString(val interface{}) (string, error) {
-	switch val := val.(type) {
-	case string:
-		return val, nil
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for String", val)
-		return "", err
-	}
-}
-
-func (cmd *Cmd) Int() (int, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	switch val := cmd.val.(type) {
-	case int64:
-		return int(val), nil
-	case string:
-		return strconv.Atoi(val)
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for Int", val)
-		return 0, err
-	}
-}
-
-func (cmd *Cmd) Int64() (int64, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return toInt64(cmd.val)
-}
-
-func toInt64(val interface{}) (int64, error) {
-	switch val := val.(type) {
-	case int64:
-		return val, nil
-	case string:
-		return strconv.ParseInt(val, 10, 64)
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for Int64", val)
-		return 0, err
-	}
-}
-
-func (cmd *Cmd) Uint64() (uint64, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return toUint64(cmd.val)
-}
-
-func toUint64(val interface{}) (uint64, error) {
-	switch val := val.(type) {
-	case int64:
-		return uint64(val), nil
-	case string:
-		return strconv.ParseUint(val, 10, 64)
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for Uint64", val)
-		return 0, err
-	}
-}
-
-func (cmd *Cmd) Float32() (float32, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return toFloat32(cmd.val)
-}
-
-func toFloat32(val interface{}) (float32, error) {
-	switch val := val.(type) {
-	case int64:
-		return float32(val), nil
-	case string:
-		f, err := strconv.ParseFloat(val, 32)
-		if err != nil {
-			return 0, err
-		}
-		return float32(f), nil
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for Float32", val)
-		return 0, err
-	}
-}
-
-func (cmd *Cmd) Float64() (float64, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return toFloat64(cmd.val)
-}
-
-func toFloat64(val interface{}) (float64, error) {
-	switch val := val.(type) {
-	case int64:
-		return float64(val), nil
-	case string:
-		return strconv.ParseFloat(val, 64)
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for Float64", val)
-		return 0, err
-	}
-}
-
-func (cmd *Cmd) Bool() (bool, error) {
-	if cmd.err != nil {
-		return false, cmd.err
-	}
-	return toBool(cmd.val)
-}
-
-func toBool(val interface{}) (bool, error) {
-	switch val := val.(type) {
-	case int64:
-		return val != 0, nil
-	case string:
-		return strconv.ParseBool(val)
-	default:
-		err := fmt.Errorf("redis: unexpected type=%T for Bool", val)
-		return false, err
-	}
-}
-
-func (cmd *Cmd) Slice() ([]interface{}, error) {
-	if cmd.err != nil {
-		return nil, cmd.err
-	}
-	switch val := cmd.val.(type) {
-	case []interface{}:
-		return val, nil
-	default:
-		return nil, fmt.Errorf("redis: unexpected type=%T for Slice", val)
-	}
-}
-
-func (cmd *Cmd) StringSlice() ([]string, error) {
-	slice, err := cmd.Slice()
-	if err != nil {
-		return nil, err
-	}
-
-	ss := make([]string, len(slice))
-	for i, iface := range slice {
-		val, err := toString(iface)
-		if err != nil {
-			return nil, err
-		}
-		ss[i] = val
-	}
-	return ss, nil
-}
-
-func (cmd *Cmd) Int64Slice() ([]int64, error) {
-	slice, err := cmd.Slice()
-	if err != nil {
-		return nil, err
-	}
-
-	nums := make([]int64, len(slice))
-	for i, iface := range slice {
-		val, err := toInt64(iface)
-		if err != nil {
-			return nil, err
-		}
-		nums[i] = val
-	}
-	return nums, nil
-}
-
-func (cmd *Cmd) Uint64Slice() ([]uint64, error) {
-	slice, err := cmd.Slice()
-	if err != nil {
-		return nil, err
-	}
-
-	nums := make([]uint64, len(slice))
-	for i, iface := range slice {
-		val, err := toUint64(iface)
-		if err != nil {
-			return nil, err
-		}
-		nums[i] = val
-	}
-	return nums, nil
-}
-
-func (cmd *Cmd) Float32Slice() ([]float32, error) {
-	slice, err := cmd.Slice()
-	if err != nil {
-		return nil, err
-	}
-
-	floats := make([]float32, len(slice))
-	for i, iface := range slice {
-		val, err := toFloat32(iface)
-		if err != nil {
-			return nil, err
-		}
-		floats[i] = val
-	}
-	return floats, nil
-}
-
-func (cmd *Cmd) Float64Slice() ([]float64, error) {
-	slice, err := cmd.Slice()
-	if err != nil {
-		return nil, err
-	}
-
-	floats := make([]float64, len(slice))
-	for i, iface := range slice {
-		val, err := toFloat64(iface)
-		if err != nil {
-			return nil, err
-		}
-		floats[i] = val
-	}
-	return floats, nil
-}
-
-func (cmd *Cmd) BoolSlice() ([]bool, error) {
-	slice, err := cmd.Slice()
-	if err != nil {
-		return nil, err
-	}
-
-	bools := make([]bool, len(slice))
-	for i, iface := range slice {
-		val, err := toBool(iface)
-		if err != nil {
-			return nil, err
-		}
-		bools[i] = val
-	}
-	return bools, nil
-}
-
-func (cmd *Cmd) readReply(rd *proto.Reader) (err error) {
-	cmd.val, err = rd.ReadReply(sliceParser)
-	return err
-}
-
-// sliceParser implements proto.MultiBulkParse.
-func sliceParser(rd *proto.Reader, n int64) (interface{}, error) {
-	vals := make([]interface{}, n)
-	for i := 0; i < len(vals); i++ {
-		v, err := rd.ReadReply(sliceParser)
-		if err != nil {
-			if err == Nil {
-				vals[i] = nil
-				continue
-			}
-			if err, ok := err.(proto.RedisError); ok {
-				vals[i] = err
-				continue
-			}
-			return nil, err
-		}
-		vals[i] = v
-	}
-	return vals, nil
-}
-
-//------------------------------------------------------------------------------
-
-type SliceCmd struct {
-	baseCmd
-
-	val []interface{}
-}
-
-var _ Cmder = (*SliceCmd)(nil)
-
-func NewSliceCmd(ctx context.Context, args ...interface{}) *SliceCmd {
-	return &SliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *SliceCmd) SetVal(val []interface{}) {
-	cmd.val = val
-}
-
-func (cmd *SliceCmd) Val() []interface{} {
-	return cmd.val
-}
-
-func (cmd *SliceCmd) Result() ([]interface{}, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *SliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-// Scan scans the results from the map into a destination struct. The map keys
-// are matched in the Redis struct fields by the `redis:"field"` tag.
-func (cmd *SliceCmd) Scan(dst interface{}) error {
-	if cmd.err != nil {
-		return cmd.err
-	}
-
-	// Pass the list of keys and values.
-	// Skip the first two args for: HMGET key
-	var args []interface{}
-	if cmd.args[0] == "hmget" {
-		args = cmd.args[2:]
-	} else {
-		// Otherwise, it's: MGET field field ...
-		args = cmd.args[1:]
-	}
-
-	return hscan.Scan(dst, args, cmd.val)
-}
-
-func (cmd *SliceCmd) readReply(rd *proto.Reader) error {
-	v, err := rd.ReadArrayReply(sliceParser)
-	if err != nil {
-		return err
-	}
-	cmd.val = v.([]interface{})
-	return nil
-}
-
-//------------------------------------------------------------------------------
-
-type StatusCmd struct {
-	baseCmd
-
-	val string
-}
-
-var _ Cmder = (*StatusCmd)(nil)
-
-func NewStatusCmd(ctx context.Context, args ...interface{}) *StatusCmd {
-	return &StatusCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *StatusCmd) SetVal(val string) {
-	cmd.val = val
-}
-
-func (cmd *StatusCmd) Val() string {
-	return cmd.val
-}
-
-func (cmd *StatusCmd) Result() (string, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *StatusCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *StatusCmd) readReply(rd *proto.Reader) (err error) {
-	cmd.val, err = rd.ReadString()
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type IntCmd struct {
-	baseCmd
-
-	val int64
-}
-
-var _ Cmder = (*IntCmd)(nil)
-
-func NewIntCmd(ctx context.Context, args ...interface{}) *IntCmd {
-	return &IntCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *IntCmd) SetVal(val int64) {
-	cmd.val = val
-}
-
-func (cmd *IntCmd) Val() int64 {
-	return cmd.val
-}
-
-func (cmd *IntCmd) Result() (int64, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *IntCmd) Uint64() (uint64, error) {
-	return uint64(cmd.val), cmd.err
-}
-
-func (cmd *IntCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *IntCmd) readReply(rd *proto.Reader) (err error) {
-	cmd.val, err = rd.ReadIntReply()
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type IntSliceCmd struct {
-	baseCmd
-
-	val []int64
-}
-
-var _ Cmder = (*IntSliceCmd)(nil)
-
-func NewIntSliceCmd(ctx context.Context, args ...interface{}) *IntSliceCmd {
-	return &IntSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *IntSliceCmd) SetVal(val []int64) {
-	cmd.val = val
-}
-
-func (cmd *IntSliceCmd) Val() []int64 {
-	return cmd.val
-}
-
-func (cmd *IntSliceCmd) Result() ([]int64, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *IntSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *IntSliceCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]int64, n)
-		for i := 0; i < len(cmd.val); i++ {
-			num, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-			cmd.val[i] = num
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type DurationCmd struct {
-	baseCmd
-
-	val       time.Duration
-	precision time.Duration
-}
-
-var _ Cmder = (*DurationCmd)(nil)
-
-func NewDurationCmd(ctx context.Context, precision time.Duration, args ...interface{}) *DurationCmd {
-	return &DurationCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-		precision: precision,
-	}
-}
-
-func (cmd *DurationCmd) SetVal(val time.Duration) {
-	cmd.val = val
-}
-
-func (cmd *DurationCmd) Val() time.Duration {
-	return cmd.val
-}
-
-func (cmd *DurationCmd) Result() (time.Duration, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *DurationCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *DurationCmd) readReply(rd *proto.Reader) error {
-	n, err := rd.ReadIntReply()
-	if err != nil {
-		return err
-	}
-	switch n {
-	// -2 if the key does not exist
-	// -1 if the key exists but has no associated expire
-	case -2, -1:
-		cmd.val = time.Duration(n)
-	default:
-		cmd.val = time.Duration(n) * cmd.precision
-	}
-	return nil
-}
-
-//------------------------------------------------------------------------------
-
-type TimeCmd struct {
-	baseCmd
-
-	val time.Time
-}
-
-var _ Cmder = (*TimeCmd)(nil)
-
-func NewTimeCmd(ctx context.Context, args ...interface{}) *TimeCmd {
-	return &TimeCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *TimeCmd) SetVal(val time.Time) {
-	cmd.val = val
-}
-
-func (cmd *TimeCmd) Val() time.Time {
-	return cmd.val
-}
-
-func (cmd *TimeCmd) Result() (time.Time, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *TimeCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *TimeCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		if n != 2 {
-			return nil, fmt.Errorf("got %d elements, expected 2", n)
-		}
-
-		sec, err := rd.ReadInt()
-		if err != nil {
-			return nil, err
-		}
-
-		microsec, err := rd.ReadInt()
-		if err != nil {
-			return nil, err
-		}
-
-		cmd.val = time.Unix(sec, microsec*1000)
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type BoolCmd struct {
-	baseCmd
-
-	val bool
-}
-
-var _ Cmder = (*BoolCmd)(nil)
-
-func NewBoolCmd(ctx context.Context, args ...interface{}) *BoolCmd {
-	return &BoolCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *BoolCmd) SetVal(val bool) {
-	cmd.val = val
-}
-
-func (cmd *BoolCmd) Val() bool {
-	return cmd.val
-}
-
-func (cmd *BoolCmd) Result() (bool, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *BoolCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *BoolCmd) readReply(rd *proto.Reader) error {
-	v, err := rd.ReadReply(nil)
-	// `SET key value NX` returns nil when key already exists. But
-	// `SETNX key value` returns bool (0/1). So convert nil to bool.
-	if err == Nil {
-		cmd.val = false
-		return nil
-	}
-	if err != nil {
-		return err
-	}
-	switch v := v.(type) {
-	case int64:
-		cmd.val = v == 1
-		return nil
-	case string:
-		cmd.val = v == "OK"
-		return nil
-	default:
-		return fmt.Errorf("got %T, wanted int64 or string", v)
-	}
-}
-
-//------------------------------------------------------------------------------
-
-type StringCmd struct {
-	baseCmd
-
-	val string
-}
-
-var _ Cmder = (*StringCmd)(nil)
-
-func NewStringCmd(ctx context.Context, args ...interface{}) *StringCmd {
-	return &StringCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *StringCmd) SetVal(val string) {
-	cmd.val = val
-}
-
-func (cmd *StringCmd) Val() string {
-	return cmd.val
-}
-
-func (cmd *StringCmd) Result() (string, error) {
-	return cmd.Val(), cmd.err
-}
-
-func (cmd *StringCmd) Bytes() ([]byte, error) {
-	return util.StringToBytes(cmd.val), cmd.err
-}
-
-func (cmd *StringCmd) Bool() (bool, error) {
-	if cmd.err != nil {
-		return false, cmd.err
-	}
-	return strconv.ParseBool(cmd.val)
-}
-
-func (cmd *StringCmd) Int() (int, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return strconv.Atoi(cmd.Val())
-}
-
-func (cmd *StringCmd) Int64() (int64, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return strconv.ParseInt(cmd.Val(), 10, 64)
-}
-
-func (cmd *StringCmd) Uint64() (uint64, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return strconv.ParseUint(cmd.Val(), 10, 64)
-}
-
-func (cmd *StringCmd) Float32() (float32, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	f, err := strconv.ParseFloat(cmd.Val(), 32)
-	if err != nil {
-		return 0, err
-	}
-	return float32(f), nil
-}
-
-func (cmd *StringCmd) Float64() (float64, error) {
-	if cmd.err != nil {
-		return 0, cmd.err
-	}
-	return strconv.ParseFloat(cmd.Val(), 64)
-}
-
-func (cmd *StringCmd) Time() (time.Time, error) {
-	if cmd.err != nil {
-		return time.Time{}, cmd.err
-	}
-	return time.Parse(time.RFC3339Nano, cmd.Val())
-}
-
-func (cmd *StringCmd) Scan(val interface{}) error {
-	if cmd.err != nil {
-		return cmd.err
-	}
-	return proto.Scan([]byte(cmd.val), val)
-}
-
-func (cmd *StringCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *StringCmd) readReply(rd *proto.Reader) (err error) {
-	cmd.val, err = rd.ReadString()
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type FloatCmd struct {
-	baseCmd
-
-	val float64
-}
-
-var _ Cmder = (*FloatCmd)(nil)
-
-func NewFloatCmd(ctx context.Context, args ...interface{}) *FloatCmd {
-	return &FloatCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *FloatCmd) SetVal(val float64) {
-	cmd.val = val
-}
-
-func (cmd *FloatCmd) Val() float64 {
-	return cmd.val
-}
-
-func (cmd *FloatCmd) Result() (float64, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *FloatCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *FloatCmd) readReply(rd *proto.Reader) (err error) {
-	cmd.val, err = rd.ReadFloatReply()
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type FloatSliceCmd struct {
-	baseCmd
-
-	val []float64
-}
-
-var _ Cmder = (*FloatSliceCmd)(nil)
-
-func NewFloatSliceCmd(ctx context.Context, args ...interface{}) *FloatSliceCmd {
-	return &FloatSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *FloatSliceCmd) SetVal(val []float64) {
-	cmd.val = val
-}
-
-func (cmd *FloatSliceCmd) Val() []float64 {
-	return cmd.val
-}
-
-func (cmd *FloatSliceCmd) Result() ([]float64, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *FloatSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *FloatSliceCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]float64, n)
-		for i := 0; i < len(cmd.val); i++ {
-			switch num, err := rd.ReadFloatReply(); {
-			case err == Nil:
-				cmd.val[i] = 0
-			case err != nil:
-				return nil, err
-			default:
-				cmd.val[i] = num
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type StringSliceCmd struct {
-	baseCmd
-
-	val []string
-}
-
-var _ Cmder = (*StringSliceCmd)(nil)
-
-func NewStringSliceCmd(ctx context.Context, args ...interface{}) *StringSliceCmd {
-	return &StringSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *StringSliceCmd) SetVal(val []string) {
-	cmd.val = val
-}
-
-func (cmd *StringSliceCmd) Val() []string {
-	return cmd.val
-}
-
-func (cmd *StringSliceCmd) Result() ([]string, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *StringSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *StringSliceCmd) ScanSlice(container interface{}) error {
-	return proto.ScanSlice(cmd.Val(), container)
-}
-
-func (cmd *StringSliceCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]string, n)
-		for i := 0; i < len(cmd.val); i++ {
-			switch s, err := rd.ReadString(); {
-			case err == Nil:
-				cmd.val[i] = ""
-			case err != nil:
-				return nil, err
-			default:
-				cmd.val[i] = s
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type BoolSliceCmd struct {
-	baseCmd
-
-	val []bool
-}
-
-var _ Cmder = (*BoolSliceCmd)(nil)
-
-func NewBoolSliceCmd(ctx context.Context, args ...interface{}) *BoolSliceCmd {
-	return &BoolSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *BoolSliceCmd) SetVal(val []bool) {
-	cmd.val = val
-}
-
-func (cmd *BoolSliceCmd) Val() []bool {
-	return cmd.val
-}
-
-func (cmd *BoolSliceCmd) Result() ([]bool, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *BoolSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *BoolSliceCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]bool, n)
-		for i := 0; i < len(cmd.val); i++ {
-			n, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-			cmd.val[i] = n == 1
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type StringStringMapCmd struct {
-	baseCmd
-
-	val map[string]string
-}
-
-var _ Cmder = (*StringStringMapCmd)(nil)
-
-func NewStringStringMapCmd(ctx context.Context, args ...interface{}) *StringStringMapCmd {
-	return &StringStringMapCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *StringStringMapCmd) SetVal(val map[string]string) {
-	cmd.val = val
-}
-
-func (cmd *StringStringMapCmd) Val() map[string]string {
-	return cmd.val
-}
-
-func (cmd *StringStringMapCmd) Result() (map[string]string, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *StringStringMapCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-// Scan scans the results from the map into a destination struct. The map keys
-// are matched in the Redis struct fields by the `redis:"field"` tag.
-func (cmd *StringStringMapCmd) Scan(dest interface{}) error {
-	if cmd.err != nil {
-		return cmd.err
-	}
-
-	strct, err := hscan.Struct(dest)
-	if err != nil {
-		return err
-	}
-
-	for k, v := range cmd.val {
-		if err := strct.Scan(k, v); err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func (cmd *StringStringMapCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make(map[string]string, n/2)
-		for i := int64(0); i < n; i += 2 {
-			key, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-
-			value, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-
-			cmd.val[key] = value
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type StringIntMapCmd struct {
-	baseCmd
-
-	val map[string]int64
-}
-
-var _ Cmder = (*StringIntMapCmd)(nil)
-
-func NewStringIntMapCmd(ctx context.Context, args ...interface{}) *StringIntMapCmd {
-	return &StringIntMapCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *StringIntMapCmd) SetVal(val map[string]int64) {
-	cmd.val = val
-}
-
-func (cmd *StringIntMapCmd) Val() map[string]int64 {
-	return cmd.val
-}
-
-func (cmd *StringIntMapCmd) Result() (map[string]int64, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *StringIntMapCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *StringIntMapCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make(map[string]int64, n/2)
-		for i := int64(0); i < n; i += 2 {
-			key, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-
-			n, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-
-			cmd.val[key] = n
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type StringStructMapCmd struct {
-	baseCmd
-
-	val map[string]struct{}
-}
-
-var _ Cmder = (*StringStructMapCmd)(nil)
-
-func NewStringStructMapCmd(ctx context.Context, args ...interface{}) *StringStructMapCmd {
-	return &StringStructMapCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *StringStructMapCmd) SetVal(val map[string]struct{}) {
-	cmd.val = val
-}
-
-func (cmd *StringStructMapCmd) Val() map[string]struct{} {
-	return cmd.val
-}
-
-func (cmd *StringStructMapCmd) Result() (map[string]struct{}, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *StringStructMapCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *StringStructMapCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make(map[string]struct{}, n)
-		for i := int64(0); i < n; i++ {
-			key, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-			cmd.val[key] = struct{}{}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type XMessage struct {
-	ID     string
-	Values map[string]interface{}
-}
-
-type XMessageSliceCmd struct {
-	baseCmd
-
-	val []XMessage
-}
-
-var _ Cmder = (*XMessageSliceCmd)(nil)
-
-func NewXMessageSliceCmd(ctx context.Context, args ...interface{}) *XMessageSliceCmd {
-	return &XMessageSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XMessageSliceCmd) SetVal(val []XMessage) {
-	cmd.val = val
-}
-
-func (cmd *XMessageSliceCmd) Val() []XMessage {
-	return cmd.val
-}
-
-func (cmd *XMessageSliceCmd) Result() ([]XMessage, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XMessageSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XMessageSliceCmd) readReply(rd *proto.Reader) error {
-	var err error
-	cmd.val, err = readXMessageSlice(rd)
-	return err
-}
-
-func readXMessageSlice(rd *proto.Reader) ([]XMessage, error) {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return nil, err
-	}
-
-	msgs := make([]XMessage, n)
-	for i := 0; i < n; i++ {
-		var err error
-		msgs[i], err = readXMessage(rd)
-		if err != nil {
-			return nil, err
-		}
-	}
-	return msgs, nil
-}
-
-func readXMessage(rd *proto.Reader) (XMessage, error) {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return XMessage{}, err
-	}
-	if n != 2 {
-		return XMessage{}, fmt.Errorf("got %d, wanted 2", n)
-	}
-
-	id, err := rd.ReadString()
-	if err != nil {
-		return XMessage{}, err
-	}
-
-	var values map[string]interface{}
-
-	v, err := rd.ReadArrayReply(stringInterfaceMapParser)
-	if err != nil {
-		if err != proto.Nil {
-			return XMessage{}, err
-		}
-	} else {
-		values = v.(map[string]interface{})
-	}
-
-	return XMessage{
-		ID:     id,
-		Values: values,
-	}, nil
-}
-
-// stringInterfaceMapParser implements proto.MultiBulkParse.
-func stringInterfaceMapParser(rd *proto.Reader, n int64) (interface{}, error) {
-	m := make(map[string]interface{}, n/2)
-	for i := int64(0); i < n; i += 2 {
-		key, err := rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		value, err := rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		m[key] = value
-	}
-	return m, nil
-}
-
-//------------------------------------------------------------------------------
-
-type XStream struct {
-	Stream   string
-	Messages []XMessage
-}
-
-type XStreamSliceCmd struct {
-	baseCmd
-
-	val []XStream
-}
-
-var _ Cmder = (*XStreamSliceCmd)(nil)
-
-func NewXStreamSliceCmd(ctx context.Context, args ...interface{}) *XStreamSliceCmd {
-	return &XStreamSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XStreamSliceCmd) SetVal(val []XStream) {
-	cmd.val = val
-}
-
-func (cmd *XStreamSliceCmd) Val() []XStream {
-	return cmd.val
-}
-
-func (cmd *XStreamSliceCmd) Result() ([]XStream, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XStreamSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XStreamSliceCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]XStream, n)
-		for i := 0; i < len(cmd.val); i++ {
-			i := i
-			_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-				if n != 2 {
-					return nil, fmt.Errorf("got %d, wanted 2", n)
-				}
-
-				stream, err := rd.ReadString()
-				if err != nil {
-					return nil, err
-				}
-
-				msgs, err := readXMessageSlice(rd)
-				if err != nil {
-					return nil, err
-				}
-
-				cmd.val[i] = XStream{
-					Stream:   stream,
-					Messages: msgs,
-				}
-				return nil, nil
-			})
-			if err != nil {
-				return nil, err
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type XPending struct {
-	Count     int64
-	Lower     string
-	Higher    string
-	Consumers map[string]int64
-}
-
-type XPendingCmd struct {
-	baseCmd
-	val *XPending
-}
-
-var _ Cmder = (*XPendingCmd)(nil)
-
-func NewXPendingCmd(ctx context.Context, args ...interface{}) *XPendingCmd {
-	return &XPendingCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XPendingCmd) SetVal(val *XPending) {
-	cmd.val = val
-}
-
-func (cmd *XPendingCmd) Val() *XPending {
-	return cmd.val
-}
-
-func (cmd *XPendingCmd) Result() (*XPending, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XPendingCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XPendingCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		if n != 4 {
-			return nil, fmt.Errorf("got %d, wanted 4", n)
-		}
-
-		count, err := rd.ReadIntReply()
-		if err != nil {
-			return nil, err
-		}
-
-		lower, err := rd.ReadString()
-		if err != nil && err != Nil {
-			return nil, err
-		}
-
-		higher, err := rd.ReadString()
-		if err != nil && err != Nil {
-			return nil, err
-		}
-
-		cmd.val = &XPending{
-			Count:  count,
-			Lower:  lower,
-			Higher: higher,
-		}
-		_, err = rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-			for i := int64(0); i < n; i++ {
-				_, err = rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-					if n != 2 {
-						return nil, fmt.Errorf("got %d, wanted 2", n)
-					}
-
-					consumerName, err := rd.ReadString()
-					if err != nil {
-						return nil, err
-					}
-
-					consumerPending, err := rd.ReadInt()
-					if err != nil {
-						return nil, err
-					}
-
-					if cmd.val.Consumers == nil {
-						cmd.val.Consumers = make(map[string]int64)
-					}
-					cmd.val.Consumers[consumerName] = consumerPending
-
-					return nil, nil
-				})
-				if err != nil {
-					return nil, err
-				}
-			}
-			return nil, nil
-		})
-		if err != nil && err != Nil {
-			return nil, err
-		}
-
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type XPendingExt struct {
-	ID         string
-	Consumer   string
-	Idle       time.Duration
-	RetryCount int64
-}
-
-type XPendingExtCmd struct {
-	baseCmd
-	val []XPendingExt
-}
-
-var _ Cmder = (*XPendingExtCmd)(nil)
-
-func NewXPendingExtCmd(ctx context.Context, args ...interface{}) *XPendingExtCmd {
-	return &XPendingExtCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XPendingExtCmd) SetVal(val []XPendingExt) {
-	cmd.val = val
-}
-
-func (cmd *XPendingExtCmd) Val() []XPendingExt {
-	return cmd.val
-}
-
-func (cmd *XPendingExtCmd) Result() ([]XPendingExt, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XPendingExtCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XPendingExtCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]XPendingExt, 0, n)
-		for i := int64(0); i < n; i++ {
-			_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-				if n != 4 {
-					return nil, fmt.Errorf("got %d, wanted 4", n)
-				}
-
-				id, err := rd.ReadString()
-				if err != nil {
-					return nil, err
-				}
-
-				consumer, err := rd.ReadString()
-				if err != nil && err != Nil {
-					return nil, err
-				}
-
-				idle, err := rd.ReadIntReply()
-				if err != nil && err != Nil {
-					return nil, err
-				}
-
-				retryCount, err := rd.ReadIntReply()
-				if err != nil && err != Nil {
-					return nil, err
-				}
-
-				cmd.val = append(cmd.val, XPendingExt{
-					ID:         id,
-					Consumer:   consumer,
-					Idle:       time.Duration(idle) * time.Millisecond,
-					RetryCount: retryCount,
-				})
-				return nil, nil
-			})
-			if err != nil {
-				return nil, err
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type XAutoClaimCmd struct {
-	baseCmd
-
-	start string
-	val   []XMessage
-}
-
-var _ Cmder = (*XAutoClaimCmd)(nil)
-
-func NewXAutoClaimCmd(ctx context.Context, args ...interface{}) *XAutoClaimCmd {
-	return &XAutoClaimCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XAutoClaimCmd) SetVal(val []XMessage, start string) {
-	cmd.val = val
-	cmd.start = start
-}
-
-func (cmd *XAutoClaimCmd) Val() (messages []XMessage, start string) {
-	return cmd.val, cmd.start
-}
-
-func (cmd *XAutoClaimCmd) Result() (messages []XMessage, start string, err error) {
-	return cmd.val, cmd.start, cmd.err
-}
-
-func (cmd *XAutoClaimCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XAutoClaimCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		if n != 2 {
-			return nil, fmt.Errorf("got %d, wanted 2", n)
-		}
-		var err error
-
-		cmd.start, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		cmd.val, err = readXMessageSlice(rd)
-		if err != nil {
-			return nil, err
-		}
-
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type XAutoClaimJustIDCmd struct {
-	baseCmd
-
-	start string
-	val   []string
-}
-
-var _ Cmder = (*XAutoClaimJustIDCmd)(nil)
-
-func NewXAutoClaimJustIDCmd(ctx context.Context, args ...interface{}) *XAutoClaimJustIDCmd {
-	return &XAutoClaimJustIDCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XAutoClaimJustIDCmd) SetVal(val []string, start string) {
-	cmd.val = val
-	cmd.start = start
-}
-
-func (cmd *XAutoClaimJustIDCmd) Val() (ids []string, start string) {
-	return cmd.val, cmd.start
-}
-
-func (cmd *XAutoClaimJustIDCmd) Result() (ids []string, start string, err error) {
-	return cmd.val, cmd.start, cmd.err
-}
-
-func (cmd *XAutoClaimJustIDCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XAutoClaimJustIDCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		if n != 2 {
-			return nil, fmt.Errorf("got %d, wanted 2", n)
-		}
-		var err error
-
-		cmd.start, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		nn, err := rd.ReadArrayLen()
-		if err != nil {
-			return nil, err
-		}
-
-		cmd.val = make([]string, nn)
-		for i := 0; i < nn; i++ {
-			cmd.val[i], err = rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-		}
-
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type XInfoConsumersCmd struct {
-	baseCmd
-	val []XInfoConsumer
-}
-
-type XInfoConsumer struct {
-	Name    string
-	Pending int64
-	Idle    int64
-}
-
-var _ Cmder = (*XInfoConsumersCmd)(nil)
-
-func NewXInfoConsumersCmd(ctx context.Context, stream string, group string) *XInfoConsumersCmd {
-	return &XInfoConsumersCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: []interface{}{"xinfo", "consumers", stream, group},
-		},
-	}
-}
-
-func (cmd *XInfoConsumersCmd) SetVal(val []XInfoConsumer) {
-	cmd.val = val
-}
-
-func (cmd *XInfoConsumersCmd) Val() []XInfoConsumer {
-	return cmd.val
-}
-
-func (cmd *XInfoConsumersCmd) Result() ([]XInfoConsumer, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XInfoConsumersCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XInfoConsumersCmd) readReply(rd *proto.Reader) error {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return err
-	}
-
-	cmd.val = make([]XInfoConsumer, n)
-
-	for i := 0; i < n; i++ {
-		cmd.val[i], err = readXConsumerInfo(rd)
-		if err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func readXConsumerInfo(rd *proto.Reader) (XInfoConsumer, error) {
-	var consumer XInfoConsumer
-
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return consumer, err
-	}
-	if n != 6 {
-		return consumer, fmt.Errorf("redis: got %d elements in XINFO CONSUMERS reply, wanted 6", n)
-	}
-
-	for i := 0; i < 3; i++ {
-		key, err := rd.ReadString()
-		if err != nil {
-			return consumer, err
-		}
-
-		val, err := rd.ReadString()
-		if err != nil {
-			return consumer, err
-		}
-
-		switch key {
-		case "name":
-			consumer.Name = val
-		case "pending":
-			consumer.Pending, err = strconv.ParseInt(val, 0, 64)
-			if err != nil {
-				return consumer, err
-			}
-		case "idle":
-			consumer.Idle, err = strconv.ParseInt(val, 0, 64)
-			if err != nil {
-				return consumer, err
-			}
-		default:
-			return consumer, fmt.Errorf("redis: unexpected content %s in XINFO CONSUMERS reply", key)
-		}
-	}
-
-	return consumer, nil
-}
-
-//------------------------------------------------------------------------------
-
-type XInfoGroupsCmd struct {
-	baseCmd
-	val []XInfoGroup
-}
-
-type XInfoGroup struct {
-	Name            string
-	Consumers       int64
-	Pending         int64
-	LastDeliveredID string
-}
-
-var _ Cmder = (*XInfoGroupsCmd)(nil)
-
-func NewXInfoGroupsCmd(ctx context.Context, stream string) *XInfoGroupsCmd {
-	return &XInfoGroupsCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: []interface{}{"xinfo", "groups", stream},
-		},
-	}
-}
-
-func (cmd *XInfoGroupsCmd) SetVal(val []XInfoGroup) {
-	cmd.val = val
-}
-
-func (cmd *XInfoGroupsCmd) Val() []XInfoGroup {
-	return cmd.val
-}
-
-func (cmd *XInfoGroupsCmd) Result() ([]XInfoGroup, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XInfoGroupsCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XInfoGroupsCmd) readReply(rd *proto.Reader) error {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return err
-	}
-
-	cmd.val = make([]XInfoGroup, n)
-
-	for i := 0; i < n; i++ {
-		cmd.val[i], err = readXGroupInfo(rd)
-		if err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func readXGroupInfo(rd *proto.Reader) (XInfoGroup, error) {
-	var group XInfoGroup
-
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return group, err
-	}
-	if n != 8 {
-		return group, fmt.Errorf("redis: got %d elements in XINFO GROUPS reply, wanted 8", n)
-	}
-
-	for i := 0; i < 4; i++ {
-		key, err := rd.ReadString()
-		if err != nil {
-			return group, err
-		}
-
-		val, err := rd.ReadString()
-		if err != nil {
-			return group, err
-		}
-
-		switch key {
-		case "name":
-			group.Name = val
-		case "consumers":
-			group.Consumers, err = strconv.ParseInt(val, 0, 64)
-			if err != nil {
-				return group, err
-			}
-		case "pending":
-			group.Pending, err = strconv.ParseInt(val, 0, 64)
-			if err != nil {
-				return group, err
-			}
-		case "last-delivered-id":
-			group.LastDeliveredID = val
-		default:
-			return group, fmt.Errorf("redis: unexpected content %s in XINFO GROUPS reply", key)
-		}
-	}
-
-	return group, nil
-}
-
-//------------------------------------------------------------------------------
-
-type XInfoStreamCmd struct {
-	baseCmd
-	val *XInfoStream
-}
-
-type XInfoStream struct {
-	Length          int64
-	RadixTreeKeys   int64
-	RadixTreeNodes  int64
-	Groups          int64
-	LastGeneratedID string
-	FirstEntry      XMessage
-	LastEntry       XMessage
-}
-
-var _ Cmder = (*XInfoStreamCmd)(nil)
-
-func NewXInfoStreamCmd(ctx context.Context, stream string) *XInfoStreamCmd {
-	return &XInfoStreamCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: []interface{}{"xinfo", "stream", stream},
-		},
-	}
-}
-
-func (cmd *XInfoStreamCmd) SetVal(val *XInfoStream) {
-	cmd.val = val
-}
-
-func (cmd *XInfoStreamCmd) Val() *XInfoStream {
-	return cmd.val
-}
-
-func (cmd *XInfoStreamCmd) Result() (*XInfoStream, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XInfoStreamCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XInfoStreamCmd) readReply(rd *proto.Reader) error {
-	v, err := rd.ReadReply(xStreamInfoParser)
-	if err != nil {
-		return err
-	}
-	cmd.val = v.(*XInfoStream)
-	return nil
-}
-
-func xStreamInfoParser(rd *proto.Reader, n int64) (interface{}, error) {
-	if n != 14 {
-		return nil, fmt.Errorf("redis: got %d elements in XINFO STREAM reply,"+
-			"wanted 14", n)
-	}
-	var info XInfoStream
-	for i := 0; i < 7; i++ {
-		key, err := rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-		switch key {
-		case "length":
-			info.Length, err = rd.ReadIntReply()
-		case "radix-tree-keys":
-			info.RadixTreeKeys, err = rd.ReadIntReply()
-		case "radix-tree-nodes":
-			info.RadixTreeNodes, err = rd.ReadIntReply()
-		case "groups":
-			info.Groups, err = rd.ReadIntReply()
-		case "last-generated-id":
-			info.LastGeneratedID, err = rd.ReadString()
-		case "first-entry":
-			info.FirstEntry, err = readXMessage(rd)
-			if err == Nil {
-				err = nil
-			}
-		case "last-entry":
-			info.LastEntry, err = readXMessage(rd)
-			if err == Nil {
-				err = nil
-			}
-		default:
-			return nil, fmt.Errorf("redis: unexpected content %s "+
-				"in XINFO STREAM reply", key)
-		}
-		if err != nil {
-			return nil, err
-		}
-	}
-	return &info, nil
-}
-
-//------------------------------------------------------------------------------
-
-type XInfoStreamFullCmd struct {
-	baseCmd
-	val *XInfoStreamFull
-}
-
-type XInfoStreamFull struct {
-	Length          int64
-	RadixTreeKeys   int64
-	RadixTreeNodes  int64
-	LastGeneratedID string
-	Entries         []XMessage
-	Groups          []XInfoStreamGroup
-}
-
-type XInfoStreamGroup struct {
-	Name            string
-	LastDeliveredID string
-	PelCount        int64
-	Pending         []XInfoStreamGroupPending
-	Consumers       []XInfoStreamConsumer
-}
-
-type XInfoStreamGroupPending struct {
-	ID            string
-	Consumer      string
-	DeliveryTime  time.Time
-	DeliveryCount int64
-}
-
-type XInfoStreamConsumer struct {
-	Name     string
-	SeenTime time.Time
-	PelCount int64
-	Pending  []XInfoStreamConsumerPending
-}
-
-type XInfoStreamConsumerPending struct {
-	ID            string
-	DeliveryTime  time.Time
-	DeliveryCount int64
-}
-
-var _ Cmder = (*XInfoStreamFullCmd)(nil)
-
-func NewXInfoStreamFullCmd(ctx context.Context, args ...interface{}) *XInfoStreamFullCmd {
-	return &XInfoStreamFullCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *XInfoStreamFullCmd) SetVal(val *XInfoStreamFull) {
-	cmd.val = val
-}
-
-func (cmd *XInfoStreamFullCmd) Val() *XInfoStreamFull {
-	return cmd.val
-}
-
-func (cmd *XInfoStreamFullCmd) Result() (*XInfoStreamFull, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *XInfoStreamFullCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *XInfoStreamFullCmd) readReply(rd *proto.Reader) error {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return err
-	}
-	if n != 12 {
-		return fmt.Errorf("redis: got %d elements in XINFO STREAM FULL reply,"+
-			"wanted 12", n)
-	}
-
-	cmd.val = &XInfoStreamFull{}
-
-	for i := 0; i < 6; i++ {
-		key, err := rd.ReadString()
-		if err != nil {
-			return err
-		}
-
-		switch key {
-		case "length":
-			cmd.val.Length, err = rd.ReadIntReply()
-		case "radix-tree-keys":
-			cmd.val.RadixTreeKeys, err = rd.ReadIntReply()
-		case "radix-tree-nodes":
-			cmd.val.RadixTreeNodes, err = rd.ReadIntReply()
-		case "last-generated-id":
-			cmd.val.LastGeneratedID, err = rd.ReadString()
-		case "entries":
-			cmd.val.Entries, err = readXMessageSlice(rd)
-		case "groups":
-			cmd.val.Groups, err = readStreamGroups(rd)
-		default:
-			return fmt.Errorf("redis: unexpected content %s "+
-				"in XINFO STREAM reply", key)
-		}
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-func readStreamGroups(rd *proto.Reader) ([]XInfoStreamGroup, error) {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return nil, err
-	}
-	groups := make([]XInfoStreamGroup, 0, n)
-	for i := 0; i < n; i++ {
-		nn, err := rd.ReadArrayLen()
-		if err != nil {
-			return nil, err
-		}
-		if nn != 10 {
-			return nil, fmt.Errorf("redis: got %d elements in XINFO STREAM FULL reply,"+
-				"wanted 10", nn)
-		}
-
-		group := XInfoStreamGroup{}
-
-		for f := 0; f < 5; f++ {
-			key, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-
-			switch key {
-			case "name":
-				group.Name, err = rd.ReadString()
-			case "last-delivered-id":
-				group.LastDeliveredID, err = rd.ReadString()
-			case "pel-count":
-				group.PelCount, err = rd.ReadIntReply()
-			case "pending":
-				group.Pending, err = readXInfoStreamGroupPending(rd)
-			case "consumers":
-				group.Consumers, err = readXInfoStreamConsumers(rd)
-			default:
-				return nil, fmt.Errorf("redis: unexpected content %s "+
-					"in XINFO STREAM reply", key)
-			}
-
-			if err != nil {
-				return nil, err
-			}
-		}
-
-		groups = append(groups, group)
-	}
-
-	return groups, nil
-}
-
-func readXInfoStreamGroupPending(rd *proto.Reader) ([]XInfoStreamGroupPending, error) {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return nil, err
-	}
-
-	pending := make([]XInfoStreamGroupPending, 0, n)
-
-	for i := 0; i < n; i++ {
-		nn, err := rd.ReadArrayLen()
-		if err != nil {
-			return nil, err
-		}
-		if nn != 4 {
-			return nil, fmt.Errorf("redis: got %d elements in XINFO STREAM FULL reply,"+
-				"wanted 4", nn)
-		}
-
-		p := XInfoStreamGroupPending{}
-
-		p.ID, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		p.Consumer, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		delivery, err := rd.ReadIntReply()
-		if err != nil {
-			return nil, err
-		}
-		p.DeliveryTime = time.Unix(delivery/1000, delivery%1000*int64(time.Millisecond))
-
-		p.DeliveryCount, err = rd.ReadIntReply()
-		if err != nil {
-			return nil, err
-		}
-
-		pending = append(pending, p)
-	}
-
-	return pending, nil
-}
-
-func readXInfoStreamConsumers(rd *proto.Reader) ([]XInfoStreamConsumer, error) {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return nil, err
-	}
-
-	consumers := make([]XInfoStreamConsumer, 0, n)
-
-	for i := 0; i < n; i++ {
-		nn, err := rd.ReadArrayLen()
-		if err != nil {
-			return nil, err
-		}
-		if nn != 8 {
-			return nil, fmt.Errorf("redis: got %d elements in XINFO STREAM FULL reply,"+
-				"wanted 8", nn)
-		}
-
-		c := XInfoStreamConsumer{}
-
-		for f := 0; f < 4; f++ {
-			cKey, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-
-			switch cKey {
-			case "name":
-				c.Name, err = rd.ReadString()
-			case "seen-time":
-				seen, err := rd.ReadIntReply()
-				if err != nil {
-					return nil, err
-				}
-				c.SeenTime = time.Unix(seen/1000, seen%1000*int64(time.Millisecond))
-			case "pel-count":
-				c.PelCount, err = rd.ReadIntReply()
-			case "pending":
-				pendingNumber, err := rd.ReadArrayLen()
-				if err != nil {
-					return nil, err
-				}
-
-				c.Pending = make([]XInfoStreamConsumerPending, 0, pendingNumber)
-
-				for pn := 0; pn < pendingNumber; pn++ {
-					nn, err := rd.ReadArrayLen()
-					if err != nil {
-						return nil, err
-					}
-					if nn != 3 {
-						return nil, fmt.Errorf("redis: got %d elements in XINFO STREAM reply,"+
-							"wanted 3", nn)
-					}
-
-					p := XInfoStreamConsumerPending{}
-
-					p.ID, err = rd.ReadString()
-					if err != nil {
-						return nil, err
-					}
-
-					delivery, err := rd.ReadIntReply()
-					if err != nil {
-						return nil, err
-					}
-					p.DeliveryTime = time.Unix(delivery/1000, delivery%1000*int64(time.Millisecond))
-
-					p.DeliveryCount, err = rd.ReadIntReply()
-					if err != nil {
-						return nil, err
-					}
-
-					c.Pending = append(c.Pending, p)
-				}
-			default:
-				return nil, fmt.Errorf("redis: unexpected content %s "+
-					"in XINFO STREAM reply", cKey)
-			}
-			if err != nil {
-				return nil, err
-			}
-		}
-		consumers = append(consumers, c)
-	}
-
-	return consumers, nil
-}
-
-//------------------------------------------------------------------------------
-
-type ZSliceCmd struct {
-	baseCmd
-
-	val []Z
-}
-
-var _ Cmder = (*ZSliceCmd)(nil)
-
-func NewZSliceCmd(ctx context.Context, args ...interface{}) *ZSliceCmd {
-	return &ZSliceCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *ZSliceCmd) SetVal(val []Z) {
-	cmd.val = val
-}
-
-func (cmd *ZSliceCmd) Val() []Z {
-	return cmd.val
-}
-
-func (cmd *ZSliceCmd) Result() ([]Z, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *ZSliceCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *ZSliceCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]Z, n/2)
-		for i := 0; i < len(cmd.val); i++ {
-			member, err := rd.ReadString()
-			if err != nil {
-				return nil, err
-			}
-
-			score, err := rd.ReadFloatReply()
-			if err != nil {
-				return nil, err
-			}
-
-			cmd.val[i] = Z{
-				Member: member,
-				Score:  score,
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type ZWithKeyCmd struct {
-	baseCmd
-
-	val *ZWithKey
-}
-
-var _ Cmder = (*ZWithKeyCmd)(nil)
-
-func NewZWithKeyCmd(ctx context.Context, args ...interface{}) *ZWithKeyCmd {
-	return &ZWithKeyCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *ZWithKeyCmd) SetVal(val *ZWithKey) {
-	cmd.val = val
-}
-
-func (cmd *ZWithKeyCmd) Val() *ZWithKey {
-	return cmd.val
-}
-
-func (cmd *ZWithKeyCmd) Result() (*ZWithKey, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *ZWithKeyCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *ZWithKeyCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		if n != 3 {
-			return nil, fmt.Errorf("got %d elements, expected 3", n)
-		}
-
-		cmd.val = &ZWithKey{}
-		var err error
-
-		cmd.val.Key, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		cmd.val.Member, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-
-		cmd.val.Score, err = rd.ReadFloatReply()
-		if err != nil {
-			return nil, err
-		}
-
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type ScanCmd struct {
-	baseCmd
-
-	page   []string
-	cursor uint64
-
-	process cmdable
-}
-
-var _ Cmder = (*ScanCmd)(nil)
-
-func NewScanCmd(ctx context.Context, process cmdable, args ...interface{}) *ScanCmd {
-	return &ScanCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-		process: process,
-	}
-}
-
-func (cmd *ScanCmd) SetVal(page []string, cursor uint64) {
-	cmd.page = page
-	cmd.cursor = cursor
-}
-
-func (cmd *ScanCmd) Val() (keys []string, cursor uint64) {
-	return cmd.page, cmd.cursor
-}
-
-func (cmd *ScanCmd) Result() (keys []string, cursor uint64, err error) {
-	return cmd.page, cmd.cursor, cmd.err
-}
-
-func (cmd *ScanCmd) String() string {
-	return cmdString(cmd, cmd.page)
-}
-
-func (cmd *ScanCmd) readReply(rd *proto.Reader) (err error) {
-	cmd.page, cmd.cursor, err = rd.ReadScanReply()
-	return err
-}
-
-// Iterator creates a new ScanIterator.
-func (cmd *ScanCmd) Iterator() *ScanIterator {
-	return &ScanIterator{
-		cmd: cmd,
-	}
-}
-
-//------------------------------------------------------------------------------
-
-type ClusterNode struct {
-	ID   string
-	Addr string
-}
-
-type ClusterSlot struct {
-	Start int
-	End   int
-	Nodes []ClusterNode
-}
-
-type ClusterSlotsCmd struct {
-	baseCmd
-
-	val []ClusterSlot
-}
-
-var _ Cmder = (*ClusterSlotsCmd)(nil)
-
-func NewClusterSlotsCmd(ctx context.Context, args ...interface{}) *ClusterSlotsCmd {
-	return &ClusterSlotsCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *ClusterSlotsCmd) SetVal(val []ClusterSlot) {
-	cmd.val = val
-}
-
-func (cmd *ClusterSlotsCmd) Val() []ClusterSlot {
-	return cmd.val
-}
-
-func (cmd *ClusterSlotsCmd) Result() ([]ClusterSlot, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *ClusterSlotsCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *ClusterSlotsCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]ClusterSlot, n)
-		for i := 0; i < len(cmd.val); i++ {
-			n, err := rd.ReadArrayLen()
-			if err != nil {
-				return nil, err
-			}
-			if n < 2 {
-				err := fmt.Errorf("redis: got %d elements in cluster info, expected at least 2", n)
-				return nil, err
-			}
-
-			start, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-
-			end, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-
-			nodes := make([]ClusterNode, n-2)
-			for j := 0; j < len(nodes); j++ {
-				n, err := rd.ReadArrayLen()
-				if err != nil {
-					return nil, err
-				}
-				if n != 2 && n != 3 {
-					err := fmt.Errorf("got %d elements in cluster info address, expected 2 or 3", n)
-					return nil, err
-				}
-
-				ip, err := rd.ReadString()
-				if err != nil {
-					return nil, err
-				}
-
-				port, err := rd.ReadString()
-				if err != nil {
-					return nil, err
-				}
-
-				nodes[j].Addr = net.JoinHostPort(ip, port)
-
-				if n == 3 {
-					id, err := rd.ReadString()
-					if err != nil {
-						return nil, err
-					}
-					nodes[j].ID = id
-				}
-			}
-
-			cmd.val[i] = ClusterSlot{
-				Start: int(start),
-				End:   int(end),
-				Nodes: nodes,
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-// GeoLocation is used with GeoAdd to add geospatial location.
-type GeoLocation struct {
-	Name                      string
-	Longitude, Latitude, Dist float64
-	GeoHash                   int64
-}
-
-// GeoRadiusQuery is used with GeoRadius to query geospatial index.
-type GeoRadiusQuery struct {
-	Radius float64
-	// Can be m, km, ft, or mi. Default is km.
-	Unit        string
-	WithCoord   bool
-	WithDist    bool
-	WithGeoHash bool
-	Count       int
-	// Can be ASC or DESC. Default is no sort order.
-	Sort      string
-	Store     string
-	StoreDist string
-}
-
-type GeoLocationCmd struct {
-	baseCmd
-
-	q         *GeoRadiusQuery
-	locations []GeoLocation
-}
-
-var _ Cmder = (*GeoLocationCmd)(nil)
-
-func NewGeoLocationCmd(ctx context.Context, q *GeoRadiusQuery, args ...interface{}) *GeoLocationCmd {
-	return &GeoLocationCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: geoLocationArgs(q, args...),
-		},
-		q: q,
-	}
-}
-
-func geoLocationArgs(q *GeoRadiusQuery, args ...interface{}) []interface{} {
-	args = append(args, q.Radius)
-	if q.Unit != "" {
-		args = append(args, q.Unit)
-	} else {
-		args = append(args, "km")
-	}
-	if q.WithCoord {
-		args = append(args, "withcoord")
-	}
-	if q.WithDist {
-		args = append(args, "withdist")
-	}
-	if q.WithGeoHash {
-		args = append(args, "withhash")
-	}
-	if q.Count > 0 {
-		args = append(args, "count", q.Count)
-	}
-	if q.Sort != "" {
-		args = append(args, q.Sort)
-	}
-	if q.Store != "" {
-		args = append(args, "store")
-		args = append(args, q.Store)
-	}
-	if q.StoreDist != "" {
-		args = append(args, "storedist")
-		args = append(args, q.StoreDist)
-	}
-	return args
-}
-
-func (cmd *GeoLocationCmd) SetVal(locations []GeoLocation) {
-	cmd.locations = locations
-}
-
-func (cmd *GeoLocationCmd) Val() []GeoLocation {
-	return cmd.locations
-}
-
-func (cmd *GeoLocationCmd) Result() ([]GeoLocation, error) {
-	return cmd.locations, cmd.err
-}
-
-func (cmd *GeoLocationCmd) String() string {
-	return cmdString(cmd, cmd.locations)
-}
-
-func (cmd *GeoLocationCmd) readReply(rd *proto.Reader) error {
-	v, err := rd.ReadArrayReply(newGeoLocationSliceParser(cmd.q))
-	if err != nil {
-		return err
-	}
-	cmd.locations = v.([]GeoLocation)
-	return nil
-}
-
-func newGeoLocationSliceParser(q *GeoRadiusQuery) proto.MultiBulkParse {
-	return func(rd *proto.Reader, n int64) (interface{}, error) {
-		locs := make([]GeoLocation, 0, n)
-		for i := int64(0); i < n; i++ {
-			v, err := rd.ReadReply(newGeoLocationParser(q))
-			if err != nil {
-				return nil, err
-			}
-			switch vv := v.(type) {
-			case string:
-				locs = append(locs, GeoLocation{
-					Name: vv,
-				})
-			case *GeoLocation:
-				// TODO: avoid copying
-				locs = append(locs, *vv)
-			default:
-				return nil, fmt.Errorf("got %T, expected string or *GeoLocation", v)
-			}
-		}
-		return locs, nil
-	}
-}
-
-func newGeoLocationParser(q *GeoRadiusQuery) proto.MultiBulkParse {
-	return func(rd *proto.Reader, n int64) (interface{}, error) {
-		var loc GeoLocation
-		var err error
-
-		loc.Name, err = rd.ReadString()
-		if err != nil {
-			return nil, err
-		}
-		if q.WithDist {
-			loc.Dist, err = rd.ReadFloatReply()
-			if err != nil {
-				return nil, err
-			}
-		}
-		if q.WithGeoHash {
-			loc.GeoHash, err = rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-		}
-		if q.WithCoord {
-			n, err := rd.ReadArrayLen()
-			if err != nil {
-				return nil, err
-			}
-			if n != 2 {
-				return nil, fmt.Errorf("got %d coordinates, expected 2", n)
-			}
-
-			loc.Longitude, err = rd.ReadFloatReply()
-			if err != nil {
-				return nil, err
-			}
-			loc.Latitude, err = rd.ReadFloatReply()
-			if err != nil {
-				return nil, err
-			}
-		}
-
-		return &loc, nil
-	}
-}
-
-//------------------------------------------------------------------------------
-
-// GeoSearchQuery is used for GEOSearch/GEOSearchStore command query.
-type GeoSearchQuery struct {
-	Member string
-
-	// Latitude and Longitude when using FromLonLat option.
-	Longitude float64
-	Latitude  float64
-
-	// Distance and unit when using ByRadius option.
-	// Can use m, km, ft, or mi. Default is km.
-	Radius     float64
-	RadiusUnit string
-
-	// Height, width and unit when using ByBox option.
-	// Can be m, km, ft, or mi. Default is km.
-	BoxWidth  float64
-	BoxHeight float64
-	BoxUnit   string
-
-	// Can be ASC or DESC. Default is no sort order.
-	Sort     string
-	Count    int
-	CountAny bool
-}
-
-type GeoSearchLocationQuery struct {
-	GeoSearchQuery
-
-	WithCoord bool
-	WithDist  bool
-	WithHash  bool
-}
-
-type GeoSearchStoreQuery struct {
-	GeoSearchQuery
-
-	// When using the StoreDist option, the command stores the items in a
-	// sorted set populated with their distance from the center of the circle or box,
-	// as a floating-point number, in the same unit specified for that shape.
-	StoreDist bool
-}
-
-func geoSearchLocationArgs(q *GeoSearchLocationQuery, args []interface{}) []interface{} {
-	args = geoSearchArgs(&q.GeoSearchQuery, args)
-
-	if q.WithCoord {
-		args = append(args, "withcoord")
-	}
-	if q.WithDist {
-		args = append(args, "withdist")
-	}
-	if q.WithHash {
-		args = append(args, "withhash")
-	}
-
-	return args
-}
-
-func geoSearchArgs(q *GeoSearchQuery, args []interface{}) []interface{} {
-	if q.Member != "" {
-		args = append(args, "frommember", q.Member)
-	} else {
-		args = append(args, "fromlonlat", q.Longitude, q.Latitude)
-	}
-
-	if q.Radius > 0 {
-		if q.RadiusUnit == "" {
-			q.RadiusUnit = "km"
-		}
-		args = append(args, "byradius", q.Radius, q.RadiusUnit)
-	} else {
-		if q.BoxUnit == "" {
-			q.BoxUnit = "km"
-		}
-		args = append(args, "bybox", q.BoxWidth, q.BoxHeight, q.BoxUnit)
-	}
-
-	if q.Sort != "" {
-		args = append(args, q.Sort)
-	}
-
-	if q.Count > 0 {
-		args = append(args, "count", q.Count)
-		if q.CountAny {
-			args = append(args, "any")
-		}
-	}
-
-	return args
-}
-
-type GeoSearchLocationCmd struct {
-	baseCmd
-
-	opt *GeoSearchLocationQuery
-	val []GeoLocation
-}
-
-var _ Cmder = (*GeoSearchLocationCmd)(nil)
-
-func NewGeoSearchLocationCmd(
-	ctx context.Context, opt *GeoSearchLocationQuery, args ...interface{},
-) *GeoSearchLocationCmd {
-	return &GeoSearchLocationCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-		opt: opt,
-	}
-}
-
-func (cmd *GeoSearchLocationCmd) SetVal(val []GeoLocation) {
-	cmd.val = val
-}
-
-func (cmd *GeoSearchLocationCmd) Val() []GeoLocation {
-	return cmd.val
-}
-
-func (cmd *GeoSearchLocationCmd) Result() ([]GeoLocation, error) {
-	return cmd.val, cmd.err
-}
-
-func (cmd *GeoSearchLocationCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *GeoSearchLocationCmd) readReply(rd *proto.Reader) error {
-	n, err := rd.ReadArrayLen()
-	if err != nil {
-		return err
-	}
-
-	cmd.val = make([]GeoLocation, n)
-	for i := 0; i < n; i++ {
-		_, err = rd.ReadArrayLen()
-		if err != nil {
-			return err
-		}
-
-		var loc GeoLocation
-
-		loc.Name, err = rd.ReadString()
-		if err != nil {
-			return err
-		}
-		if cmd.opt.WithDist {
-			loc.Dist, err = rd.ReadFloatReply()
-			if err != nil {
-				return err
-			}
-		}
-		if cmd.opt.WithHash {
-			loc.GeoHash, err = rd.ReadIntReply()
-			if err != nil {
-				return err
-			}
-		}
-		if cmd.opt.WithCoord {
-			nn, err := rd.ReadArrayLen()
-			if err != nil {
-				return err
-			}
-			if nn != 2 {
-				return fmt.Errorf("got %d coordinates, expected 2", nn)
-			}
-
-			loc.Longitude, err = rd.ReadFloatReply()
-			if err != nil {
-				return err
-			}
-			loc.Latitude, err = rd.ReadFloatReply()
-			if err != nil {
-				return err
-			}
-		}
-
-		cmd.val[i] = loc
-	}
-
-	return nil
-}
-
-//------------------------------------------------------------------------------
-
-type GeoPos struct {
-	Longitude, Latitude float64
-}
-
-type GeoPosCmd struct {
-	baseCmd
-
-	val []*GeoPos
-}
-
-var _ Cmder = (*GeoPosCmd)(nil)
-
-func NewGeoPosCmd(ctx context.Context, args ...interface{}) *GeoPosCmd {
-	return &GeoPosCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *GeoPosCmd) SetVal(val []*GeoPos) {
-	cmd.val = val
-}
-
-func (cmd *GeoPosCmd) Val() []*GeoPos {
-	return cmd.val
-}
-
-func (cmd *GeoPosCmd) Result() ([]*GeoPos, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *GeoPosCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *GeoPosCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]*GeoPos, n)
-		for i := 0; i < len(cmd.val); i++ {
-			i := i
-			_, err := rd.ReadReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-				longitude, err := rd.ReadFloatReply()
-				if err != nil {
-					return nil, err
-				}
-
-				latitude, err := rd.ReadFloatReply()
-				if err != nil {
-					return nil, err
-				}
-
-				cmd.val[i] = &GeoPos{
-					Longitude: longitude,
-					Latitude:  latitude,
-				}
-				return nil, nil
-			})
-			if err != nil {
-				if err == Nil {
-					cmd.val[i] = nil
-					continue
-				}
-				return nil, err
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
-
-//------------------------------------------------------------------------------
-
-type CommandInfo struct {
-	Name        string
-	Arity       int8
-	Flags       []string
-	ACLFlags    []string
-	FirstKeyPos int8
-	LastKeyPos  int8
-	StepCount   int8
-	ReadOnly    bool
-}
-
-type CommandsInfoCmd struct {
-	baseCmd
-
-	val map[string]*CommandInfo
-}
-
-var _ Cmder = (*CommandsInfoCmd)(nil)
-
-func NewCommandsInfoCmd(ctx context.Context, args ...interface{}) *CommandsInfoCmd {
-	return &CommandsInfoCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *CommandsInfoCmd) SetVal(val map[string]*CommandInfo) {
-	cmd.val = val
-}
-
-func (cmd *CommandsInfoCmd) Val() map[string]*CommandInfo {
-	return cmd.val
-}
-
-func (cmd *CommandsInfoCmd) Result() (map[string]*CommandInfo, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *CommandsInfoCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *CommandsInfoCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make(map[string]*CommandInfo, n)
-		for i := int64(0); i < n; i++ {
-			v, err := rd.ReadReply(commandInfoParser)
-			if err != nil {
-				return nil, err
-			}
-			vv := v.(*CommandInfo)
-			cmd.val[vv.Name] = vv
-		}
-		return nil, nil
-	})
-	return err
-}
-
-func commandInfoParser(rd *proto.Reader, n int64) (interface{}, error) {
-	const numArgRedis5 = 6
-	const numArgRedis6 = 7
-
-	switch n {
-	case numArgRedis5, numArgRedis6:
-		// continue
-	default:
-		return nil, fmt.Errorf("redis: got %d elements in COMMAND reply, wanted 7", n)
-	}
-
-	var cmd CommandInfo
-	var err error
-
-	cmd.Name, err = rd.ReadString()
-	if err != nil {
-		return nil, err
-	}
-
-	arity, err := rd.ReadIntReply()
-	if err != nil {
-		return nil, err
-	}
-	cmd.Arity = int8(arity)
-
-	_, err = rd.ReadReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.Flags = make([]string, n)
-		for i := 0; i < len(cmd.Flags); i++ {
-			switch s, err := rd.ReadString(); {
-			case err == Nil:
-				cmd.Flags[i] = ""
-			case err != nil:
-				return nil, err
-			default:
-				cmd.Flags[i] = s
-			}
-		}
-		return nil, nil
-	})
-	if err != nil {
-		return nil, err
-	}
-
-	firstKeyPos, err := rd.ReadIntReply()
-	if err != nil {
-		return nil, err
-	}
-	cmd.FirstKeyPos = int8(firstKeyPos)
-
-	lastKeyPos, err := rd.ReadIntReply()
-	if err != nil {
-		return nil, err
-	}
-	cmd.LastKeyPos = int8(lastKeyPos)
-
-	stepCount, err := rd.ReadIntReply()
-	if err != nil {
-		return nil, err
-	}
-	cmd.StepCount = int8(stepCount)
-
-	for _, flag := range cmd.Flags {
-		if flag == "readonly" {
-			cmd.ReadOnly = true
-			break
-		}
-	}
-
-	if n == numArgRedis5 {
-		return &cmd, nil
-	}
-
-	_, err = rd.ReadReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.ACLFlags = make([]string, n)
-		for i := 0; i < len(cmd.ACLFlags); i++ {
-			switch s, err := rd.ReadString(); {
-			case err == Nil:
-				cmd.ACLFlags[i] = ""
-			case err != nil:
-				return nil, err
-			default:
-				cmd.ACLFlags[i] = s
-			}
-		}
-		return nil, nil
-	})
-	if err != nil {
-		return nil, err
-	}
-
-	return &cmd, nil
-}
-
-//------------------------------------------------------------------------------
-
-type cmdsInfoCache struct {
-	fn func(ctx context.Context) (map[string]*CommandInfo, error)
-
-	once internal.Once
-	cmds map[string]*CommandInfo
-}
-
-func newCmdsInfoCache(fn func(ctx context.Context) (map[string]*CommandInfo, error)) *cmdsInfoCache {
-	return &cmdsInfoCache{
-		fn: fn,
-	}
-}
-
-func (c *cmdsInfoCache) Get(ctx context.Context) (map[string]*CommandInfo, error) {
-	err := c.once.Do(func() error {
-		cmds, err := c.fn(ctx)
-		if err != nil {
-			return err
-		}
-
-		// Extensions have cmd names in upper case. Convert them to lower case.
-		for k, v := range cmds {
-			lower := internal.ToLower(k)
-			if lower != k {
-				cmds[lower] = v
-			}
-		}
-
-		c.cmds = cmds
-		return nil
-	})
-	return c.cmds, err
-}
-
-//------------------------------------------------------------------------------
-
-type SlowLog struct {
-	ID       int64
-	Time     time.Time
-	Duration time.Duration
-	Args     []string
-	// These are also optional fields emitted only by Redis 4.0 or greater:
-	// https://redis.io/commands/slowlog#output-format
-	ClientAddr string
-	ClientName string
-}
-
-type SlowLogCmd struct {
-	baseCmd
-
-	val []SlowLog
-}
-
-var _ Cmder = (*SlowLogCmd)(nil)
-
-func NewSlowLogCmd(ctx context.Context, args ...interface{}) *SlowLogCmd {
-	return &SlowLogCmd{
-		baseCmd: baseCmd{
-			ctx:  ctx,
-			args: args,
-		},
-	}
-}
-
-func (cmd *SlowLogCmd) SetVal(val []SlowLog) {
-	cmd.val = val
-}
-
-func (cmd *SlowLogCmd) Val() []SlowLog {
-	return cmd.val
-}
-
-func (cmd *SlowLogCmd) Result() ([]SlowLog, error) {
-	return cmd.Val(), cmd.Err()
-}
-
-func (cmd *SlowLogCmd) String() string {
-	return cmdString(cmd, cmd.val)
-}
-
-func (cmd *SlowLogCmd) readReply(rd *proto.Reader) error {
-	_, err := rd.ReadArrayReply(func(rd *proto.Reader, n int64) (interface{}, error) {
-		cmd.val = make([]SlowLog, n)
-		for i := 0; i < len(cmd.val); i++ {
-			n, err := rd.ReadArrayLen()
-			if err != nil {
-				return nil, err
-			}
-			if n < 4 {
-				err := fmt.Errorf("redis: got %d elements in slowlog get, expected at least 4", n)
-				return nil, err
-			}
-
-			id, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-
-			createdAt, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-			createdAtTime := time.Unix(createdAt, 0)
-
-			costs, err := rd.ReadIntReply()
-			if err != nil {
-				return nil, err
-			}
-			costsDuration := time.Duration(costs) * time.Microsecond
-
-			cmdLen, err := rd.ReadArrayLen()
-			if err != nil {
-				return nil, err
-			}
-			if cmdLen < 1 {
-				err := fmt.Errorf("redis: got %d elements commands reply in slowlog get, expected at least 1", cmdLen)
-				return nil, err
-			}
-
-			cmdString := make([]string, cmdLen)
-			for i := 0; i < cmdLen; i++ {
-				cmdString[i], err = rd.ReadString()
-				if err != nil {
-					return nil, err
-				}
-			}
-
-			var address, name string
-			for i := 4; i < n; i++ {
-				str, err := rd.ReadString()
-				if err != nil {
-					return nil, err
-				}
-				if i == 4 {
-					address = str
-				} else if i == 5 {
-					name = str
-				}
-			}
-
-			cmd.val[i] = SlowLog{
-				ID:         id,
-				Time:       createdAtTime,
-				Duration:   costsDuration,
-				Args:       cmdString,
-				ClientAddr: address,
-				ClientName: name,
-			}
-		}
-		return nil, nil
-	})
-	return err
-}
diff --git a/vendor/github.com/go-redis/redis/v8/commands.go b/vendor/github.com/go-redis/redis/v8/commands.go
deleted file mode 100644
index bbfe089df166..000000000000
--- a/vendor/github.com/go-redis/redis/v8/commands.go
+++ /dev/null
@@ -1,3475 +0,0 @@
-package redis
-
-import (
-	"context"
-	"errors"
-	"io"
-	"time"
-
-	"github.com/go-redis/redis/v8/internal"
-)
-
-// KeepTTL is a Redis KEEPTTL option to keep existing TTL, it requires your redis-server version >= 6.0,
-// otherwise you will receive an error: (error) ERR syntax error.
-// For example:
-//
-//    rdb.Set(ctx, key, value, redis.KeepTTL)
-const KeepTTL = -1
-
-func usePrecise(dur time.Duration) bool {
-	return dur < time.Second || dur%time.Second != 0
-}
-
-func formatMs(ctx context.Context, dur time.Duration) int64 {
-	if dur > 0 && dur < time.Millisecond {
-		internal.Logger.Printf(
-			ctx,
-			"specified duration is %s, but minimal supported value is %s - truncating to 1ms",
-			dur, time.Millisecond,
-		)
-		return 1
-	}
-	return int64(dur / time.Millisecond)
-}
-
-func formatSec(ctx context.Context, dur time.Duration) int64 {
-	if dur > 0 && dur < time.Second {
-		internal.Logger.Printf(
-			ctx,
-			"specified duration is %s, but minimal supported value is %s - truncating to 1s",
-			dur, time.Second,
-		)
-		return 1
-	}
-	return int64(dur / time.Second)
-}
-
-func appendArgs(dst, src []interface{}) []interface{} {
-	if len(src) == 1 {
-		return appendArg(dst, src[0])
-	}
-
-	dst = append(dst, src...)
-	return dst
-}
-
-func appendArg(dst []interface{}, arg interface{}) []interface{} {
-	switch arg := arg.(type) {
-	case []string:
-		for _, s := range arg {
-			dst = append(dst, s)
-		}
-		return dst
-	case []interface{}:
-		dst = append(dst, arg...)
-		return dst
-	case map[string]interface{}:
-		for k, v := range arg {
-			dst = append(dst, k, v)
-		}
-		return dst
-	case map[string]string:
-		for k, v := range arg {
-			dst = append(dst, k, v)
-		}
-		return dst
-	default:
-		return append(dst, arg)
-	}
-}
-
-type Cmdable interface {
-	Pipeline() Pipeliner
-	Pipelined(ctx context.Context, fn func(Pipeliner) error) ([]Cmder, error)
-
-	TxPipelined(ctx context.Context, fn func(Pipeliner) error) ([]Cmder, error)
-	TxPipeline() Pipeliner
-
-	Command(ctx context.Context) *CommandsInfoCmd
-	ClientGetName(ctx context.Context) *StringCmd
-	Echo(ctx context.Context, message interface{}) *StringCmd
-	Ping(ctx context.Context) *StatusCmd
-	Quit(ctx context.Context) *StatusCmd
-	Del(ctx context.Context, keys ...string) *IntCmd
-	Unlink(ctx context.Context, keys ...string) *IntCmd
-	Dump(ctx context.Context, key string) *StringCmd
-	Exists(ctx context.Context, keys ...string) *IntCmd
-	Expire(ctx context.Context, key string, expiration time.Duration) *BoolCmd
-	ExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd
-	ExpireNX(ctx context.Context, key string, expiration time.Duration) *BoolCmd
-	ExpireXX(ctx context.Context, key string, expiration time.Duration) *BoolCmd
-	ExpireGT(ctx context.Context, key string, expiration time.Duration) *BoolCmd
-	ExpireLT(ctx context.Context, key string, expiration time.Duration) *BoolCmd
-	Keys(ctx context.Context, pattern string) *StringSliceCmd
-	Migrate(ctx context.Context, host, port, key string, db int, timeout time.Duration) *StatusCmd
-	Move(ctx context.Context, key string, db int) *BoolCmd
-	ObjectRefCount(ctx context.Context, key string) *IntCmd
-	ObjectEncoding(ctx context.Context, key string) *StringCmd
-	ObjectIdleTime(ctx context.Context, key string) *DurationCmd
-	Persist(ctx context.Context, key string) *BoolCmd
-	PExpire(ctx context.Context, key string, expiration time.Duration) *BoolCmd
-	PExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd
-	PTTL(ctx context.Context, key string) *DurationCmd
-	RandomKey(ctx context.Context) *StringCmd
-	Rename(ctx context.Context, key, newkey string) *StatusCmd
-	RenameNX(ctx context.Context, key, newkey string) *BoolCmd
-	Restore(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd
-	RestoreReplace(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd
-	Sort(ctx context.Context, key string, sort *Sort) *StringSliceCmd
-	SortStore(ctx context.Context, key, store string, sort *Sort) *IntCmd
-	SortInterfaces(ctx context.Context, key string, sort *Sort) *SliceCmd
-	Touch(ctx context.Context, keys ...string) *IntCmd
-	TTL(ctx context.Context, key string) *DurationCmd
-	Type(ctx context.Context, key string) *StatusCmd
-	Append(ctx context.Context, key, value string) *IntCmd
-	Decr(ctx context.Context, key string) *IntCmd
-	DecrBy(ctx context.Context, key string, decrement int64) *IntCmd
-	Get(ctx context.Context, key string) *StringCmd
-	GetRange(ctx context.Context, key string, start, end int64) *StringCmd
-	GetSet(ctx context.Context, key string, value interface{}) *StringCmd
-	GetEx(ctx context.Context, key string, expiration time.Duration) *StringCmd
-	GetDel(ctx context.Context, key string) *StringCmd
-	Incr(ctx context.Context, key string) *IntCmd
-	IncrBy(ctx context.Context, key string, value int64) *IntCmd
-	IncrByFloat(ctx context.Context, key string, value float64) *FloatCmd
-	MGet(ctx context.Context, keys ...string) *SliceCmd
-	MSet(ctx context.Context, values ...interface{}) *StatusCmd
-	MSetNX(ctx context.Context, values ...interface{}) *BoolCmd
-	Set(ctx context.Context, key string, value interface{}, expiration time.Duration) *StatusCmd
-	SetArgs(ctx context.Context, key string, value interface{}, a SetArgs) *StatusCmd
-	// TODO: rename to SetEx
-	SetEX(ctx context.Context, key string, value interface{}, expiration time.Duration) *StatusCmd
-	SetNX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd
-	SetXX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd
-	SetRange(ctx context.Context, key string, offset int64, value string) *IntCmd
-	StrLen(ctx context.Context, key string) *IntCmd
-	Copy(ctx context.Context, sourceKey string, destKey string, db int, replace bool) *IntCmd
-
-	GetBit(ctx context.Context, key string, offset int64) *IntCmd
-	SetBit(ctx context.Context, key string, offset int64, value int) *IntCmd
-	BitCount(ctx context.Context, key string, bitCount *BitCount) *IntCmd
-	BitOpAnd(ctx context.Context, destKey string, keys ...string) *IntCmd
-	BitOpOr(ctx context.Context, destKey string, keys ...string) *IntCmd
-	BitOpXor(ctx context.Context, destKey string, keys ...string) *IntCmd
-	BitOpNot(ctx context.Context, destKey string, key string) *IntCmd
-	BitPos(ctx context.Context, key string, bit int64, pos ...int64) *IntCmd
-	BitField(ctx context.Context, key string, args ...interface{}) *IntSliceCmd
-
-	Scan(ctx context.Context, cursor uint64, match string, count int64) *ScanCmd
-	ScanType(ctx context.Context, cursor uint64, match string, count int64, keyType string) *ScanCmd
-	SScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd
-	HScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd
-	ZScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd
-
-	HDel(ctx context.Context, key string, fields ...string) *IntCmd
-	HExists(ctx context.Context, key, field string) *BoolCmd
-	HGet(ctx context.Context, key, field string) *StringCmd
-	HGetAll(ctx context.Context, key string) *StringStringMapCmd
-	HIncrBy(ctx context.Context, key, field string, incr int64) *IntCmd
-	HIncrByFloat(ctx context.Context, key, field string, incr float64) *FloatCmd
-	HKeys(ctx context.Context, key string) *StringSliceCmd
-	HLen(ctx context.Context, key string) *IntCmd
-	HMGet(ctx context.Context, key string, fields ...string) *SliceCmd
-	HSet(ctx context.Context, key string, values ...interface{}) *IntCmd
-	HMSet(ctx context.Context, key string, values ...interface{}) *BoolCmd
-	HSetNX(ctx context.Context, key, field string, value interface{}) *BoolCmd
-	HVals(ctx context.Context, key string) *StringSliceCmd
-	HRandField(ctx context.Context, key string, count int, withValues bool) *StringSliceCmd
-
-	BLPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd
-	BRPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd
-	BRPopLPush(ctx context.Context, source, destination string, timeout time.Duration) *StringCmd
-	LIndex(ctx context.Context, key string, index int64) *StringCmd
-	LInsert(ctx context.Context, key, op string, pivot, value interface{}) *IntCmd
-	LInsertBefore(ctx context.Context, key string, pivot, value interface{}) *IntCmd
-	LInsertAfter(ctx context.Context, key string, pivot, value interface{}) *IntCmd
-	LLen(ctx context.Context, key string) *IntCmd
-	LPop(ctx context.Context, key string) *StringCmd
-	LPopCount(ctx context.Context, key string, count int) *StringSliceCmd
-	LPos(ctx context.Context, key string, value string, args LPosArgs) *IntCmd
-	LPosCount(ctx context.Context, key string, value string, count int64, args LPosArgs) *IntSliceCmd
-	LPush(ctx context.Context, key string, values ...interface{}) *IntCmd
-	LPushX(ctx context.Context, key string, values ...interface{}) *IntCmd
-	LRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd
-	LRem(ctx context.Context, key string, count int64, value interface{}) *IntCmd
-	LSet(ctx context.Context, key string, index int64, value interface{}) *StatusCmd
-	LTrim(ctx context.Context, key string, start, stop int64) *StatusCmd
-	RPop(ctx context.Context, key string) *StringCmd
-	RPopCount(ctx context.Context, key string, count int) *StringSliceCmd
-	RPopLPush(ctx context.Context, source, destination string) *StringCmd
-	RPush(ctx context.Context, key string, values ...interface{}) *IntCmd
-	RPushX(ctx context.Context, key string, values ...interface{}) *IntCmd
-	LMove(ctx context.Context, source, destination, srcpos, destpos string) *StringCmd
-	BLMove(ctx context.Context, source, destination, srcpos, destpos string, timeout time.Duration) *StringCmd
-
-	SAdd(ctx context.Context, key string, members ...interface{}) *IntCmd
-	SCard(ctx context.Context, key string) *IntCmd
-	SDiff(ctx context.Context, keys ...string) *StringSliceCmd
-	SDiffStore(ctx context.Context, destination string, keys ...string) *IntCmd
-	SInter(ctx context.Context, keys ...string) *StringSliceCmd
-	SInterStore(ctx context.Context, destination string, keys ...string) *IntCmd
-	SIsMember(ctx context.Context, key string, member interface{}) *BoolCmd
-	SMIsMember(ctx context.Context, key string, members ...interface{}) *BoolSliceCmd
-	SMembers(ctx context.Context, key string) *StringSliceCmd
-	SMembersMap(ctx context.Context, key string) *StringStructMapCmd
-	SMove(ctx context.Context, source, destination string, member interface{}) *BoolCmd
-	SPop(ctx context.Context, key string) *StringCmd
-	SPopN(ctx context.Context, key string, count int64) *StringSliceCmd
-	SRandMember(ctx context.Context, key string) *StringCmd
-	SRandMemberN(ctx context.Context, key string, count int64) *StringSliceCmd
-	SRem(ctx context.Context, key string, members ...interface{}) *IntCmd
-	SUnion(ctx context.Context, keys ...string) *StringSliceCmd
-	SUnionStore(ctx context.Context, destination string, keys ...string) *IntCmd
-
-	XAdd(ctx context.Context, a *XAddArgs) *StringCmd
-	XDel(ctx context.Context, stream string, ids ...string) *IntCmd
-	XLen(ctx context.Context, stream string) *IntCmd
-	XRange(ctx context.Context, stream, start, stop string) *XMessageSliceCmd
-	XRangeN(ctx context.Context, stream, start, stop string, count int64) *XMessageSliceCmd
-	XRevRange(ctx context.Context, stream string, start, stop string) *XMessageSliceCmd
-	XRevRangeN(ctx context.Context, stream string, start, stop string, count int64) *XMessageSliceCmd
-	XRead(ctx context.Context, a *XReadArgs) *XStreamSliceCmd
-	XReadStreams(ctx context.Context, streams ...string) *XStreamSliceCmd
-	XGroupCreate(ctx context.Context, stream, group, start string) *StatusCmd
-	XGroupCreateMkStream(ctx context.Context, stream, group, start string) *StatusCmd
-	XGroupSetID(ctx context.Context, stream, group, start string) *StatusCmd
-	XGroupDestroy(ctx context.Context, stream, group string) *IntCmd
-	XGroupCreateConsumer(ctx context.Context, stream, group, consumer string) *IntCmd
-	XGroupDelConsumer(ctx context.Context, stream, group, consumer string) *IntCmd
-	XReadGroup(ctx context.Context, a *XReadGroupArgs) *XStreamSliceCmd
-	XAck(ctx context.Context, stream, group string, ids ...string) *IntCmd
-	XPending(ctx context.Context, stream, group string) *XPendingCmd
-	XPendingExt(ctx context.Context, a *XPendingExtArgs) *XPendingExtCmd
-	XClaim(ctx context.Context, a *XClaimArgs) *XMessageSliceCmd
-	XClaimJustID(ctx context.Context, a *XClaimArgs) *StringSliceCmd
-	XAutoClaim(ctx context.Context, a *XAutoClaimArgs) *XAutoClaimCmd
-	XAutoClaimJustID(ctx context.Context, a *XAutoClaimArgs) *XAutoClaimJustIDCmd
-
-	// TODO: XTrim and XTrimApprox remove in v9.
-	XTrim(ctx context.Context, key string, maxLen int64) *IntCmd
-	XTrimApprox(ctx context.Context, key string, maxLen int64) *IntCmd
-	XTrimMaxLen(ctx context.Context, key string, maxLen int64) *IntCmd
-	XTrimMaxLenApprox(ctx context.Context, key string, maxLen, limit int64) *IntCmd
-	XTrimMinID(ctx context.Context, key string, minID string) *IntCmd
-	XTrimMinIDApprox(ctx context.Context, key string, minID string, limit int64) *IntCmd
-	XInfoGroups(ctx context.Context, key string) *XInfoGroupsCmd
-	XInfoStream(ctx context.Context, key string) *XInfoStreamCmd
-	XInfoStreamFull(ctx context.Context, key string, count int) *XInfoStreamFullCmd
-	XInfoConsumers(ctx context.Context, key string, group string) *XInfoConsumersCmd
-
-	BZPopMax(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd
-	BZPopMin(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd
-
-	// TODO: remove
-	//		ZAddCh
-	//		ZIncr
-	//		ZAddNXCh
-	//		ZAddXXCh
-	//		ZIncrNX
-	//		ZIncrXX
-	// 	in v9.
-	// 	use ZAddArgs and ZAddArgsIncr.
-
-	ZAdd(ctx context.Context, key string, members ...*Z) *IntCmd
-	ZAddNX(ctx context.Context, key string, members ...*Z) *IntCmd
-	ZAddXX(ctx context.Context, key string, members ...*Z) *IntCmd
-	ZAddCh(ctx context.Context, key string, members ...*Z) *IntCmd
-	ZAddNXCh(ctx context.Context, key string, members ...*Z) *IntCmd
-	ZAddXXCh(ctx context.Context, key string, members ...*Z) *IntCmd
-	ZAddArgs(ctx context.Context, key string, args ZAddArgs) *IntCmd
-	ZAddArgsIncr(ctx context.Context, key string, args ZAddArgs) *FloatCmd
-	ZIncr(ctx context.Context, key string, member *Z) *FloatCmd
-	ZIncrNX(ctx context.Context, key string, member *Z) *FloatCmd
-	ZIncrXX(ctx context.Context, key string, member *Z) *FloatCmd
-	ZCard(ctx context.Context, key string) *IntCmd
-	ZCount(ctx context.Context, key, min, max string) *IntCmd
-	ZLexCount(ctx context.Context, key, min, max string) *IntCmd
-	ZIncrBy(ctx context.Context, key string, increment float64, member string) *FloatCmd
-	ZInter(ctx context.Context, store *ZStore) *StringSliceCmd
-	ZInterWithScores(ctx context.Context, store *ZStore) *ZSliceCmd
-	ZInterStore(ctx context.Context, destination string, store *ZStore) *IntCmd
-	ZMScore(ctx context.Context, key string, members ...string) *FloatSliceCmd
-	ZPopMax(ctx context.Context, key string, count ...int64) *ZSliceCmd
-	ZPopMin(ctx context.Context, key string, count ...int64) *ZSliceCmd
-	ZRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd
-	ZRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd
-	ZRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd
-	ZRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd
-	ZRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd
-	ZRangeArgs(ctx context.Context, z ZRangeArgs) *StringSliceCmd
-	ZRangeArgsWithScores(ctx context.Context, z ZRangeArgs) *ZSliceCmd
-	ZRangeStore(ctx context.Context, dst string, z ZRangeArgs) *IntCmd
-	ZRank(ctx context.Context, key, member string) *IntCmd
-	ZRem(ctx context.Context, key string, members ...interface{}) *IntCmd
-	ZRemRangeByRank(ctx context.Context, key string, start, stop int64) *IntCmd
-	ZRemRangeByScore(ctx context.Context, key, min, max string) *IntCmd
-	ZRemRangeByLex(ctx context.Context, key, min, max string) *IntCmd
-	ZRevRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd
-	ZRevRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd
-	ZRevRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd
-	ZRevRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd
-	ZRevRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd
-	ZRevRank(ctx context.Context, key, member string) *IntCmd
-	ZScore(ctx context.Context, key, member string) *FloatCmd
-	ZUnionStore(ctx context.Context, dest string, store *ZStore) *IntCmd
-	ZUnion(ctx context.Context, store ZStore) *StringSliceCmd
-	ZUnionWithScores(ctx context.Context, store ZStore) *ZSliceCmd
-	ZRandMember(ctx context.Context, key string, count int, withScores bool) *StringSliceCmd
-	ZDiff(ctx context.Context, keys ...string) *StringSliceCmd
-	ZDiffWithScores(ctx context.Context, keys ...string) *ZSliceCmd
-	ZDiffStore(ctx context.Context, destination string, keys ...string) *IntCmd
-
-	PFAdd(ctx context.Context, key string, els ...interface{}) *IntCmd
-	PFCount(ctx context.Context, keys ...string) *IntCmd
-	PFMerge(ctx context.Context, dest string, keys ...string) *StatusCmd
-
-	BgRewriteAOF(ctx context.Context) *StatusCmd
-	BgSave(ctx context.Context) *StatusCmd
-	ClientKill(ctx context.Context, ipPort string) *StatusCmd
-	ClientKillByFilter(ctx context.Context, keys ...string) *IntCmd
-	ClientList(ctx context.Context) *StringCmd
-	ClientPause(ctx context.Context, dur time.Duration) *BoolCmd
-	ClientID(ctx context.Context) *IntCmd
-	ConfigGet(ctx context.Context, parameter string) *SliceCmd
-	ConfigResetStat(ctx context.Context) *StatusCmd
-	ConfigSet(ctx context.Context, parameter, value string) *StatusCmd
-	ConfigRewrite(ctx context.Context) *StatusCmd
-	DBSize(ctx context.Context) *IntCmd
-	FlushAll(ctx context.Context) *StatusCmd
-	FlushAllAsync(ctx context.Context) *StatusCmd
-	FlushDB(ctx context.Context) *StatusCmd
-	FlushDBAsync(ctx context.Context) *StatusCmd
-	Info(ctx context.Context, section ...string) *StringCmd
-	LastSave(ctx context.Context) *IntCmd
-	Save(ctx context.Context) *StatusCmd
-	Shutdown(ctx context.Context) *StatusCmd
-	ShutdownSave(ctx context.Context) *StatusCmd
-	ShutdownNoSave(ctx context.Context) *StatusCmd
-	SlaveOf(ctx context.Context, host, port string) *StatusCmd
-	Time(ctx context.Context) *TimeCmd
-	DebugObject(ctx context.Context, key string) *StringCmd
-	ReadOnly(ctx context.Context) *StatusCmd
-	ReadWrite(ctx context.Context) *StatusCmd
-	MemoryUsage(ctx context.Context, key string, samples ...int) *IntCmd
-
-	Eval(ctx context.Context, script string, keys []string, args ...interface{}) *Cmd
-	EvalSha(ctx context.Context, sha1 string, keys []string, args ...interface{}) *Cmd
-	ScriptExists(ctx context.Context, hashes ...string) *BoolSliceCmd
-	ScriptFlush(ctx context.Context) *StatusCmd
-	ScriptKill(ctx context.Context) *StatusCmd
-	ScriptLoad(ctx context.Context, script string) *StringCmd
-
-	Publish(ctx context.Context, channel string, message interface{}) *IntCmd
-	PubSubChannels(ctx context.Context, pattern string) *StringSliceCmd
-	PubSubNumSub(ctx context.Context, channels ...string) *StringIntMapCmd
-	PubSubNumPat(ctx context.Context) *IntCmd
-
-	ClusterSlots(ctx context.Context) *ClusterSlotsCmd
-	ClusterNodes(ctx context.Context) *StringCmd
-	ClusterMeet(ctx context.Context, host, port string) *StatusCmd
-	ClusterForget(ctx context.Context, nodeID string) *StatusCmd
-	ClusterReplicate(ctx context.Context, nodeID string) *StatusCmd
-	ClusterResetSoft(ctx context.Context) *StatusCmd
-	ClusterResetHard(ctx context.Context) *StatusCmd
-	ClusterInfo(ctx context.Context) *StringCmd
-	ClusterKeySlot(ctx context.Context, key string) *IntCmd
-	ClusterGetKeysInSlot(ctx context.Context, slot int, count int) *StringSliceCmd
-	ClusterCountFailureReports(ctx context.Context, nodeID string) *IntCmd
-	ClusterCountKeysInSlot(ctx context.Context, slot int) *IntCmd
-	ClusterDelSlots(ctx context.Context, slots ...int) *StatusCmd
-	ClusterDelSlotsRange(ctx context.Context, min, max int) *StatusCmd
-	ClusterSaveConfig(ctx context.Context) *StatusCmd
-	ClusterSlaves(ctx context.Context, nodeID string) *StringSliceCmd
-	ClusterFailover(ctx context.Context) *StatusCmd
-	ClusterAddSlots(ctx context.Context, slots ...int) *StatusCmd
-	ClusterAddSlotsRange(ctx context.Context, min, max int) *StatusCmd
-
-	GeoAdd(ctx context.Context, key string, geoLocation ...*GeoLocation) *IntCmd
-	GeoPos(ctx context.Context, key string, members ...string) *GeoPosCmd
-	GeoRadius(ctx context.Context, key string, longitude, latitude float64, query *GeoRadiusQuery) *GeoLocationCmd
-	GeoRadiusStore(ctx context.Context, key string, longitude, latitude float64, query *GeoRadiusQuery) *IntCmd
-	GeoRadiusByMember(ctx context.Context, key, member string, query *GeoRadiusQuery) *GeoLocationCmd
-	GeoRadiusByMemberStore(ctx context.Context, key, member string, query *GeoRadiusQuery) *IntCmd
-	GeoSearch(ctx context.Context, key string, q *GeoSearchQuery) *StringSliceCmd
-	GeoSearchLocation(ctx context.Context, key string, q *GeoSearchLocationQuery) *GeoSearchLocationCmd
-	GeoSearchStore(ctx context.Context, key, store string, q *GeoSearchStoreQuery) *IntCmd
-	GeoDist(ctx context.Context, key string, member1, member2, unit string) *FloatCmd
-	GeoHash(ctx context.Context, key string, members ...string) *StringSliceCmd
-}
-
-type StatefulCmdable interface {
-	Cmdable
-	Auth(ctx context.Context, password string) *StatusCmd
-	AuthACL(ctx context.Context, username, password string) *StatusCmd
-	Select(ctx context.Context, index int) *StatusCmd
-	SwapDB(ctx context.Context, index1, index2 int) *StatusCmd
-	ClientSetName(ctx context.Context, name string) *BoolCmd
-}
-
-var (
-	_ Cmdable = (*Client)(nil)
-	_ Cmdable = (*Tx)(nil)
-	_ Cmdable = (*Ring)(nil)
-	_ Cmdable = (*ClusterClient)(nil)
-)
-
-type cmdable func(ctx context.Context, cmd Cmder) error
-
-type statefulCmdable func(ctx context.Context, cmd Cmder) error
-
-//------------------------------------------------------------------------------
-
-func (c statefulCmdable) Auth(ctx context.Context, password string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "auth", password)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// AuthACL Perform an AUTH command, using the given user and pass.
-// Should be used to authenticate the current connection with one of the connections defined in the ACL list
-// when connecting to a Redis 6.0 instance, or greater, that is using the Redis ACL system.
-func (c statefulCmdable) AuthACL(ctx context.Context, username, password string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "auth", username, password)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Wait(ctx context.Context, numSlaves int, timeout time.Duration) *IntCmd {
-	cmd := NewIntCmd(ctx, "wait", numSlaves, int(timeout/time.Millisecond))
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c statefulCmdable) Select(ctx context.Context, index int) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "select", index)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c statefulCmdable) SwapDB(ctx context.Context, index1, index2 int) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "swapdb", index1, index2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ClientSetName assigns a name to the connection.
-func (c statefulCmdable) ClientSetName(ctx context.Context, name string) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "client", "setname", name)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) Command(ctx context.Context) *CommandsInfoCmd {
-	cmd := NewCommandsInfoCmd(ctx, "command")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ClientGetName returns the name of the connection.
-func (c cmdable) ClientGetName(ctx context.Context) *StringCmd {
-	cmd := NewStringCmd(ctx, "client", "getname")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Echo(ctx context.Context, message interface{}) *StringCmd {
-	cmd := NewStringCmd(ctx, "echo", message)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Ping(ctx context.Context) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "ping")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Quit(_ context.Context) *StatusCmd {
-	panic("not implemented")
-}
-
-func (c cmdable) Del(ctx context.Context, keys ...string) *IntCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "del"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Unlink(ctx context.Context, keys ...string) *IntCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "unlink"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Dump(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "dump", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Exists(ctx context.Context, keys ...string) *IntCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "exists"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Expire(ctx context.Context, key string, expiration time.Duration) *BoolCmd {
-	return c.expire(ctx, key, expiration, "")
-}
-
-func (c cmdable) ExpireNX(ctx context.Context, key string, expiration time.Duration) *BoolCmd {
-	return c.expire(ctx, key, expiration, "NX")
-}
-
-func (c cmdable) ExpireXX(ctx context.Context, key string, expiration time.Duration) *BoolCmd {
-	return c.expire(ctx, key, expiration, "XX")
-}
-
-func (c cmdable) ExpireGT(ctx context.Context, key string, expiration time.Duration) *BoolCmd {
-	return c.expire(ctx, key, expiration, "GT")
-}
-
-func (c cmdable) ExpireLT(ctx context.Context, key string, expiration time.Duration) *BoolCmd {
-	return c.expire(ctx, key, expiration, "LT")
-}
-
-func (c cmdable) expire(
-	ctx context.Context, key string, expiration time.Duration, mode string,
-) *BoolCmd {
-	args := make([]interface{}, 3, 4)
-	args[0] = "expire"
-	args[1] = key
-	args[2] = formatSec(ctx, expiration)
-	if mode != "" {
-		args = append(args, mode)
-	}
-
-	cmd := NewBoolCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "expireat", key, tm.Unix())
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Keys(ctx context.Context, pattern string) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "keys", pattern)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Migrate(ctx context.Context, host, port, key string, db int, timeout time.Duration) *StatusCmd {
-	cmd := NewStatusCmd(
-		ctx,
-		"migrate",
-		host,
-		port,
-		key,
-		db,
-		formatMs(ctx, timeout),
-	)
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Move(ctx context.Context, key string, db int) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "move", key, db)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ObjectRefCount(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "object", "refcount", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ObjectEncoding(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "object", "encoding", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ObjectIdleTime(ctx context.Context, key string) *DurationCmd {
-	cmd := NewDurationCmd(ctx, time.Second, "object", "idletime", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Persist(ctx context.Context, key string) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "persist", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) PExpire(ctx context.Context, key string, expiration time.Duration) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "pexpire", key, formatMs(ctx, expiration))
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) PExpireAt(ctx context.Context, key string, tm time.Time) *BoolCmd {
-	cmd := NewBoolCmd(
-		ctx,
-		"pexpireat",
-		key,
-		tm.UnixNano()/int64(time.Millisecond),
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) PTTL(ctx context.Context, key string) *DurationCmd {
-	cmd := NewDurationCmd(ctx, time.Millisecond, "pttl", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RandomKey(ctx context.Context) *StringCmd {
-	cmd := NewStringCmd(ctx, "randomkey")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Rename(ctx context.Context, key, newkey string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "rename", key, newkey)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RenameNX(ctx context.Context, key, newkey string) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "renamenx", key, newkey)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Restore(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd {
-	cmd := NewStatusCmd(
-		ctx,
-		"restore",
-		key,
-		formatMs(ctx, ttl),
-		value,
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RestoreReplace(ctx context.Context, key string, ttl time.Duration, value string) *StatusCmd {
-	cmd := NewStatusCmd(
-		ctx,
-		"restore",
-		key,
-		formatMs(ctx, ttl),
-		value,
-		"replace",
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type Sort struct {
-	By            string
-	Offset, Count int64
-	Get           []string
-	Order         string
-	Alpha         bool
-}
-
-func (sort *Sort) args(key string) []interface{} {
-	args := []interface{}{"sort", key}
-	if sort.By != "" {
-		args = append(args, "by", sort.By)
-	}
-	if sort.Offset != 0 || sort.Count != 0 {
-		args = append(args, "limit", sort.Offset, sort.Count)
-	}
-	for _, get := range sort.Get {
-		args = append(args, "get", get)
-	}
-	if sort.Order != "" {
-		args = append(args, sort.Order)
-	}
-	if sort.Alpha {
-		args = append(args, "alpha")
-	}
-	return args
-}
-
-func (c cmdable) Sort(ctx context.Context, key string, sort *Sort) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, sort.args(key)...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SortStore(ctx context.Context, key, store string, sort *Sort) *IntCmd {
-	args := sort.args(key)
-	if store != "" {
-		args = append(args, "store", store)
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SortInterfaces(ctx context.Context, key string, sort *Sort) *SliceCmd {
-	cmd := NewSliceCmd(ctx, sort.args(key)...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Touch(ctx context.Context, keys ...string) *IntCmd {
-	args := make([]interface{}, len(keys)+1)
-	args[0] = "touch"
-	for i, key := range keys {
-		args[i+1] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) TTL(ctx context.Context, key string) *DurationCmd {
-	cmd := NewDurationCmd(ctx, time.Second, "ttl", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Type(ctx context.Context, key string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "type", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Append(ctx context.Context, key, value string) *IntCmd {
-	cmd := NewIntCmd(ctx, "append", key, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Decr(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "decr", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) DecrBy(ctx context.Context, key string, decrement int64) *IntCmd {
-	cmd := NewIntCmd(ctx, "decrby", key, decrement)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// Get Redis `GET key` command. It returns redis.Nil error when key does not exist.
-func (c cmdable) Get(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "get", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) GetRange(ctx context.Context, key string, start, end int64) *StringCmd {
-	cmd := NewStringCmd(ctx, "getrange", key, start, end)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) GetSet(ctx context.Context, key string, value interface{}) *StringCmd {
-	cmd := NewStringCmd(ctx, "getset", key, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// GetEx An expiration of zero removes the TTL associated with the key (i.e. GETEX key persist).
-// Requires Redis >= 6.2.0.
-func (c cmdable) GetEx(ctx context.Context, key string, expiration time.Duration) *StringCmd {
-	args := make([]interface{}, 0, 4)
-	args = append(args, "getex", key)
-	if expiration > 0 {
-		if usePrecise(expiration) {
-			args = append(args, "px", formatMs(ctx, expiration))
-		} else {
-			args = append(args, "ex", formatSec(ctx, expiration))
-		}
-	} else if expiration == 0 {
-		args = append(args, "persist")
-	}
-
-	cmd := NewStringCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// GetDel redis-server version >= 6.2.0.
-func (c cmdable) GetDel(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "getdel", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Incr(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "incr", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) IncrBy(ctx context.Context, key string, value int64) *IntCmd {
-	cmd := NewIntCmd(ctx, "incrby", key, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) IncrByFloat(ctx context.Context, key string, value float64) *FloatCmd {
-	cmd := NewFloatCmd(ctx, "incrbyfloat", key, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) MGet(ctx context.Context, keys ...string) *SliceCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "mget"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// MSet is like Set but accepts multiple values:
-//   - MSet("key1", "value1", "key2", "value2")
-//   - MSet([]string{"key1", "value1", "key2", "value2"})
-//   - MSet(map[string]interface{}{"key1": "value1", "key2": "value2"})
-func (c cmdable) MSet(ctx context.Context, values ...interface{}) *StatusCmd {
-	args := make([]interface{}, 1, 1+len(values))
-	args[0] = "mset"
-	args = appendArgs(args, values)
-	cmd := NewStatusCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// MSetNX is like SetNX but accepts multiple values:
-//   - MSetNX("key1", "value1", "key2", "value2")
-//   - MSetNX([]string{"key1", "value1", "key2", "value2"})
-//   - MSetNX(map[string]interface{}{"key1": "value1", "key2": "value2"})
-func (c cmdable) MSetNX(ctx context.Context, values ...interface{}) *BoolCmd {
-	args := make([]interface{}, 1, 1+len(values))
-	args[0] = "msetnx"
-	args = appendArgs(args, values)
-	cmd := NewBoolCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// Set Redis `SET key value [expiration]` command.
-// Use expiration for `SETEX`-like behavior.
-//
-// Zero expiration means the key has no expiration time.
-// KeepTTL is a Redis KEEPTTL option to keep existing TTL, it requires your redis-server version >= 6.0,
-// otherwise you will receive an error: (error) ERR syntax error.
-func (c cmdable) Set(ctx context.Context, key string, value interface{}, expiration time.Duration) *StatusCmd {
-	args := make([]interface{}, 3, 5)
-	args[0] = "set"
-	args[1] = key
-	args[2] = value
-	if expiration > 0 {
-		if usePrecise(expiration) {
-			args = append(args, "px", formatMs(ctx, expiration))
-		} else {
-			args = append(args, "ex", formatSec(ctx, expiration))
-		}
-	} else if expiration == KeepTTL {
-		args = append(args, "keepttl")
-	}
-
-	cmd := NewStatusCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SetArgs provides arguments for the SetArgs function.
-type SetArgs struct {
-	// Mode can be `NX` or `XX` or empty.
-	Mode string
-
-	// Zero `TTL` or `Expiration` means that the key has no expiration time.
-	TTL      time.Duration
-	ExpireAt time.Time
-
-	// When Get is true, the command returns the old value stored at key, or nil when key did not exist.
-	Get bool
-
-	// KeepTTL is a Redis KEEPTTL option to keep existing TTL, it requires your redis-server version >= 6.0,
-	// otherwise you will receive an error: (error) ERR syntax error.
-	KeepTTL bool
-}
-
-// SetArgs supports all the options that the SET command supports.
-// It is the alternative to the Set function when you want
-// to have more control over the options.
-func (c cmdable) SetArgs(ctx context.Context, key string, value interface{}, a SetArgs) *StatusCmd {
-	args := []interface{}{"set", key, value}
-
-	if a.KeepTTL {
-		args = append(args, "keepttl")
-	}
-
-	if !a.ExpireAt.IsZero() {
-		args = append(args, "exat", a.ExpireAt.Unix())
-	}
-	if a.TTL > 0 {
-		if usePrecise(a.TTL) {
-			args = append(args, "px", formatMs(ctx, a.TTL))
-		} else {
-			args = append(args, "ex", formatSec(ctx, a.TTL))
-		}
-	}
-
-	if a.Mode != "" {
-		args = append(args, a.Mode)
-	}
-
-	if a.Get {
-		args = append(args, "get")
-	}
-
-	cmd := NewStatusCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SetEX Redis `SETEX key expiration value` command.
-func (c cmdable) SetEX(ctx context.Context, key string, value interface{}, expiration time.Duration) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "setex", key, formatSec(ctx, expiration), value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SetNX Redis `SET key value [expiration] NX` command.
-//
-// Zero expiration means the key has no expiration time.
-// KeepTTL is a Redis KEEPTTL option to keep existing TTL, it requires your redis-server version >= 6.0,
-// otherwise you will receive an error: (error) ERR syntax error.
-func (c cmdable) SetNX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd {
-	var cmd *BoolCmd
-	switch expiration {
-	case 0:
-		// Use old `SETNX` to support old Redis versions.
-		cmd = NewBoolCmd(ctx, "setnx", key, value)
-	case KeepTTL:
-		cmd = NewBoolCmd(ctx, "set", key, value, "keepttl", "nx")
-	default:
-		if usePrecise(expiration) {
-			cmd = NewBoolCmd(ctx, "set", key, value, "px", formatMs(ctx, expiration), "nx")
-		} else {
-			cmd = NewBoolCmd(ctx, "set", key, value, "ex", formatSec(ctx, expiration), "nx")
-		}
-	}
-
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SetXX Redis `SET key value [expiration] XX` command.
-//
-// Zero expiration means the key has no expiration time.
-// KeepTTL is a Redis KEEPTTL option to keep existing TTL, it requires your redis-server version >= 6.0,
-// otherwise you will receive an error: (error) ERR syntax error.
-func (c cmdable) SetXX(ctx context.Context, key string, value interface{}, expiration time.Duration) *BoolCmd {
-	var cmd *BoolCmd
-	switch expiration {
-	case 0:
-		cmd = NewBoolCmd(ctx, "set", key, value, "xx")
-	case KeepTTL:
-		cmd = NewBoolCmd(ctx, "set", key, value, "keepttl", "xx")
-	default:
-		if usePrecise(expiration) {
-			cmd = NewBoolCmd(ctx, "set", key, value, "px", formatMs(ctx, expiration), "xx")
-		} else {
-			cmd = NewBoolCmd(ctx, "set", key, value, "ex", formatSec(ctx, expiration), "xx")
-		}
-	}
-
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SetRange(ctx context.Context, key string, offset int64, value string) *IntCmd {
-	cmd := NewIntCmd(ctx, "setrange", key, offset, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) StrLen(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "strlen", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) Copy(ctx context.Context, sourceKey, destKey string, db int, replace bool) *IntCmd {
-	args := []interface{}{"copy", sourceKey, destKey, "DB", db}
-	if replace {
-		args = append(args, "REPLACE")
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) GetBit(ctx context.Context, key string, offset int64) *IntCmd {
-	cmd := NewIntCmd(ctx, "getbit", key, offset)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SetBit(ctx context.Context, key string, offset int64, value int) *IntCmd {
-	cmd := NewIntCmd(
-		ctx,
-		"setbit",
-		key,
-		offset,
-		value,
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type BitCount struct {
-	Start, End int64
-}
-
-func (c cmdable) BitCount(ctx context.Context, key string, bitCount *BitCount) *IntCmd {
-	args := []interface{}{"bitcount", key}
-	if bitCount != nil {
-		args = append(
-			args,
-			bitCount.Start,
-			bitCount.End,
-		)
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) bitOp(ctx context.Context, op, destKey string, keys ...string) *IntCmd {
-	args := make([]interface{}, 3+len(keys))
-	args[0] = "bitop"
-	args[1] = op
-	args[2] = destKey
-	for i, key := range keys {
-		args[3+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) BitOpAnd(ctx context.Context, destKey string, keys ...string) *IntCmd {
-	return c.bitOp(ctx, "and", destKey, keys...)
-}
-
-func (c cmdable) BitOpOr(ctx context.Context, destKey string, keys ...string) *IntCmd {
-	return c.bitOp(ctx, "or", destKey, keys...)
-}
-
-func (c cmdable) BitOpXor(ctx context.Context, destKey string, keys ...string) *IntCmd {
-	return c.bitOp(ctx, "xor", destKey, keys...)
-}
-
-func (c cmdable) BitOpNot(ctx context.Context, destKey string, key string) *IntCmd {
-	return c.bitOp(ctx, "not", destKey, key)
-}
-
-func (c cmdable) BitPos(ctx context.Context, key string, bit int64, pos ...int64) *IntCmd {
-	args := make([]interface{}, 3+len(pos))
-	args[0] = "bitpos"
-	args[1] = key
-	args[2] = bit
-	switch len(pos) {
-	case 0:
-	case 1:
-		args[3] = pos[0]
-	case 2:
-		args[3] = pos[0]
-		args[4] = pos[1]
-	default:
-		panic("too many arguments")
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) BitField(ctx context.Context, key string, args ...interface{}) *IntSliceCmd {
-	a := make([]interface{}, 0, 2+len(args))
-	a = append(a, "bitfield")
-	a = append(a, key)
-	a = append(a, args...)
-	cmd := NewIntSliceCmd(ctx, a...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) Scan(ctx context.Context, cursor uint64, match string, count int64) *ScanCmd {
-	args := []interface{}{"scan", cursor}
-	if match != "" {
-		args = append(args, "match", match)
-	}
-	if count > 0 {
-		args = append(args, "count", count)
-	}
-	cmd := NewScanCmd(ctx, c, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ScanType(ctx context.Context, cursor uint64, match string, count int64, keyType string) *ScanCmd {
-	args := []interface{}{"scan", cursor}
-	if match != "" {
-		args = append(args, "match", match)
-	}
-	if count > 0 {
-		args = append(args, "count", count)
-	}
-	if keyType != "" {
-		args = append(args, "type", keyType)
-	}
-	cmd := NewScanCmd(ctx, c, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd {
-	args := []interface{}{"sscan", key, cursor}
-	if match != "" {
-		args = append(args, "match", match)
-	}
-	if count > 0 {
-		args = append(args, "count", count)
-	}
-	cmd := NewScanCmd(ctx, c, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd {
-	args := []interface{}{"hscan", key, cursor}
-	if match != "" {
-		args = append(args, "match", match)
-	}
-	if count > 0 {
-		args = append(args, "count", count)
-	}
-	cmd := NewScanCmd(ctx, c, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZScan(ctx context.Context, key string, cursor uint64, match string, count int64) *ScanCmd {
-	args := []interface{}{"zscan", key, cursor}
-	if match != "" {
-		args = append(args, "match", match)
-	}
-	if count > 0 {
-		args = append(args, "count", count)
-	}
-	cmd := NewScanCmd(ctx, c, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) HDel(ctx context.Context, key string, fields ...string) *IntCmd {
-	args := make([]interface{}, 2+len(fields))
-	args[0] = "hdel"
-	args[1] = key
-	for i, field := range fields {
-		args[2+i] = field
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HExists(ctx context.Context, key, field string) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "hexists", key, field)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HGet(ctx context.Context, key, field string) *StringCmd {
-	cmd := NewStringCmd(ctx, "hget", key, field)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HGetAll(ctx context.Context, key string) *StringStringMapCmd {
-	cmd := NewStringStringMapCmd(ctx, "hgetall", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HIncrBy(ctx context.Context, key, field string, incr int64) *IntCmd {
-	cmd := NewIntCmd(ctx, "hincrby", key, field, incr)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HIncrByFloat(ctx context.Context, key, field string, incr float64) *FloatCmd {
-	cmd := NewFloatCmd(ctx, "hincrbyfloat", key, field, incr)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HKeys(ctx context.Context, key string) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "hkeys", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HLen(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "hlen", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// HMGet returns the values for the specified fields in the hash stored at key.
-// It returns an interface{} to distinguish between empty string and nil value.
-func (c cmdable) HMGet(ctx context.Context, key string, fields ...string) *SliceCmd {
-	args := make([]interface{}, 2+len(fields))
-	args[0] = "hmget"
-	args[1] = key
-	for i, field := range fields {
-		args[2+i] = field
-	}
-	cmd := NewSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// HSet accepts values in following formats:
-//   - HSet("myhash", "key1", "value1", "key2", "value2")
-//   - HSet("myhash", []string{"key1", "value1", "key2", "value2"})
-//   - HSet("myhash", map[string]interface{}{"key1": "value1", "key2": "value2"})
-//
-// Note that it requires Redis v4 for multiple field/value pairs support.
-func (c cmdable) HSet(ctx context.Context, key string, values ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(values))
-	args[0] = "hset"
-	args[1] = key
-	args = appendArgs(args, values)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// HMSet is a deprecated version of HSet left for compatibility with Redis 3.
-func (c cmdable) HMSet(ctx context.Context, key string, values ...interface{}) *BoolCmd {
-	args := make([]interface{}, 2, 2+len(values))
-	args[0] = "hmset"
-	args[1] = key
-	args = appendArgs(args, values)
-	cmd := NewBoolCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HSetNX(ctx context.Context, key, field string, value interface{}) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "hsetnx", key, field, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) HVals(ctx context.Context, key string) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "hvals", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// HRandField redis-server version >= 6.2.0.
-func (c cmdable) HRandField(ctx context.Context, key string, count int, withValues bool) *StringSliceCmd {
-	args := make([]interface{}, 0, 4)
-
-	// Although count=0 is meaningless, redis accepts count=0.
-	args = append(args, "hrandfield", key, count)
-	if withValues {
-		args = append(args, "withvalues")
-	}
-
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) BLPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd {
-	args := make([]interface{}, 1+len(keys)+1)
-	args[0] = "blpop"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	args[len(args)-1] = formatSec(ctx, timeout)
-	cmd := NewStringSliceCmd(ctx, args...)
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) BRPop(ctx context.Context, timeout time.Duration, keys ...string) *StringSliceCmd {
-	args := make([]interface{}, 1+len(keys)+1)
-	args[0] = "brpop"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	args[len(keys)+1] = formatSec(ctx, timeout)
-	cmd := NewStringSliceCmd(ctx, args...)
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) BRPopLPush(ctx context.Context, source, destination string, timeout time.Duration) *StringCmd {
-	cmd := NewStringCmd(
-		ctx,
-		"brpoplpush",
-		source,
-		destination,
-		formatSec(ctx, timeout),
-	)
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LIndex(ctx context.Context, key string, index int64) *StringCmd {
-	cmd := NewStringCmd(ctx, "lindex", key, index)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LInsert(ctx context.Context, key, op string, pivot, value interface{}) *IntCmd {
-	cmd := NewIntCmd(ctx, "linsert", key, op, pivot, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LInsertBefore(ctx context.Context, key string, pivot, value interface{}) *IntCmd {
-	cmd := NewIntCmd(ctx, "linsert", key, "before", pivot, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LInsertAfter(ctx context.Context, key string, pivot, value interface{}) *IntCmd {
-	cmd := NewIntCmd(ctx, "linsert", key, "after", pivot, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LLen(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "llen", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LPop(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "lpop", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LPopCount(ctx context.Context, key string, count int) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "lpop", key, count)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type LPosArgs struct {
-	Rank, MaxLen int64
-}
-
-func (c cmdable) LPos(ctx context.Context, key string, value string, a LPosArgs) *IntCmd {
-	args := []interface{}{"lpos", key, value}
-	if a.Rank != 0 {
-		args = append(args, "rank", a.Rank)
-	}
-	if a.MaxLen != 0 {
-		args = append(args, "maxlen", a.MaxLen)
-	}
-
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LPosCount(ctx context.Context, key string, value string, count int64, a LPosArgs) *IntSliceCmd {
-	args := []interface{}{"lpos", key, value, "count", count}
-	if a.Rank != 0 {
-		args = append(args, "rank", a.Rank)
-	}
-	if a.MaxLen != 0 {
-		args = append(args, "maxlen", a.MaxLen)
-	}
-	cmd := NewIntSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LPush(ctx context.Context, key string, values ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(values))
-	args[0] = "lpush"
-	args[1] = key
-	args = appendArgs(args, values)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LPushX(ctx context.Context, key string, values ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(values))
-	args[0] = "lpushx"
-	args[1] = key
-	args = appendArgs(args, values)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd {
-	cmd := NewStringSliceCmd(
-		ctx,
-		"lrange",
-		key,
-		start,
-		stop,
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LRem(ctx context.Context, key string, count int64, value interface{}) *IntCmd {
-	cmd := NewIntCmd(ctx, "lrem", key, count, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LSet(ctx context.Context, key string, index int64, value interface{}) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "lset", key, index, value)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LTrim(ctx context.Context, key string, start, stop int64) *StatusCmd {
-	cmd := NewStatusCmd(
-		ctx,
-		"ltrim",
-		key,
-		start,
-		stop,
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RPop(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "rpop", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RPopCount(ctx context.Context, key string, count int) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "rpop", key, count)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RPopLPush(ctx context.Context, source, destination string) *StringCmd {
-	cmd := NewStringCmd(ctx, "rpoplpush", source, destination)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RPush(ctx context.Context, key string, values ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(values))
-	args[0] = "rpush"
-	args[1] = key
-	args = appendArgs(args, values)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) RPushX(ctx context.Context, key string, values ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(values))
-	args[0] = "rpushx"
-	args[1] = key
-	args = appendArgs(args, values)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) LMove(ctx context.Context, source, destination, srcpos, destpos string) *StringCmd {
-	cmd := NewStringCmd(ctx, "lmove", source, destination, srcpos, destpos)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) BLMove(
-	ctx context.Context, source, destination, srcpos, destpos string, timeout time.Duration,
-) *StringCmd {
-	cmd := NewStringCmd(ctx, "blmove", source, destination, srcpos, destpos, formatSec(ctx, timeout))
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) SAdd(ctx context.Context, key string, members ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(members))
-	args[0] = "sadd"
-	args[1] = key
-	args = appendArgs(args, members)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SCard(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "scard", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SDiff(ctx context.Context, keys ...string) *StringSliceCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "sdiff"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SDiffStore(ctx context.Context, destination string, keys ...string) *IntCmd {
-	args := make([]interface{}, 2+len(keys))
-	args[0] = "sdiffstore"
-	args[1] = destination
-	for i, key := range keys {
-		args[2+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SInter(ctx context.Context, keys ...string) *StringSliceCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "sinter"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SInterStore(ctx context.Context, destination string, keys ...string) *IntCmd {
-	args := make([]interface{}, 2+len(keys))
-	args[0] = "sinterstore"
-	args[1] = destination
-	for i, key := range keys {
-		args[2+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SIsMember(ctx context.Context, key string, member interface{}) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "sismember", key, member)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SMIsMember Redis `SMISMEMBER key member [member ...]` command.
-func (c cmdable) SMIsMember(ctx context.Context, key string, members ...interface{}) *BoolSliceCmd {
-	args := make([]interface{}, 2, 2+len(members))
-	args[0] = "smismember"
-	args[1] = key
-	args = appendArgs(args, members)
-	cmd := NewBoolSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SMembers Redis `SMEMBERS key` command output as a slice.
-func (c cmdable) SMembers(ctx context.Context, key string) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "smembers", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SMembersMap Redis `SMEMBERS key` command output as a map.
-func (c cmdable) SMembersMap(ctx context.Context, key string) *StringStructMapCmd {
-	cmd := NewStringStructMapCmd(ctx, "smembers", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SMove(ctx context.Context, source, destination string, member interface{}) *BoolCmd {
-	cmd := NewBoolCmd(ctx, "smove", source, destination, member)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SPop Redis `SPOP key` command.
-func (c cmdable) SPop(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "spop", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SPopN Redis `SPOP key count` command.
-func (c cmdable) SPopN(ctx context.Context, key string, count int64) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "spop", key, count)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SRandMember Redis `SRANDMEMBER key` command.
-func (c cmdable) SRandMember(ctx context.Context, key string) *StringCmd {
-	cmd := NewStringCmd(ctx, "srandmember", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// SRandMemberN Redis `SRANDMEMBER key count` command.
-func (c cmdable) SRandMemberN(ctx context.Context, key string, count int64) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "srandmember", key, count)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SRem(ctx context.Context, key string, members ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(members))
-	args[0] = "srem"
-	args[1] = key
-	args = appendArgs(args, members)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SUnion(ctx context.Context, keys ...string) *StringSliceCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "sunion"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) SUnionStore(ctx context.Context, destination string, keys ...string) *IntCmd {
-	args := make([]interface{}, 2+len(keys))
-	args[0] = "sunionstore"
-	args[1] = destination
-	for i, key := range keys {
-		args[2+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-// XAddArgs accepts values in the following formats:
-//   - XAddArgs.Values = []interface{}{"key1", "value1", "key2", "value2"}
-//   - XAddArgs.Values = []string("key1", "value1", "key2", "value2")
-//   - XAddArgs.Values = map[string]interface{}{"key1": "value1", "key2": "value2"}
-//
-// Note that map will not preserve the order of key-value pairs.
-// MaxLen/MaxLenApprox and MinID are in conflict, only one of them can be used.
-type XAddArgs struct {
-	Stream     string
-	NoMkStream bool
-	MaxLen     int64 // MAXLEN N
-
-	// Deprecated: use MaxLen+Approx, remove in v9.
-	MaxLenApprox int64 // MAXLEN ~ N
-
-	MinID string
-	// Approx causes MaxLen and MinID to use "~" matcher (instead of "=").
-	Approx bool
-	Limit  int64
-	ID     string
-	Values interface{}
-}
-
-// XAdd a.Limit has a bug, please confirm it and use it.
-// issue: https://github.com/redis/redis/issues/9046
-func (c cmdable) XAdd(ctx context.Context, a *XAddArgs) *StringCmd {
-	args := make([]interface{}, 0, 11)
-	args = append(args, "xadd", a.Stream)
-	if a.NoMkStream {
-		args = append(args, "nomkstream")
-	}
-	switch {
-	case a.MaxLen > 0:
-		if a.Approx {
-			args = append(args, "maxlen", "~", a.MaxLen)
-		} else {
-			args = append(args, "maxlen", a.MaxLen)
-		}
-	case a.MaxLenApprox > 0:
-		// TODO remove in v9.
-		args = append(args, "maxlen", "~", a.MaxLenApprox)
-	case a.MinID != "":
-		if a.Approx {
-			args = append(args, "minid", "~", a.MinID)
-		} else {
-			args = append(args, "minid", a.MinID)
-		}
-	}
-	if a.Limit > 0 {
-		args = append(args, "limit", a.Limit)
-	}
-	if a.ID != "" {
-		args = append(args, a.ID)
-	} else {
-		args = append(args, "*")
-	}
-	args = appendArg(args, a.Values)
-
-	cmd := NewStringCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XDel(ctx context.Context, stream string, ids ...string) *IntCmd {
-	args := []interface{}{"xdel", stream}
-	for _, id := range ids {
-		args = append(args, id)
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XLen(ctx context.Context, stream string) *IntCmd {
-	cmd := NewIntCmd(ctx, "xlen", stream)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XRange(ctx context.Context, stream, start, stop string) *XMessageSliceCmd {
-	cmd := NewXMessageSliceCmd(ctx, "xrange", stream, start, stop)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XRangeN(ctx context.Context, stream, start, stop string, count int64) *XMessageSliceCmd {
-	cmd := NewXMessageSliceCmd(ctx, "xrange", stream, start, stop, "count", count)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XRevRange(ctx context.Context, stream, start, stop string) *XMessageSliceCmd {
-	cmd := NewXMessageSliceCmd(ctx, "xrevrange", stream, start, stop)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XRevRangeN(ctx context.Context, stream, start, stop string, count int64) *XMessageSliceCmd {
-	cmd := NewXMessageSliceCmd(ctx, "xrevrange", stream, start, stop, "count", count)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type XReadArgs struct {
-	Streams []string // list of streams and ids, e.g. stream1 stream2 id1 id2
-	Count   int64
-	Block   time.Duration
-}
-
-func (c cmdable) XRead(ctx context.Context, a *XReadArgs) *XStreamSliceCmd {
-	args := make([]interface{}, 0, 6+len(a.Streams))
-	args = append(args, "xread")
-
-	keyPos := int8(1)
-	if a.Count > 0 {
-		args = append(args, "count")
-		args = append(args, a.Count)
-		keyPos += 2
-	}
-	if a.Block >= 0 {
-		args = append(args, "block")
-		args = append(args, int64(a.Block/time.Millisecond))
-		keyPos += 2
-	}
-	args = append(args, "streams")
-	keyPos++
-	for _, s := range a.Streams {
-		args = append(args, s)
-	}
-
-	cmd := NewXStreamSliceCmd(ctx, args...)
-	if a.Block >= 0 {
-		cmd.setReadTimeout(a.Block)
-	}
-	cmd.SetFirstKeyPos(keyPos)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XReadStreams(ctx context.Context, streams ...string) *XStreamSliceCmd {
-	return c.XRead(ctx, &XReadArgs{
-		Streams: streams,
-		Block:   -1,
-	})
-}
-
-func (c cmdable) XGroupCreate(ctx context.Context, stream, group, start string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "xgroup", "create", stream, group, start)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XGroupCreateMkStream(ctx context.Context, stream, group, start string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "xgroup", "create", stream, group, start, "mkstream")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XGroupSetID(ctx context.Context, stream, group, start string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "xgroup", "setid", stream, group, start)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XGroupDestroy(ctx context.Context, stream, group string) *IntCmd {
-	cmd := NewIntCmd(ctx, "xgroup", "destroy", stream, group)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XGroupCreateConsumer(ctx context.Context, stream, group, consumer string) *IntCmd {
-	cmd := NewIntCmd(ctx, "xgroup", "createconsumer", stream, group, consumer)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XGroupDelConsumer(ctx context.Context, stream, group, consumer string) *IntCmd {
-	cmd := NewIntCmd(ctx, "xgroup", "delconsumer", stream, group, consumer)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type XReadGroupArgs struct {
-	Group    string
-	Consumer string
-	Streams  []string // list of streams and ids, e.g. stream1 stream2 id1 id2
-	Count    int64
-	Block    time.Duration
-	NoAck    bool
-}
-
-func (c cmdable) XReadGroup(ctx context.Context, a *XReadGroupArgs) *XStreamSliceCmd {
-	args := make([]interface{}, 0, 10+len(a.Streams))
-	args = append(args, "xreadgroup", "group", a.Group, a.Consumer)
-
-	keyPos := int8(4)
-	if a.Count > 0 {
-		args = append(args, "count", a.Count)
-		keyPos += 2
-	}
-	if a.Block >= 0 {
-		args = append(args, "block", int64(a.Block/time.Millisecond))
-		keyPos += 2
-	}
-	if a.NoAck {
-		args = append(args, "noack")
-		keyPos++
-	}
-	args = append(args, "streams")
-	keyPos++
-	for _, s := range a.Streams {
-		args = append(args, s)
-	}
-
-	cmd := NewXStreamSliceCmd(ctx, args...)
-	if a.Block >= 0 {
-		cmd.setReadTimeout(a.Block)
-	}
-	cmd.SetFirstKeyPos(keyPos)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XAck(ctx context.Context, stream, group string, ids ...string) *IntCmd {
-	args := []interface{}{"xack", stream, group}
-	for _, id := range ids {
-		args = append(args, id)
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XPending(ctx context.Context, stream, group string) *XPendingCmd {
-	cmd := NewXPendingCmd(ctx, "xpending", stream, group)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type XPendingExtArgs struct {
-	Stream   string
-	Group    string
-	Idle     time.Duration
-	Start    string
-	End      string
-	Count    int64
-	Consumer string
-}
-
-func (c cmdable) XPendingExt(ctx context.Context, a *XPendingExtArgs) *XPendingExtCmd {
-	args := make([]interface{}, 0, 9)
-	args = append(args, "xpending", a.Stream, a.Group)
-	if a.Idle != 0 {
-		args = append(args, "idle", formatMs(ctx, a.Idle))
-	}
-	args = append(args, a.Start, a.End, a.Count)
-	if a.Consumer != "" {
-		args = append(args, a.Consumer)
-	}
-	cmd := NewXPendingExtCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-type XAutoClaimArgs struct {
-	Stream   string
-	Group    string
-	MinIdle  time.Duration
-	Start    string
-	Count    int64
-	Consumer string
-}
-
-func (c cmdable) XAutoClaim(ctx context.Context, a *XAutoClaimArgs) *XAutoClaimCmd {
-	args := xAutoClaimArgs(ctx, a)
-	cmd := NewXAutoClaimCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XAutoClaimJustID(ctx context.Context, a *XAutoClaimArgs) *XAutoClaimJustIDCmd {
-	args := xAutoClaimArgs(ctx, a)
-	args = append(args, "justid")
-	cmd := NewXAutoClaimJustIDCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func xAutoClaimArgs(ctx context.Context, a *XAutoClaimArgs) []interface{} {
-	args := make([]interface{}, 0, 8)
-	args = append(args, "xautoclaim", a.Stream, a.Group, a.Consumer, formatMs(ctx, a.MinIdle), a.Start)
-	if a.Count > 0 {
-		args = append(args, "count", a.Count)
-	}
-	return args
-}
-
-type XClaimArgs struct {
-	Stream   string
-	Group    string
-	Consumer string
-	MinIdle  time.Duration
-	Messages []string
-}
-
-func (c cmdable) XClaim(ctx context.Context, a *XClaimArgs) *XMessageSliceCmd {
-	args := xClaimArgs(a)
-	cmd := NewXMessageSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XClaimJustID(ctx context.Context, a *XClaimArgs) *StringSliceCmd {
-	args := xClaimArgs(a)
-	args = append(args, "justid")
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func xClaimArgs(a *XClaimArgs) []interface{} {
-	args := make([]interface{}, 0, 5+len(a.Messages))
-	args = append(args,
-		"xclaim",
-		a.Stream,
-		a.Group, a.Consumer,
-		int64(a.MinIdle/time.Millisecond))
-	for _, id := range a.Messages {
-		args = append(args, id)
-	}
-	return args
-}
-
-// xTrim If approx is true, add the "~" parameter, otherwise it is the default "=" (redis default).
-// example:
-//		XTRIM key MAXLEN/MINID threshold LIMIT limit.
-//		XTRIM key MAXLEN/MINID ~ threshold LIMIT limit.
-// The redis-server version is lower than 6.2, please set limit to 0.
-func (c cmdable) xTrim(
-	ctx context.Context, key, strategy string,
-	approx bool, threshold interface{}, limit int64,
-) *IntCmd {
-	args := make([]interface{}, 0, 7)
-	args = append(args, "xtrim", key, strategy)
-	if approx {
-		args = append(args, "~")
-	}
-	args = append(args, threshold)
-	if limit > 0 {
-		args = append(args, "limit", limit)
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// Deprecated: use XTrimMaxLen, remove in v9.
-func (c cmdable) XTrim(ctx context.Context, key string, maxLen int64) *IntCmd {
-	return c.xTrim(ctx, key, "maxlen", false, maxLen, 0)
-}
-
-// Deprecated: use XTrimMaxLenApprox, remove in v9.
-func (c cmdable) XTrimApprox(ctx context.Context, key string, maxLen int64) *IntCmd {
-	return c.xTrim(ctx, key, "maxlen", true, maxLen, 0)
-}
-
-// XTrimMaxLen No `~` rules are used, `limit` cannot be used.
-// cmd: XTRIM key MAXLEN maxLen
-func (c cmdable) XTrimMaxLen(ctx context.Context, key string, maxLen int64) *IntCmd {
-	return c.xTrim(ctx, key, "maxlen", false, maxLen, 0)
-}
-
-// XTrimMaxLenApprox LIMIT has a bug, please confirm it and use it.
-// issue: https://github.com/redis/redis/issues/9046
-// cmd: XTRIM key MAXLEN ~ maxLen LIMIT limit
-func (c cmdable) XTrimMaxLenApprox(ctx context.Context, key string, maxLen, limit int64) *IntCmd {
-	return c.xTrim(ctx, key, "maxlen", true, maxLen, limit)
-}
-
-// XTrimMinID No `~` rules are used, `limit` cannot be used.
-// cmd: XTRIM key MINID minID
-func (c cmdable) XTrimMinID(ctx context.Context, key string, minID string) *IntCmd {
-	return c.xTrim(ctx, key, "minid", false, minID, 0)
-}
-
-// XTrimMinIDApprox LIMIT has a bug, please confirm it and use it.
-// issue: https://github.com/redis/redis/issues/9046
-// cmd: XTRIM key MINID ~ minID LIMIT limit
-func (c cmdable) XTrimMinIDApprox(ctx context.Context, key string, minID string, limit int64) *IntCmd {
-	return c.xTrim(ctx, key, "minid", true, minID, limit)
-}
-
-func (c cmdable) XInfoConsumers(ctx context.Context, key string, group string) *XInfoConsumersCmd {
-	cmd := NewXInfoConsumersCmd(ctx, key, group)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XInfoGroups(ctx context.Context, key string) *XInfoGroupsCmd {
-	cmd := NewXInfoGroupsCmd(ctx, key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) XInfoStream(ctx context.Context, key string) *XInfoStreamCmd {
-	cmd := NewXInfoStreamCmd(ctx, key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// XInfoStreamFull XINFO STREAM FULL [COUNT count]
-// redis-server >= 6.0.
-func (c cmdable) XInfoStreamFull(ctx context.Context, key string, count int) *XInfoStreamFullCmd {
-	args := make([]interface{}, 0, 6)
-	args = append(args, "xinfo", "stream", key, "full")
-	if count > 0 {
-		args = append(args, "count", count)
-	}
-	cmd := NewXInfoStreamFullCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-// Z represents sorted set member.
-type Z struct {
-	Score  float64
-	Member interface{}
-}
-
-// ZWithKey represents sorted set member including the name of the key where it was popped.
-type ZWithKey struct {
-	Z
-	Key string
-}
-
-// ZStore is used as an arg to ZInter/ZInterStore and ZUnion/ZUnionStore.
-type ZStore struct {
-	Keys    []string
-	Weights []float64
-	// Can be SUM, MIN or MAX.
-	Aggregate string
-}
-
-func (z ZStore) len() (n int) {
-	n = len(z.Keys)
-	if len(z.Weights) > 0 {
-		n += 1 + len(z.Weights)
-	}
-	if z.Aggregate != "" {
-		n += 2
-	}
-	return n
-}
-
-func (z ZStore) appendArgs(args []interface{}) []interface{} {
-	for _, key := range z.Keys {
-		args = append(args, key)
-	}
-	if len(z.Weights) > 0 {
-		args = append(args, "weights")
-		for _, weights := range z.Weights {
-			args = append(args, weights)
-		}
-	}
-	if z.Aggregate != "" {
-		args = append(args, "aggregate", z.Aggregate)
-	}
-	return args
-}
-
-// BZPopMax Redis `BZPOPMAX key [key ...] timeout` command.
-func (c cmdable) BZPopMax(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd {
-	args := make([]interface{}, 1+len(keys)+1)
-	args[0] = "bzpopmax"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	args[len(args)-1] = formatSec(ctx, timeout)
-	cmd := NewZWithKeyCmd(ctx, args...)
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// BZPopMin Redis `BZPOPMIN key [key ...] timeout` command.
-func (c cmdable) BZPopMin(ctx context.Context, timeout time.Duration, keys ...string) *ZWithKeyCmd {
-	args := make([]interface{}, 1+len(keys)+1)
-	args[0] = "bzpopmin"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	args[len(args)-1] = formatSec(ctx, timeout)
-	cmd := NewZWithKeyCmd(ctx, args...)
-	cmd.setReadTimeout(timeout)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZAddArgs WARN: The GT, LT and NX options are mutually exclusive.
-type ZAddArgs struct {
-	NX      bool
-	XX      bool
-	LT      bool
-	GT      bool
-	Ch      bool
-	Members []Z
-}
-
-func (c cmdable) zAddArgs(key string, args ZAddArgs, incr bool) []interface{} {
-	a := make([]interface{}, 0, 6+2*len(args.Members))
-	a = append(a, "zadd", key)
-
-	// The GT, LT and NX options are mutually exclusive.
-	if args.NX {
-		a = append(a, "nx")
-	} else {
-		if args.XX {
-			a = append(a, "xx")
-		}
-		if args.GT {
-			a = append(a, "gt")
-		} else if args.LT {
-			a = append(a, "lt")
-		}
-	}
-	if args.Ch {
-		a = append(a, "ch")
-	}
-	if incr {
-		a = append(a, "incr")
-	}
-	for _, m := range args.Members {
-		a = append(a, m.Score)
-		a = append(a, m.Member)
-	}
-	return a
-}
-
-func (c cmdable) ZAddArgs(ctx context.Context, key string, args ZAddArgs) *IntCmd {
-	cmd := NewIntCmd(ctx, c.zAddArgs(key, args, false)...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZAddArgsIncr(ctx context.Context, key string, args ZAddArgs) *FloatCmd {
-	cmd := NewFloatCmd(ctx, c.zAddArgs(key, args, true)...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// TODO: Compatible with v8 api, will be removed in v9.
-func (c cmdable) zAdd(ctx context.Context, key string, args ZAddArgs, members ...*Z) *IntCmd {
-	args.Members = make([]Z, len(members))
-	for i, m := range members {
-		args.Members[i] = *m
-	}
-	cmd := NewIntCmd(ctx, c.zAddArgs(key, args, false)...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZAdd Redis `ZADD key score member [score member ...]` command.
-func (c cmdable) ZAdd(ctx context.Context, key string, members ...*Z) *IntCmd {
-	return c.zAdd(ctx, key, ZAddArgs{}, members...)
-}
-
-// ZAddNX Redis `ZADD key NX score member [score member ...]` command.
-func (c cmdable) ZAddNX(ctx context.Context, key string, members ...*Z) *IntCmd {
-	return c.zAdd(ctx, key, ZAddArgs{
-		NX: true,
-	}, members...)
-}
-
-// ZAddXX Redis `ZADD key XX score member [score member ...]` command.
-func (c cmdable) ZAddXX(ctx context.Context, key string, members ...*Z) *IntCmd {
-	return c.zAdd(ctx, key, ZAddArgs{
-		XX: true,
-	}, members...)
-}
-
-// ZAddCh Redis `ZADD key CH score member [score member ...]` command.
-// Deprecated: Use
-//		client.ZAddArgs(ctx, ZAddArgs{
-//			Ch: true,
-//			Members: []Z,
-//		})
-//	remove in v9.
-func (c cmdable) ZAddCh(ctx context.Context, key string, members ...*Z) *IntCmd {
-	return c.zAdd(ctx, key, ZAddArgs{
-		Ch: true,
-	}, members...)
-}
-
-// ZAddNXCh Redis `ZADD key NX CH score member [score member ...]` command.
-// Deprecated: Use
-//		client.ZAddArgs(ctx, ZAddArgs{
-//			NX: true,
-//			Ch: true,
-//			Members: []Z,
-//		})
-//	remove in v9.
-func (c cmdable) ZAddNXCh(ctx context.Context, key string, members ...*Z) *IntCmd {
-	return c.zAdd(ctx, key, ZAddArgs{
-		NX: true,
-		Ch: true,
-	}, members...)
-}
-
-// ZAddXXCh Redis `ZADD key XX CH score member [score member ...]` command.
-// Deprecated: Use
-//		client.ZAddArgs(ctx, ZAddArgs{
-//			XX: true,
-//			Ch: true,
-//			Members: []Z,
-//		})
-//	remove in v9.
-func (c cmdable) ZAddXXCh(ctx context.Context, key string, members ...*Z) *IntCmd {
-	return c.zAdd(ctx, key, ZAddArgs{
-		XX: true,
-		Ch: true,
-	}, members...)
-}
-
-// ZIncr Redis `ZADD key INCR score member` command.
-// Deprecated: Use
-//		client.ZAddArgsIncr(ctx, ZAddArgs{
-//			Members: []Z,
-//		})
-//	remove in v9.
-func (c cmdable) ZIncr(ctx context.Context, key string, member *Z) *FloatCmd {
-	return c.ZAddArgsIncr(ctx, key, ZAddArgs{
-		Members: []Z{*member},
-	})
-}
-
-// ZIncrNX Redis `ZADD key NX INCR score member` command.
-// Deprecated: Use
-//		client.ZAddArgsIncr(ctx, ZAddArgs{
-//			NX: true,
-//			Members: []Z,
-//		})
-//	remove in v9.
-func (c cmdable) ZIncrNX(ctx context.Context, key string, member *Z) *FloatCmd {
-	return c.ZAddArgsIncr(ctx, key, ZAddArgs{
-		NX:      true,
-		Members: []Z{*member},
-	})
-}
-
-// ZIncrXX Redis `ZADD key XX INCR score member` command.
-// Deprecated: Use
-//		client.ZAddArgsIncr(ctx, ZAddArgs{
-//			XX: true,
-//			Members: []Z,
-//		})
-//	remove in v9.
-func (c cmdable) ZIncrXX(ctx context.Context, key string, member *Z) *FloatCmd {
-	return c.ZAddArgsIncr(ctx, key, ZAddArgs{
-		XX:      true,
-		Members: []Z{*member},
-	})
-}
-
-func (c cmdable) ZCard(ctx context.Context, key string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zcard", key)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZCount(ctx context.Context, key, min, max string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zcount", key, min, max)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZLexCount(ctx context.Context, key, min, max string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zlexcount", key, min, max)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZIncrBy(ctx context.Context, key string, increment float64, member string) *FloatCmd {
-	cmd := NewFloatCmd(ctx, "zincrby", key, increment, member)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZInterStore(ctx context.Context, destination string, store *ZStore) *IntCmd {
-	args := make([]interface{}, 0, 3+store.len())
-	args = append(args, "zinterstore", destination, len(store.Keys))
-	args = store.appendArgs(args)
-	cmd := NewIntCmd(ctx, args...)
-	cmd.SetFirstKeyPos(3)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZInter(ctx context.Context, store *ZStore) *StringSliceCmd {
-	args := make([]interface{}, 0, 2+store.len())
-	args = append(args, "zinter", len(store.Keys))
-	args = store.appendArgs(args)
-	cmd := NewStringSliceCmd(ctx, args...)
-	cmd.SetFirstKeyPos(2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZInterWithScores(ctx context.Context, store *ZStore) *ZSliceCmd {
-	args := make([]interface{}, 0, 3+store.len())
-	args = append(args, "zinter", len(store.Keys))
-	args = store.appendArgs(args)
-	args = append(args, "withscores")
-	cmd := NewZSliceCmd(ctx, args...)
-	cmd.SetFirstKeyPos(2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZMScore(ctx context.Context, key string, members ...string) *FloatSliceCmd {
-	args := make([]interface{}, 2+len(members))
-	args[0] = "zmscore"
-	args[1] = key
-	for i, member := range members {
-		args[2+i] = member
-	}
-	cmd := NewFloatSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZPopMax(ctx context.Context, key string, count ...int64) *ZSliceCmd {
-	args := []interface{}{
-		"zpopmax",
-		key,
-	}
-
-	switch len(count) {
-	case 0:
-		break
-	case 1:
-		args = append(args, count[0])
-	default:
-		panic("too many arguments")
-	}
-
-	cmd := NewZSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZPopMin(ctx context.Context, key string, count ...int64) *ZSliceCmd {
-	args := []interface{}{
-		"zpopmin",
-		key,
-	}
-
-	switch len(count) {
-	case 0:
-		break
-	case 1:
-		args = append(args, count[0])
-	default:
-		panic("too many arguments")
-	}
-
-	cmd := NewZSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZRangeArgs is all the options of the ZRange command.
-// In version> 6.2.0, you can replace the(cmd):
-//		ZREVRANGE,
-//		ZRANGEBYSCORE,
-//		ZREVRANGEBYSCORE,
-//		ZRANGEBYLEX,
-//		ZREVRANGEBYLEX.
-// Please pay attention to your redis-server version.
-//
-// Rev, ByScore, ByLex and Offset+Count options require redis-server 6.2.0 and higher.
-type ZRangeArgs struct {
-	Key string
-
-	// When the ByScore option is provided, the open interval(exclusive) can be set.
-	// By default, the score intervals specified by  and  are closed (inclusive).
-	// It is similar to the deprecated(6.2.0+) ZRangeByScore command.
-	// For example:
-	//		ZRangeArgs{
-	//			Key: 				"example-key",
-	//	 		Start: 				"(3",
-	//	 		Stop: 				8,
-	//			ByScore:			true,
-	//	 	}
-	// 	 	cmd: "ZRange example-key (3 8 ByScore"  (3 < score <= 8).
-	//
-	// For the ByLex option, it is similar to the deprecated(6.2.0+) ZRangeByLex command.
-	// You can set the  and  options as follows:
-	//		ZRangeArgs{
-	//			Key: 				"example-key",
-	//	 		Start: 				"[abc",
-	//	 		Stop: 				"(def",
-	//			ByLex:				true,
-	//	 	}
-	//		cmd: "ZRange example-key [abc (def ByLex"
-	//
-	// For normal cases (ByScore==false && ByLex==false),  and  should be set to the index range (int).
-	// You can read the documentation for more information: https://redis.io/commands/zrange
-	Start interface{}
-	Stop  interface{}
-
-	// The ByScore and ByLex options are mutually exclusive.
-	ByScore bool
-	ByLex   bool
-
-	Rev bool
-
-	// limit offset count.
-	Offset int64
-	Count  int64
-}
-
-func (z ZRangeArgs) appendArgs(args []interface{}) []interface{} {
-	// For Rev+ByScore/ByLex, we need to adjust the position of  and .
-	if z.Rev && (z.ByScore || z.ByLex) {
-		args = append(args, z.Key, z.Stop, z.Start)
-	} else {
-		args = append(args, z.Key, z.Start, z.Stop)
-	}
-
-	if z.ByScore {
-		args = append(args, "byscore")
-	} else if z.ByLex {
-		args = append(args, "bylex")
-	}
-	if z.Rev {
-		args = append(args, "rev")
-	}
-	if z.Offset != 0 || z.Count != 0 {
-		args = append(args, "limit", z.Offset, z.Count)
-	}
-	return args
-}
-
-func (c cmdable) ZRangeArgs(ctx context.Context, z ZRangeArgs) *StringSliceCmd {
-	args := make([]interface{}, 0, 9)
-	args = append(args, "zrange")
-	args = z.appendArgs(args)
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRangeArgsWithScores(ctx context.Context, z ZRangeArgs) *ZSliceCmd {
-	args := make([]interface{}, 0, 10)
-	args = append(args, "zrange")
-	args = z.appendArgs(args)
-	args = append(args, "withscores")
-	cmd := NewZSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd {
-	return c.ZRangeArgs(ctx, ZRangeArgs{
-		Key:   key,
-		Start: start,
-		Stop:  stop,
-	})
-}
-
-func (c cmdable) ZRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd {
-	return c.ZRangeArgsWithScores(ctx, ZRangeArgs{
-		Key:   key,
-		Start: start,
-		Stop:  stop,
-	})
-}
-
-type ZRangeBy struct {
-	Min, Max      string
-	Offset, Count int64
-}
-
-func (c cmdable) zRangeBy(ctx context.Context, zcmd, key string, opt *ZRangeBy, withScores bool) *StringSliceCmd {
-	args := []interface{}{zcmd, key, opt.Min, opt.Max}
-	if withScores {
-		args = append(args, "withscores")
-	}
-	if opt.Offset != 0 || opt.Count != 0 {
-		args = append(
-			args,
-			"limit",
-			opt.Offset,
-			opt.Count,
-		)
-	}
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd {
-	return c.zRangeBy(ctx, "zrangebyscore", key, opt, false)
-}
-
-func (c cmdable) ZRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd {
-	return c.zRangeBy(ctx, "zrangebylex", key, opt, false)
-}
-
-func (c cmdable) ZRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd {
-	args := []interface{}{"zrangebyscore", key, opt.Min, opt.Max, "withscores"}
-	if opt.Offset != 0 || opt.Count != 0 {
-		args = append(
-			args,
-			"limit",
-			opt.Offset,
-			opt.Count,
-		)
-	}
-	cmd := NewZSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRangeStore(ctx context.Context, dst string, z ZRangeArgs) *IntCmd {
-	args := make([]interface{}, 0, 10)
-	args = append(args, "zrangestore", dst)
-	args = z.appendArgs(args)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRank(ctx context.Context, key, member string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zrank", key, member)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRem(ctx context.Context, key string, members ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(members))
-	args[0] = "zrem"
-	args[1] = key
-	args = appendArgs(args, members)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRemRangeByRank(ctx context.Context, key string, start, stop int64) *IntCmd {
-	cmd := NewIntCmd(
-		ctx,
-		"zremrangebyrank",
-		key,
-		start,
-		stop,
-	)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRemRangeByScore(ctx context.Context, key, min, max string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zremrangebyscore", key, min, max)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRemRangeByLex(ctx context.Context, key, min, max string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zremrangebylex", key, min, max)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRevRange(ctx context.Context, key string, start, stop int64) *StringSliceCmd {
-	cmd := NewStringSliceCmd(ctx, "zrevrange", key, start, stop)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRevRangeWithScores(ctx context.Context, key string, start, stop int64) *ZSliceCmd {
-	cmd := NewZSliceCmd(ctx, "zrevrange", key, start, stop, "withscores")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) zRevRangeBy(ctx context.Context, zcmd, key string, opt *ZRangeBy) *StringSliceCmd {
-	args := []interface{}{zcmd, key, opt.Max, opt.Min}
-	if opt.Offset != 0 || opt.Count != 0 {
-		args = append(
-			args,
-			"limit",
-			opt.Offset,
-			opt.Count,
-		)
-	}
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRevRangeByScore(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd {
-	return c.zRevRangeBy(ctx, "zrevrangebyscore", key, opt)
-}
-
-func (c cmdable) ZRevRangeByLex(ctx context.Context, key string, opt *ZRangeBy) *StringSliceCmd {
-	return c.zRevRangeBy(ctx, "zrevrangebylex", key, opt)
-}
-
-func (c cmdable) ZRevRangeByScoreWithScores(ctx context.Context, key string, opt *ZRangeBy) *ZSliceCmd {
-	args := []interface{}{"zrevrangebyscore", key, opt.Max, opt.Min, "withscores"}
-	if opt.Offset != 0 || opt.Count != 0 {
-		args = append(
-			args,
-			"limit",
-			opt.Offset,
-			opt.Count,
-		)
-	}
-	cmd := NewZSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZRevRank(ctx context.Context, key, member string) *IntCmd {
-	cmd := NewIntCmd(ctx, "zrevrank", key, member)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZScore(ctx context.Context, key, member string) *FloatCmd {
-	cmd := NewFloatCmd(ctx, "zscore", key, member)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZUnion(ctx context.Context, store ZStore) *StringSliceCmd {
-	args := make([]interface{}, 0, 2+store.len())
-	args = append(args, "zunion", len(store.Keys))
-	args = store.appendArgs(args)
-	cmd := NewStringSliceCmd(ctx, args...)
-	cmd.SetFirstKeyPos(2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZUnionWithScores(ctx context.Context, store ZStore) *ZSliceCmd {
-	args := make([]interface{}, 0, 3+store.len())
-	args = append(args, "zunion", len(store.Keys))
-	args = store.appendArgs(args)
-	args = append(args, "withscores")
-	cmd := NewZSliceCmd(ctx, args...)
-	cmd.SetFirstKeyPos(2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ZUnionStore(ctx context.Context, dest string, store *ZStore) *IntCmd {
-	args := make([]interface{}, 0, 3+store.len())
-	args = append(args, "zunionstore", dest, len(store.Keys))
-	args = store.appendArgs(args)
-	cmd := NewIntCmd(ctx, args...)
-	cmd.SetFirstKeyPos(3)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZRandMember redis-server version >= 6.2.0.
-func (c cmdable) ZRandMember(ctx context.Context, key string, count int, withScores bool) *StringSliceCmd {
-	args := make([]interface{}, 0, 4)
-
-	// Although count=0 is meaningless, redis accepts count=0.
-	args = append(args, "zrandmember", key, count)
-	if withScores {
-		args = append(args, "withscores")
-	}
-
-	cmd := NewStringSliceCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZDiff redis-server version >= 6.2.0.
-func (c cmdable) ZDiff(ctx context.Context, keys ...string) *StringSliceCmd {
-	args := make([]interface{}, 2+len(keys))
-	args[0] = "zdiff"
-	args[1] = len(keys)
-	for i, key := range keys {
-		args[i+2] = key
-	}
-
-	cmd := NewStringSliceCmd(ctx, args...)
-	cmd.SetFirstKeyPos(2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZDiffWithScores redis-server version >= 6.2.0.
-func (c cmdable) ZDiffWithScores(ctx context.Context, keys ...string) *ZSliceCmd {
-	args := make([]interface{}, 3+len(keys))
-	args[0] = "zdiff"
-	args[1] = len(keys)
-	for i, key := range keys {
-		args[i+2] = key
-	}
-	args[len(keys)+2] = "withscores"
-
-	cmd := NewZSliceCmd(ctx, args...)
-	cmd.SetFirstKeyPos(2)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ZDiffStore redis-server version >=6.2.0.
-func (c cmdable) ZDiffStore(ctx context.Context, destination string, keys ...string) *IntCmd {
-	args := make([]interface{}, 0, 3+len(keys))
-	args = append(args, "zdiffstore", destination, len(keys))
-	for _, key := range keys {
-		args = append(args, key)
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) PFAdd(ctx context.Context, key string, els ...interface{}) *IntCmd {
-	args := make([]interface{}, 2, 2+len(els))
-	args[0] = "pfadd"
-	args[1] = key
-	args = appendArgs(args, els)
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) PFCount(ctx context.Context, keys ...string) *IntCmd {
-	args := make([]interface{}, 1+len(keys))
-	args[0] = "pfcount"
-	for i, key := range keys {
-		args[1+i] = key
-	}
-	cmd := NewIntCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) PFMerge(ctx context.Context, dest string, keys ...string) *StatusCmd {
-	args := make([]interface{}, 2+len(keys))
-	args[0] = "pfmerge"
-	args[1] = dest
-	for i, key := range keys {
-		args[2+i] = key
-	}
-	cmd := NewStatusCmd(ctx, args...)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-//------------------------------------------------------------------------------
-
-func (c cmdable) BgRewriteAOF(ctx context.Context) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "bgrewriteaof")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) BgSave(ctx context.Context) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "bgsave")
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-func (c cmdable) ClientKill(ctx context.Context, ipPort string) *StatusCmd {
-	cmd := NewStatusCmd(ctx, "client", "kill", ipPort)
-	_ = c(ctx, cmd)
-	return cmd
-}
-
-// ClientKillByFilter is new style syntax, while the ClientKill is old
-//
-//   CLIENT KILL