diff --git a/.gitignore b/.gitignore index 01df87f2dc..f35e736b20 100644 --- a/.gitignore +++ b/.gitignore @@ -46,4 +46,122 @@ vendor .direnv .terraform/ -*.tfvars \ No newline at end of file +*.tfvars + +# python +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ +.vscode diff --git a/Gopkg.lock b/Gopkg.lock deleted file mode 100644 index 18004a22c7..0000000000 --- a/Gopkg.lock +++ /dev/null @@ -1,280 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
- - -[[projects]] - digest = "1:abeb38ade3f32a92943e5be54f55ed6d6e3b6602761d74b4aab4c9dd45c18abd" - name = "github.com/fsnotify/fsnotify" - packages = ["."] - pruneopts = "UT" - revision = "c2828203cd70a50dcccfb2761f8b1f8ceef9a8e9" - version = "v1.4.7" - -[[projects]] - digest = "1:2cd7915ab26ede7d95b8749e6b1f933f1c6d5398030684e6505940a10f31cfda" - name = "github.com/ghodss/yaml" - packages = ["."] - pruneopts = "UT" - revision = "0ca9ea5df5451ffdf184b4428c902747c2c11cd7" - version = "v1.0.0" - -[[projects]] - digest = "1:54a9254d3845e3a80d4957975abfc5e771a49d08379509ec4f9c99c1ccbe13a9" - name = "github.com/golang/protobuf" - packages = [ - "proto", - "protoc-gen-go", - "protoc-gen-go/descriptor", - "protoc-gen-go/generator", - "protoc-gen-go/generator/internal/remap", - "protoc-gen-go/grpc", - "protoc-gen-go/plugin", - "ptypes", - "ptypes/any", - "ptypes/duration", - "ptypes/empty", - "ptypes/timestamp", - ] - pruneopts = "UT" - revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5" - version = "v1.2.0" - -[[projects]] - digest = "1:2e3c336fc7fde5c984d2841455a658a6d626450b1754a854b3b32e7a8f49a07a" - name = "github.com/google/go-cmp" - packages = [ - "cmp", - "cmp/internal/diff", - "cmp/internal/function", - "cmp/internal/value", - ] - pruneopts = "UT" - revision = "3af367b6b30c263d47e8895973edcca9a49cf029" - version = "v0.2.0" - -[[projects]] - digest = "1:c0d19ab64b32ce9fe5cf4ddceba78d5bc9807f0016db6b1183599da3dcc24d10" - name = "github.com/hashicorp/hcl" - packages = [ - ".", - "hcl/ast", - "hcl/parser", - "hcl/printer", - "hcl/scanner", - "hcl/strconv", - "hcl/token", - "json/parser", - "json/scanner", - "json/token", - ] - pruneopts = "UT" - revision = "8cb6e5b959231cc1119e43259c4a608f9c51a241" - version = "v1.0.0" - -[[projects]] - digest = "1:870d441fe217b8e689d7949fef6e43efbc787e50f200cb1e70dbca9204a1d6be" - name = "github.com/inconshreveable/mousetrap" - packages = ["."] - pruneopts = "UT" - revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" - version = "v1.0" - -[[projects]] - digest = "1:c568d7727aa262c32bdf8a3f7db83614f7af0ed661474b24588de635c20024c7" - name = "github.com/magiconair/properties" - packages = ["."] - pruneopts = "UT" - revision = "c2353362d570a7bfa228149c62842019201cfb71" - version = "v1.8.0" - -[[projects]] - digest = "1:78bbb1ba5b7c3f2ed0ea1eab57bdd3859aec7e177811563edc41198a760b06af" - name = "github.com/mitchellh/go-homedir" - packages = ["."] - pruneopts = "UT" - revision = "ae18d6b8b3205b561c79e8e5f69bff09736185f4" - version = "v1.0.0" - -[[projects]] - digest = "1:53bc4cd4914cd7cd52139990d5170d6dc99067ae31c56530621b18b35fc30318" - name = "github.com/mitchellh/mapstructure" - packages = ["."] - pruneopts = "UT" - revision = "3536a929edddb9a5b34bd6861dc4a9647cb459fe" - version = "v1.1.2" - -[[projects]] - digest = "1:95741de3af260a92cc5c7f3f3061e85273f5a81b5db20d4bd68da74bd521675e" - name = "github.com/pelletier/go-toml" - packages = ["."] - pruneopts = "UT" - revision = "c01d1270ff3e442a8a57cddc1c92dc1138598194" - version = "v1.2.0" - -[[projects]] - digest = "1:6a4a11ba764a56d2758899ec6f3848d24698d48442ebce85ee7a3f63284526cd" - name = "github.com/spf13/afero" - packages = [ - ".", - "mem", - ] - pruneopts = "UT" - revision = "d40851caa0d747393da1ffb28f7f9d8b4eeffebd" - version = "v1.1.2" - -[[projects]] - digest = "1:516e71bed754268937f57d4ecb190e01958452336fa73dbac880894164e91c1f" - name = "github.com/spf13/cast" - packages = ["."] - pruneopts = "UT" - revision = "8965335b8c7107321228e3e3702cab9832751bac" - version = "v1.2.0" - -[[projects]] - digest = 
"1:645cabccbb4fa8aab25a956cbcbdf6a6845ca736b2c64e197ca7cbb9d210b939" - name = "github.com/spf13/cobra" - packages = ["."] - pruneopts = "UT" - revision = "ef82de70bb3f60c65fb8eebacbb2d122ef517385" - version = "v0.0.3" - -[[projects]] - digest = "1:68ea4e23713989dc20b1bded5d9da2c5f9be14ff9885beef481848edd18c26cb" - name = "github.com/spf13/jwalterweatherman" - packages = ["."] - pruneopts = "UT" - revision = "4a4406e478ca629068e7768fc33f3f044173c0a6" - version = "v1.0.0" - -[[projects]] - digest = "1:c1b1102241e7f645bc8e0c22ae352e8f0dc6484b6cb4d132fa9f24174e0119e2" - name = "github.com/spf13/pflag" - packages = ["."] - pruneopts = "UT" - revision = "298182f68c66c05229eb03ac171abe6e309ee79a" - version = "v1.0.3" - -[[projects]] - digest = "1:214775c11fd26da94a100111a62daa25339198a4f9c57cb4aab352da889f5b93" - name = "github.com/spf13/viper" - packages = ["."] - pruneopts = "UT" - revision = "2c12c60302a5a0e62ee102ca9bc996277c2f64f5" - version = "v1.2.1" - -[[projects]] - branch = "master" - digest = "1:505dbee0833715a72a529bb57c354826ad42a4496fad787fa143699b4de1a6d0" - name = "golang.org/x/net" - packages = [ - "context", - "http/httpguts", - "http2", - "http2/hpack", - "idna", - "internal/timeseries", - "trace", - ] - pruneopts = "UT" - revision = "04a2e542c03f1d053ab3e4d6e5abcd4b66e2be8e" - -[[projects]] - branch = "master" - digest = "1:d99359caa06128f971c1b1373e8c6c4221ed74f07840e66d660006f0c2df0ed6" - name = "golang.org/x/sys" - packages = ["unix"] - pruneopts = "UT" - revision = "8b8824e799c8fc8e6d900615cc6c761ce6c26c67" - -[[projects]] - digest = "1:a2ab62866c75542dd18d2b069fec854577a20211d7c0ea6ae746072a1dccdd18" - name = "golang.org/x/text" - packages = [ - "collate", - "collate/build", - "internal/colltab", - "internal/gen", - "internal/tag", - "internal/triegen", - "internal/ucd", - "language", - "secure/bidirule", - "transform", - "unicode/bidi", - "unicode/cldr", - "unicode/norm", - "unicode/rangetable", - ] - pruneopts = "UT" - revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" - version = "v0.3.0" - -[[projects]] - branch = "master" - digest = "1:56b0bca90b7e5d1facf5fbdacba23e4e0ce069d25381b8e2f70ef1e7ebfb9c1a" - name = "google.golang.org/genproto" - packages = ["googleapis/rpc/status"] - pruneopts = "UT" - revision = "94acd270e44e65579b9ee3cdab25034d33fed608" - -[[projects]] - digest = "1:ab8e92d746fb5c4c18846b0879842ac8e53b3d352449423d0924a11f1020ae1b" - name = "google.golang.org/grpc" - packages = [ - ".", - "balancer", - "balancer/base", - "balancer/roundrobin", - "codes", - "connectivity", - "credentials", - "encoding", - "encoding/proto", - "grpclog", - "internal", - "internal/backoff", - "internal/channelz", - "internal/envconfig", - "internal/grpcrand", - "internal/transport", - "keepalive", - "metadata", - "naming", - "peer", - "resolver", - "resolver/dns", - "resolver/passthrough", - "stats", - "status", - "tap", - ] - pruneopts = "UT" - revision = "8dea3dc473e90c8179e519d91302d0597c0ca1d1" - version = "v1.15.0" - -[[projects]] - digest = "1:342378ac4dcb378a5448dd723f0784ae519383532f5e70ade24132c4c8693202" - name = "gopkg.in/yaml.v2" - packages = ["."] - pruneopts = "UT" - revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183" - version = "v2.2.1" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - input-imports = [ - "github.com/ghodss/yaml", - "github.com/golang/protobuf/proto", - "github.com/golang/protobuf/protoc-gen-go", - "github.com/golang/protobuf/ptypes/empty", - "github.com/golang/protobuf/ptypes/timestamp", - "github.com/google/go-cmp/cmp", - 
"github.com/mitchellh/go-homedir", - "github.com/spf13/cobra", - "github.com/spf13/viper", - "golang.org/x/net/context", - "google.golang.org/grpc", - "gopkg.in/yaml.v2", - ] - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml deleted file mode 100644 index c06fd9fc91..0000000000 --- a/Gopkg.toml +++ /dev/null @@ -1,33 +0,0 @@ -required = ["github.com/golang/protobuf/protoc-gen-go"] - -[[constraint]] - name = "github.com/ghodss/yaml" - version = "1.0.0" - -[[constraint]] - name = "github.com/golang/protobuf" - version = "1.2.0" - -[[constraint]] - name = "github.com/google/go-cmp" - version = "0.2.0" - -[[constraint]] - name = "github.com/mitchellh/go-homedir" - version = "1.0.0" - -[[constraint]] - name = "github.com/spf13/cobra" - version = "0.0.3" - -[[constraint]] - name = "github.com/spf13/viper" - version = "1.2.1" - -[[constraint]] - name = "google.golang.org/grpc" - version = "1.15.0" - -[prune] - go-tests = true - unused-packages = true diff --git a/Makefile b/Makefile index 888cfea426..84b787c2c0 100644 --- a/Makefile +++ b/Makefile @@ -1,15 +1,33 @@ -VERSION_FILE=VERSION -FEAST_VERSION=`cat $(VERSION_FILE)` +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +VERSION_FILE = VERSION +FEAST_VERSION = `cat $(VERSION_FILE)` test: mvn test -build-deps: +test-integration: + $(MAKE) -C testing/integration test-integration TYPE=$(TYPE) ID=$(ID) + +build-proto: $(MAKE) -C protos gen-go - dep ensure build-cli: - $(MAKE) build-deps + $(MAKE) build-proto $(MAKE) -C cli build-all build-java: diff --git a/charts/feast/templates/core-service.yaml b/charts/feast/templates/core-service.yaml index 965ee538d1..9f8f1209a2 100644 --- a/charts/feast/templates/core-service.yaml +++ b/charts/feast/templates/core-service.yaml @@ -14,12 +14,10 @@ spec: type: LoadBalancer {{- if .Values.core.service.extIPAdr }} loadBalancerIP: {{ .Values.core.service.extIPAdr }} + {{- end }} {{- if .Values.core.service.loadBalancerSourceRanges }} loadBalancerSourceRanges: - {{- range .Values.core.service.loadBalancerSourceRanges }} - - {{ . }} - {{- end }} - {{- end }} +{{ toYaml .Values.core.service.loadBalancerSourceRanges | indent 2 }} {{- end }} ports: - name: http diff --git a/charts/feast/templates/serving-service.yaml b/charts/feast/templates/serving-service.yaml index ee1ecdfb0b..81520ba471 100644 --- a/charts/feast/templates/serving-service.yaml +++ b/charts/feast/templates/serving-service.yaml @@ -14,12 +14,10 @@ spec: type: LoadBalancer {{- if .Values.serving.service.extIPAdr }} loadBalancerIP: {{ .Values.serving.service.extIPAdr }} + {{- end }} {{- if .Values.serving.service.loadBalancerSourceRanges }} loadBalancerSourceRanges: - {{- range .Values.serving.service.loadBalancerSourceRanges }} - - {{ . 
}} - {{- end }} - {{- end }} +{{ toYaml .Values.serving.service.loadBalancerSourceRanges | indent 2 }} {{- end }} ports: - name: grpc diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml index fde0f7e7d9..f8cb3dfe44 100644 --- a/charts/feast/values.yaml +++ b/charts/feast/values.yaml @@ -24,6 +24,9 @@ core: http: port: 80 targetPort: 8080 + # loadBalancerSourceRanges sets the accepted IP ranges for firewall ingress rule + # this firewall rule is usually created when the service type is "LoadBalancer" + # loadBalancerSourceRanges: ["10.0.0.0/8"] jobs: workspace: "/tmp" runner: DirectRunner @@ -87,6 +90,9 @@ serving: http: port: 80 targetPort: 8080 + # loadBalancerSourceRanges sets the accepted IP ranges for firewall ingress rule + # this firewall rule is usually created when the service type is "LoadBalancer" + # loadBalancerSourceRanges: ["10.0.0.0/8"] jaeger: enabled: false diff --git a/cli/feast/cmd/apply_test.go b/cli/feast/cmd/apply_test.go new file mode 100644 index 0000000000..7a8de3aaab --- /dev/null +++ b/cli/feast/cmd/apply_test.go @@ -0,0 +1,146 @@ +// Copyright 2018 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "context" + "github.com/gojek/feast/cli/feast/cmd/mock" + "github.com/golang/mock/gomock" + "testing" + + "github.com/gojek/feast/protos/generated/go/feast/core" +) + +func Test_apply(t *testing.T) { + mockCore := mock.NewMockCoreServiceClient(gomock.NewController(t)) + mockCore.EXPECT().ApplyEntity(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() + mockCore.EXPECT().ApplyFeature(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() + mockCore.EXPECT().ApplyFeatureGroup(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() + mockCore.EXPECT().ApplyStorage(gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() + + type args struct { + ctx context.Context + coreCli core.CoreServiceClient + resource string + fileLocation string + } + tests := []struct { + name string + args args + want string + wantErr bool + }{ + { + name: "test apply invalid resource", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "invalidResource", + fileLocation: "testdata/valid_entity.yaml", + }, + want: "", + wantErr: true, + }, + { + name: "test apply entity", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "entity", + fileLocation: "testdata/valid_entity.yaml", + }, + want: "myentity", + wantErr: false, + }, + { + name: "test apply entity with non-existent file", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "entity", + fileLocation: "testdata/file_not_exists.yaml", + }, + want: "", + wantErr: true, + }, + { + name: "test apply entity with no tag", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "entity", + fileLocation: "testdata/valid_entity_no_tag.yaml", + }, + want: "myentity", + wantErr: false, + }, + { + name: "test apply invalid syntax in entity yaml", + args: args{ + ctx: context.Background(), + 
coreCli: mockCore, + resource: "entity", + fileLocation: "testdata/invalid_entity.yaml", + }, + want: "", + wantErr: true, + }, + { + name: "test apply feature", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "feature", + fileLocation: "testdata/valid_feature.yaml", + }, + want: "myentity.feature_bool_redis1", + wantErr: false, + }, + { + name: "test apply feature group", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "featureGroup", + fileLocation: "testdata/valid_feature_group.yaml", + }, + want: "my_fg", + wantErr: false, + }, + { + name: "test apply storage", + args: args{ + ctx: context.Background(), + coreCli: mockCore, + resource: "storage", + fileLocation: "testdata/valid_storage.yaml", + }, + want: "BIGQUERY1", + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := apply(tt.args.ctx, tt.args.coreCli, tt.args.resource, tt.args.fileLocation) + if (err != nil) != tt.wantErr { + t.Errorf("apply() error = %v, wantErr %v", err, tt.wantErr) + return + } + if got != tt.want { + t.Errorf("apply() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/cli/feast/cmd/config_test.go b/cli/feast/cmd/config_test.go new file mode 100644 index 0000000000..5c6010c4e1 --- /dev/null +++ b/cli/feast/cmd/config_test.go @@ -0,0 +1,42 @@ +// Copyright 2018 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cmd + +import ( + "github.com/mitchellh/go-homedir" + "os" + "testing" +) + +func Test_initConfig(t *testing.T) { + tests := []struct { + name string + }{ + {"test creation of default config at $HOME/.feast"}, + } + + defaultFeastConfigFile, _ := homedir.Expand("~/.feast") + _ = os.Remove(defaultFeastConfigFile) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + initConfig() + _, err := os.Stat(defaultFeastConfigFile) + if os.IsNotExist(err) { + t.Errorf("initConfig should create config file at " + defaultFeastConfigFile) + } + }) + } +} diff --git a/cli/feast/cmd/list.go b/cli/feast/cmd/list.go index 684fb73ab4..ac4a067e4a 100644 --- a/cli/feast/cmd/list.go +++ b/cli/feast/cmd/list.go @@ -22,7 +22,7 @@ import ( "strings" "text/tabwriter" - "github.com/gojek/feast/cli/feast/pkg/util" + "github.com/gojek/feast/cli/feast/pkg/timeutil" "github.com/gojek/feast/protos/generated/go/feast/core" "github.com/golang/protobuf/ptypes/empty" @@ -144,7 +144,7 @@ func listJobs(ctx context.Context, cli core.JobServiceClient) error { fmt.Fprintf(w, "------\t----\t------\t------\t---\n") for _, job := range response.GetJobs() { fmt.Fprintf(w, strings.Join( - []string{job.Id, job.Type, job.Runner, job.Status, util.ParseAge(*job.Created)}, "\t")+"\n") + []string{job.Id, job.Type, job.Runner, job.Status, timeutil.DurationUntilNowInHumanFormat(*job.Created)}, "\t")+"\n") } w.Flush() return nil diff --git a/cli/feast/cmd/mock/README.md b/cli/feast/cmd/mock/README.md new file mode 100644 index 0000000000..3036d5856a --- /dev/null +++ b/cli/feast/cmd/mock/README.md @@ -0,0 +1,11 @@ +The mock gRPC client is generated using this library: +https://github.com/golang/mock + +An example to generate `core_service.go`: + +``` +cd $FEAST_REPO/cli/feast +mockgen --package mock github.com/gojek/feast/protos/generated/go/feast/core CoreServiceClient > cmd/mock/core_service.go +``` + +> Re-run `mockgen` whenever the protos for the services are updated. \ No newline at end of file diff --git a/cli/feast/cmd/mock/core_service.go b/cli/feast/cmd/mock/core_service.go new file mode 100644 index 0000000000..702712c847 --- /dev/null +++ b/cli/feast/cmd/mock/core_service.go @@ -0,0 +1,238 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/gojek/feast/protos/generated/go/feast/core (interfaces: CoreServiceClient) + +// Package mock is a generated GoMock package.
+package mock + +import ( + context "context" + core "github.com/gojek/feast/protos/generated/go/feast/core" + specs "github.com/gojek/feast/protos/generated/go/feast/specs" + gomock "github.com/golang/mock/gomock" + empty "github.com/golang/protobuf/ptypes/empty" + grpc "google.golang.org/grpc" + reflect "reflect" +) + +// MockCoreServiceClient is a mock of CoreServiceClient interface +type MockCoreServiceClient struct { + ctrl *gomock.Controller + recorder *MockCoreServiceClientMockRecorder +} + +// MockCoreServiceClientMockRecorder is the mock recorder for MockCoreServiceClient +type MockCoreServiceClientMockRecorder struct { + mock *MockCoreServiceClient +} + +// NewMockCoreServiceClient creates a new mock instance +func NewMockCoreServiceClient(ctrl *gomock.Controller) *MockCoreServiceClient { + mock := &MockCoreServiceClient{ctrl: ctrl} + mock.recorder = &MockCoreServiceClientMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockCoreServiceClient) EXPECT() *MockCoreServiceClientMockRecorder { + return m.recorder +} + +// ApplyEntity mocks base method +func (m *MockCoreServiceClient) ApplyEntity(arg0 context.Context, arg1 *specs.EntitySpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyEntityResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ApplyEntity", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyEntityResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ApplyEntity indicates an expected call of ApplyEntity +func (mr *MockCoreServiceClientMockRecorder) ApplyEntity(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyEntity", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyEntity), varargs...) +} + +// ApplyFeature mocks base method +func (m *MockCoreServiceClient) ApplyFeature(arg0 context.Context, arg1 *specs.FeatureSpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyFeatureResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ApplyFeature", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyFeatureResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ApplyFeature indicates an expected call of ApplyFeature +func (mr *MockCoreServiceClientMockRecorder) ApplyFeature(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyFeature", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyFeature), varargs...) +} + +// ApplyFeatureGroup mocks base method +func (m *MockCoreServiceClient) ApplyFeatureGroup(arg0 context.Context, arg1 *specs.FeatureGroupSpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyFeatureGroupResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ApplyFeatureGroup", varargs...) 
+ ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyFeatureGroupResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ApplyFeatureGroup indicates an expected call of ApplyFeatureGroup +func (mr *MockCoreServiceClientMockRecorder) ApplyFeatureGroup(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyFeatureGroup", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyFeatureGroup), varargs...) +} + +// ApplyStorage mocks base method +func (m *MockCoreServiceClient) ApplyStorage(arg0 context.Context, arg1 *specs.StorageSpec, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ApplyStorageResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ApplyStorage", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_ApplyStorageResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ApplyStorage indicates an expected call of ApplyStorage +func (mr *MockCoreServiceClientMockRecorder) ApplyStorage(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ApplyStorage", reflect.TypeOf((*MockCoreServiceClient)(nil).ApplyStorage), varargs...) +} + +// GetEntities mocks base method +func (m *MockCoreServiceClient) GetEntities(arg0 context.Context, arg1 *core.CoreServiceTypes_GetEntitiesRequest, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_GetEntitiesResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetEntities", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_GetEntitiesResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetEntities indicates an expected call of GetEntities +func (mr *MockCoreServiceClientMockRecorder) GetEntities(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEntities", reflect.TypeOf((*MockCoreServiceClient)(nil).GetEntities), varargs...) +} + +// GetFeatures mocks base method +func (m *MockCoreServiceClient) GetFeatures(arg0 context.Context, arg1 *core.CoreServiceTypes_GetFeaturesRequest, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_GetFeaturesResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetFeatures", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_GetFeaturesResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetFeatures indicates an expected call of GetFeatures +func (mr *MockCoreServiceClientMockRecorder) GetFeatures(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFeatures", reflect.TypeOf((*MockCoreServiceClient)(nil).GetFeatures), varargs...) 
+} + +// GetStorage mocks base method +func (m *MockCoreServiceClient) GetStorage(arg0 context.Context, arg1 *core.CoreServiceTypes_GetStorageRequest, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_GetStorageResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetStorage", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_GetStorageResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetStorage indicates an expected call of GetStorage +func (mr *MockCoreServiceClientMockRecorder) GetStorage(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStorage", reflect.TypeOf((*MockCoreServiceClient)(nil).GetStorage), varargs...) +} + +// ListEntities mocks base method +func (m *MockCoreServiceClient) ListEntities(arg0 context.Context, arg1 *empty.Empty, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ListEntitiesResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ListEntities", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_ListEntitiesResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListEntities indicates an expected call of ListEntities +func (mr *MockCoreServiceClientMockRecorder) ListEntities(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListEntities", reflect.TypeOf((*MockCoreServiceClient)(nil).ListEntities), varargs...) +} + +// ListFeatures mocks base method +func (m *MockCoreServiceClient) ListFeatures(arg0 context.Context, arg1 *empty.Empty, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ListFeaturesResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ListFeatures", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_ListFeaturesResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListFeatures indicates an expected call of ListFeatures +func (mr *MockCoreServiceClientMockRecorder) ListFeatures(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListFeatures", reflect.TypeOf((*MockCoreServiceClient)(nil).ListFeatures), varargs...) +} + +// ListStorage mocks base method +func (m *MockCoreServiceClient) ListStorage(arg0 context.Context, arg1 *empty.Empty, arg2 ...grpc.CallOption) (*core.CoreServiceTypes_ListStorageResponse, error) { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1} + for _, a := range arg2 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ListStorage", varargs...) + ret0, _ := ret[0].(*core.CoreServiceTypes_ListStorageResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListStorage indicates an expected call of ListStorage +func (mr *MockCoreServiceClientMockRecorder) ListStorage(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1}, arg2...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListStorage", reflect.TypeOf((*MockCoreServiceClient)(nil).ListStorage), varargs...) +} diff --git a/cli/feast/cmd/testdata/invalid_entity.yaml b/cli/feast/cmd/testdata/invalid_entity.yaml new file mode 100644 index 0000000000..4fafacc87b --- /dev/null +++ b/cli/feast/cmd/testdata/invalid_entity.yaml @@ -0,0 +1,5 @@ +name: myentity +description: test entity with tag +tags: + - tag1 + tag2 \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_entity.yaml b/cli/feast/cmd/testdata/valid_entity.yaml new file mode 100644 index 0000000000..97f215711f --- /dev/null +++ b/cli/feast/cmd/testdata/valid_entity.yaml @@ -0,0 +1,5 @@ +name : myentity +description: test entity with tag +tags: + - tag1 + - tag2 \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_entity_no_tag.yaml b/cli/feast/cmd/testdata/valid_entity_no_tag.yaml new file mode 100644 index 0000000000..828a768b29 --- /dev/null +++ b/cli/feast/cmd/testdata/valid_entity_no_tag.yaml @@ -0,0 +1,2 @@ +name : myentity +description: test entity without tag \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_feature.yaml b/cli/feast/cmd/testdata/valid_feature.yaml new file mode 100644 index 0000000000..a54915641b --- /dev/null +++ b/cli/feast/cmd/testdata/valid_feature.yaml @@ -0,0 +1,12 @@ +id: myentity.feature_bool_redis1 +name: feature_bool_redis1 +entity: myentity +owner: bob@example.com +description: test entity. +valueType: BOOL +uri: https://github.com/bob/example +dataStores: + serving: + id: REDIS1 + warehouse: + id: BIGQUERY1 \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_feature_group.yaml b/cli/feast/cmd/testdata/valid_feature_group.yaml new file mode 100644 index 0000000000..f877d209fe --- /dev/null +++ b/cli/feast/cmd/testdata/valid_feature_group.yaml @@ -0,0 +1,7 @@ +id: my_fg +tags: ["tag1", "tag2"] +dataStores: + serving: + id: "REDIS1" + warehouse: + id: "BIGQUERY1" \ No newline at end of file diff --git a/cli/feast/cmd/testdata/valid_storage.yaml b/cli/feast/cmd/testdata/valid_storage.yaml new file mode 100644 index 0000000000..96ae9e40f7 --- /dev/null +++ b/cli/feast/cmd/testdata/valid_storage.yaml @@ -0,0 +1,6 @@ +id: BIGQUERY1 +type: bigquery +options: + dataset: "feast" + project: "gcp-project" + tempLocation: "gs://feast-storage" \ No newline at end of file diff --git a/cli/feast/pkg/printer/job.go b/cli/feast/pkg/printer/job.go index 56f9ac8e8b..45af68519b 100644 --- a/cli/feast/pkg/printer/job.go +++ b/cli/feast/pkg/printer/job.go @@ -18,7 +18,7 @@ import ( "fmt" "strings" - "github.com/gojek/feast/cli/feast/pkg/util" + "github.com/gojek/feast/cli/feast/pkg/timeutil" "github.com/gojek/feast/protos/generated/go/feast/core" ) @@ -30,7 +30,7 @@ func PrintJobDetail(jobDetail *core.JobServiceTypes_JobDetail) string { fmt.Sprintf("%s:\t%s", "Type", jobDetail.GetType()), fmt.Sprintf("%s:\t%s", "Runner", jobDetail.GetRunner()), fmt.Sprintf("%s:\t%s", "Status", jobDetail.GetStatus()), - fmt.Sprintf("%s:\t%s", "Age", util.ParseAge(*jobDetail.GetCreated())), + fmt.Sprintf("%s:\t%s", "Age", timeutil.DurationUntilNowInHumanFormat(*jobDetail.GetCreated())), } lines = append(lines, "Metrics:") for k, v := range jobDetail.GetMetrics() { diff --git a/cli/feast/pkg/printer/job_test.go b/cli/feast/pkg/printer/job_test.go new file mode 100644 index 0000000000..4b930a56d7 --- /dev/null +++ b/cli/feast/pkg/printer/job_test.go @@ -0,0 +1,66 @@ +// Copyright 2018 The Feast Authors +// +// Licensed under the Apache 
License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package printer + +import ( + "testing" +) + +func Test_printEntity(t *testing.T) { + type args struct { + entityName string + metrics map[string]float64 + } + tests := []struct { + name string + args args + want string + }{ + {"1 feature 1 metric value", args{"entity1", map[string]float64{"entity:entity1:value1": 89.0}}, "- Name: entity1\n Metrics: \n value1: 89.0"}, + {"1 feature 2 metric value", args{"entity1", map[string]float64{"entity:entity1:value1": 89.0, "entity:entity1:value2": 90.0}}, "- Name: entity1\n Metrics: \n value1: 89.0\n value2: 90.0"}, + {"1 feature 0 metric value", args{"entity1", map[string]float64{"entity:entity2:value1": 89.0, "entity:entity3:value2": 90.0}}, "- Name: entity1\n Metrics: "}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := printEntity(tt.args.entityName, tt.args.metrics); got != tt.want { + t.Errorf("printEntity() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_printFeature(t *testing.T) { + type args struct { + featureName string + metrics map[string]float64 + } + tests := []struct { + name string + args args + want string + }{ + {"1 feature 1 metric value", args{"feature1", map[string]float64{"feature:feature1:value1": 89.0, "feature:feature2:value1": 90.0}}, "- Id: feature1\n Metrics: \n value1: 89.0"}, + {"1 feature 2 metric value", args{"feature1", map[string]float64{"feature:feature1:value1": 89.0, "feature:feature1:value2": 90.0}}, "- Id: feature1\n Metrics: \n value1: 89.0\n value2: 90.0"}, + {"1 feature 0 metric value", args{"feature1", map[string]float64{"feature:feature2:value1": 89.0, "feature:feature3:value1": 90.0}}, "- Id: feature1\n Metrics: "}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := printFeature(tt.args.featureName, tt.args.metrics); got != tt.want { + t.Errorf("printEntity() = %v, want %v", got, tt.want) + println(len(got), len(tt.want)) + } + }) + } +} diff --git a/cli/feast/pkg/printer/specs.go b/cli/feast/pkg/printer/specs.go index 8c88bffbfe..6730938ba6 100644 --- a/cli/feast/pkg/printer/specs.go +++ b/cli/feast/pkg/printer/specs.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/gojek/feast/cli/feast/pkg/util" + "github.com/gojek/feast/cli/feast/pkg/timeutil" "github.com/gojek/feast/protos/generated/go/feast/core" ) @@ -29,8 +29,8 @@ func PrintFeatureDetail(featureDetail *core.UIServiceTypes_FeatureDetail) string lines = append(lines, fmt.Sprintf(" %s:\t%s", "Warehouse", wh.GetId())) } } - lines = append(lines, fmt.Sprintf("%s:\t%s", "Created", util.ParseTimestamp(*featureDetail.GetCreated()))) - lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", util.ParseTimestamp(*featureDetail.GetLastUpdated()))) + lines = append(lines, fmt.Sprintf("%s:\t%s", "Created", timeutil.FormatToRFC3339(*featureDetail.GetCreated()))) + lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", timeutil.FormatToRFC3339(*featureDetail.GetLastUpdated()))) if jobs := featureDetail.GetJobs(); len(jobs) > 0 { lines = 
append(lines, "Related Jobs:") for _, job := range jobs { @@ -56,7 +56,7 @@ func PrintEntityDetail(entityDetail *core.UIServiceTypes_EntityDetail) string { if tags := spec.GetTags(); len(tags) > 0 { lines = append(lines, fmt.Sprintf("Tags: %s", strings.Join(tags, ","))) } - lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", util.ParseTimestamp(*entityDetail.GetLastUpdated()))) + lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", timeutil.FormatToRFC3339(*entityDetail.GetLastUpdated()))) lines = append(lines, "Related Jobs:") for _, job := range entityDetail.GetJobs() { lines = append(lines, fmt.Sprintf("- %s", job)) @@ -78,7 +78,7 @@ func PrintStorageDetail(storageDetail *core.UIServiceTypes_StorageDetail) string for k, v := range spec.GetOptions() { lines = append(lines, fmt.Sprintf(" %s: %s", k, v)) } - lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", util.ParseTimestamp(*storageDetail.GetLastUpdated()))) + lines = append(lines, fmt.Sprintf("%s:\t%s", "LastUpdated", timeutil.FormatToRFC3339(*storageDetail.GetLastUpdated()))) out := strings.Join(lines, "\n") fmt.Println(out) return out diff --git a/cli/feast/pkg/printer/printer_test.go b/cli/feast/pkg/printer/specs_test.go similarity index 89% rename from cli/feast/pkg/printer/printer_test.go rename to cli/feast/pkg/printer/specs_test.go index 07332324c1..6055133211 100644 --- a/cli/feast/pkg/printer/printer_test.go +++ b/cli/feast/pkg/printer/specs_test.go @@ -4,7 +4,7 @@ import ( "fmt" "testing" - "github.com/gojek/feast/cli/feast/pkg/util" + "github.com/gojek/feast/cli/feast/pkg/timeutil" "github.com/golang/protobuf/ptypes/timestamp" @@ -58,8 +58,8 @@ LastUpdated: %s Related Jobs: - job1 - job2`, - util.ParseTimestamp(timestamp.Timestamp{Seconds: 1}), - util.ParseTimestamp(timestamp.Timestamp{Seconds: 1})), + timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1}), + timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})), }, { name: "no storage", input: &core.UIServiceTypes_FeatureDetail{ @@ -88,8 +88,8 @@ LastUpdated: %s Related Jobs: - job1 - job2`, - util.ParseTimestamp(timestamp.Timestamp{Seconds: 1}), - util.ParseTimestamp(timestamp.Timestamp{Seconds: 1})), + timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1}), + timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})), }, } @@ -121,7 +121,7 @@ LastUpdated: %s Related Jobs: - job1 - job2`, - util.ParseTimestamp(timestamp.Timestamp{Seconds: 1})) + timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})) if out != expected { t.Errorf("Expected output:\n%s \nActual:\n%s \n", expected, out) } @@ -146,7 +146,7 @@ Options: option1: value1 option2: value2 LastUpdated: %s`, - util.ParseTimestamp(timestamp.Timestamp{Seconds: 1})) + timeutil.FormatToRFC3339(timestamp.Timestamp{Seconds: 1})) if out != expected { t.Errorf("Expected output:\n%s \nActual:\n%s \n", expected, out) } diff --git a/cli/feast/pkg/util/utils.go b/cli/feast/pkg/timeutil/timeutil.go similarity index 75% rename from cli/feast/pkg/util/utils.go rename to cli/feast/pkg/timeutil/timeutil.go index a48ef6d741..379a3367e3 100644 --- a/cli/feast/pkg/util/utils.go +++ b/cli/feast/pkg/timeutil/timeutil.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package util +package timeutil import ( "fmt" @@ -22,9 +22,8 @@ import ( "github.com/golang/protobuf/ptypes/timestamp" ) -// ParseAge parses a given timestamp to a human readable duration. 
-func ParseAge(createdTimestamp timestamp.Timestamp) string { - timeSinceCreation := float64(time.Now().Unix() - createdTimestamp.GetSeconds()) +func DurationUntilNowInHumanFormat(timestamp timestamp.Timestamp) string { + timeSinceCreation := float64(time.Now().Unix() - timestamp.GetSeconds()) if days := math.Floor(timeSinceCreation / float64(8.64e+4)); days > 0 { return fmt.Sprintf("%dd", int(days)) } else if hours := math.Floor(timeSinceCreation / float64(3.6e+3)); hours > 0 { @@ -33,8 +32,7 @@ func ParseAge(createdTimestamp timestamp.Timestamp) string { return fmt.Sprintf("%dm", int(math.Floor(timeSinceCreation/float64(60)))) } -// ParseTimestamp parses a given timestamp to a human readable format. -func ParseTimestamp(ts timestamp.Timestamp) string { +func FormatToRFC3339(ts timestamp.Timestamp) string { t := time.Unix(ts.GetSeconds(), int64(ts.GetNanos())) return t.Format(time.RFC3339) } diff --git a/cli/feast/pkg/timeutil/timeutil_test.go b/cli/feast/pkg/timeutil/timeutil_test.go new file mode 100644 index 0000000000..907f60f5e6 --- /dev/null +++ b/cli/feast/pkg/timeutil/timeutil_test.go @@ -0,0 +1,49 @@ +// Copyright 2018 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package timeutil + +import ( + "testing" + "time" + + "github.com/golang/protobuf/ptypes/timestamp" +) + +func TestDurationUntilNowInHumanFormat(t *testing.T) { + type args struct { + createdTimestamp timestamp.Timestamp + } + + tests := []struct { + name string + args args + want string + }{ + {"59 second before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-1 * time.Duration(time.Second)).Unix()}}, "0m"}, + {"1 minute before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-1 * time.Duration(time.Minute)).Unix()}}, "1m"}, + {"30 minutes before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-30 * time.Duration(time.Minute)).Unix()}}, "30m"}, + {"1 hour before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-1 * time.Duration(time.Hour)).Unix()}}, "1h"}, + {"2 hours before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-2 * time.Duration(time.Hour)).Unix()}}, "2h"}, + {"1 day before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-24 * time.Duration(time.Hour)).Unix()}}, "1d"}, + {"2 days before now", args{timestamp.Timestamp{Seconds: time.Now().Add(-48 * time.Duration(time.Hour)).Unix()}}, "2d"}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := DurationUntilNowInHumanFormat(tt.args.createdTimestamp); got != tt.want { + t.Errorf("DurationUntilNowInHumanFormat() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/core/src/main/java/feast/core/http/HealthController.java b/core/src/main/java/feast/core/http/HealthController.java index 1372d09c1c..e718332070 100644 --- a/core/src/main/java/feast/core/http/HealthController.java +++ b/core/src/main/java/feast/core/http/HealthController.java @@ -63,7 +63,8 @@ public ResponseEntity healthz() { return ResponseEntity.ok("healthy"); } log.error("Unable to reach 
DB"); - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Unable to establish connection with DB"); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body("Unable to establish connection with DB"); } catch (SQLException e) { log.error("Unable to reach DB: {}", e); return ResponseEntity.status(INTERNAL_SERVER_ERROR).body(e.getMessage()); diff --git a/core/src/main/java/feast/core/model/FeatureInfo.java b/core/src/main/java/feast/core/model/FeatureInfo.java index ac497019d3..e10944df1d 100644 --- a/core/src/main/java/feast/core/model/FeatureInfo.java +++ b/core/src/main/java/feast/core/model/FeatureInfo.java @@ -25,6 +25,7 @@ import feast.specs.FeatureSpecProto.FeatureSpec; import feast.types.ValueProto.ValueType; import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Getter; import lombok.Setter; diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java index d80b5b7b42..ccd30ddfac 100644 --- a/core/src/test/java/feast/core/CoreApplicationTest.java +++ b/core/src/test/java/feast/core/CoreApplicationTest.java @@ -48,7 +48,7 @@ */ @RunWith(SpringRunner.class) @SpringBootTest(properties = { - "feast.jobs.workspace=${java.io.tmpdir}${random.uuid}", + "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", "spring.datasource.url=jdbc:h2:mem:testdb", "feast.store.warehouse.type=file.json", "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java index 61f55202ad..6dcef362a2 100644 --- a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java +++ b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java @@ -49,7 +49,7 @@ */ @RunWith(SpringRunner.class) @SpringBootTest(properties = { - "feast.jobs.workspace=${java.io.tmpdir}${random.uuid}", + "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", "spring.datasource.url=jdbc:h2:mem:testdb", "feast.store.warehouse.type=file.json", "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java index 6c8ee325aa..46b2e151a0 100644 --- a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java +++ b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java @@ -49,7 +49,7 @@ */ @RunWith(SpringRunner.class) @SpringBootTest(properties = { - "feast.jobs.workspace=${java.io.tmpdir}${random.uuid}", + "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", "spring.datasource.url=jdbc:h2:mem:testdb", "feast.store.serving.type=redis", "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", diff --git a/core/src/test/java/feast/core/http/HealthControllerTest.java b/core/src/test/java/feast/core/http/HealthControllerTest.java new file mode 100644 index 0000000000..030033b789 --- /dev/null +++ b/core/src/test/java/feast/core/http/HealthControllerTest.java @@ -0,0 +1,65 @@ +package feast.core.http; + +import org.junit.Test; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; + +import javax.sql.DataSource; +import java.sql.Connection; +import java.sql.SQLException; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.*; + +public class HealthControllerTest { + @Test + public void ping() { + HealthController 
healthController = new HealthController(null); + assertEquals(ResponseEntity.ok("pong"), healthController.ping()); + } + + @Test + public void healthz() { + assertEquals(ResponseEntity.ok("healthy"), mockHealthyController().healthz()); + assertEquals( + ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body("Unable to establish connection with DB"), + mockUnhealthyControllerBecauseInvalidConn().healthz()); + assertEquals( + ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("mocked sqlexception"), + mockUnhealthyControllerBecauseSQLException().healthz()); + } + + private HealthController mockHealthyController() { + DataSource mockDataSource = mock(DataSource.class); + Connection mockConnection = mock(Connection.class); + try { + when(mockConnection.isValid(any(int.class))).thenReturn(Boolean.TRUE); + when(mockDataSource.getConnection()).thenReturn(mockConnection); + } catch (Exception e) { + e.printStackTrace(); + } + return new HealthController(mockDataSource); + } + + private HealthController mockUnhealthyControllerBecauseInvalidConn() { + DataSource mockDataSource = mock(DataSource.class); + Connection mockConnection = mock(Connection.class); + try { + when(mockConnection.isValid(any(int.class))).thenReturn(Boolean.FALSE); + when(mockDataSource.getConnection()).thenReturn(mockConnection); + } catch (Exception ignored) { + } + return new HealthController(mockDataSource); + } + + private HealthController mockUnhealthyControllerBecauseSQLException() { + DataSource mockDataSource = mock(DataSource.class); + Connection mockConnection = mock(Connection.class); + try { + when(mockDataSource.getConnection()).thenThrow(new SQLException("mocked sqlexception")); + } catch (SQLException ignored) { + } + return new HealthController(mockDataSource); + } +} diff --git a/core/src/test/java/feast/core/http/UiServiceControllerTest.java b/core/src/test/java/feast/core/http/UiServiceControllerTest.java new file mode 100644 index 0000000000..6a858940df --- /dev/null +++ b/core/src/test/java/feast/core/http/UiServiceControllerTest.java @@ -0,0 +1,177 @@ +package feast.core.http; + +import feast.core.UIServiceProto; +import feast.core.UIServiceProto.UIServiceTypes.EntityDetail; +import feast.core.UIServiceProto.UIServiceTypes.FeatureDetail; +import feast.core.UIServiceProto.UIServiceTypes.FeatureGroupDetail; +import feast.core.UIServiceProto.UIServiceTypes.StorageDetail; +import feast.core.model.EntityInfo; +import feast.core.model.FeatureGroupInfo; +import feast.core.model.FeatureInfo; +import feast.core.model.StorageInfo; +import feast.core.service.JobManagementService; +import feast.core.service.SpecService; +import feast.specs.FeatureSpecProto; +import org.junit.Before; +import org.junit.Test; + +import java.util.Collections; + +import static feast.specs.FeatureSpecProto.*; +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class UiServiceControllerTest { + private UiServiceController goodUiServiceController; + private UiServiceController badUiServiceController; + + @Before + public void setUp() throws Exception { + FeatureInfo mockFeatureInfo = mock(FeatureInfo.class); + when(mockFeatureInfo.getFeatureDetail()).thenReturn(FeatureDetail.getDefaultInstance()); + when(mockFeatureInfo.getFeatureSpec()).thenReturn(FeatureSpec.getDefaultInstance()); + when(mockFeatureInfo.resolve()).thenReturn(mockFeatureInfo); + + EntityInfo mockEntityInfo = mock(EntityInfo.class); + 
when(mockEntityInfo.getEntityDetail()).thenReturn(EntityDetail.getDefaultInstance()); + + FeatureGroupInfo mockFeatureGroupInfo = mock(FeatureGroupInfo.class); + when(mockFeatureGroupInfo.getFeatureGroupDetail()) + .thenReturn(FeatureGroupDetail.getDefaultInstance()); + + StorageInfo mockStorageInfo = mock(StorageInfo.class); + when(mockStorageInfo.getStorageDetail()).thenReturn(StorageDetail.getDefaultInstance()); + + SpecService goodMockSpecService = mock(SpecService.class); + when(goodMockSpecService.listFeatures()).thenReturn(Collections.singletonList(mockFeatureInfo)); + when(goodMockSpecService.getFeatures(Collections.singletonList("1"))) + .thenReturn(Collections.singletonList(mockFeatureInfo)); + when(goodMockSpecService.listFeatureGroups()) + .thenReturn(Collections.singletonList(mockFeatureGroupInfo)); + when(goodMockSpecService.getFeatureGroups(Collections.singletonList("1"))) + .thenReturn(Collections.singletonList(mockFeatureGroupInfo)); + when(goodMockSpecService.listEntities()).thenReturn(Collections.singletonList(mockEntityInfo)); + when(goodMockSpecService.getEntities(Collections.singletonList("1"))) + .thenReturn(Collections.singletonList(mockEntityInfo)); + when(goodMockSpecService.listStorage()).thenReturn(Collections.singletonList(mockStorageInfo)); + when(goodMockSpecService.getStorage(Collections.singletonList("1"))) + .thenReturn(Collections.singletonList(mockStorageInfo)); + + JobManagementService goodMockJobMangementService = mock(JobManagementService.class); + + goodUiServiceController = + new UiServiceController(goodMockSpecService, goodMockJobMangementService); + + SpecService badMockSpecService = mock(SpecService.class); + when(badMockSpecService.listFeatures()).thenReturn(null); + when(badMockSpecService.getFeatures(Collections.singletonList("1"))).thenReturn(null); + when(badMockSpecService.listFeatureGroups()).thenReturn(null); + when(badMockSpecService.getFeatureGroups(Collections.singletonList("1"))).thenReturn(null); + when(badMockSpecService.listEntities()).thenReturn(null); + when(badMockSpecService.getEntities(Collections.singletonList("1"))).thenReturn(null); + when(badMockSpecService.listStorage()).thenReturn(null); + when(badMockSpecService.getStorage(Collections.singletonList("1"))).thenReturn(null); + + JobManagementService badMockJobMangementService = mock(JobManagementService.class); + + badUiServiceController = + new UiServiceController(badMockSpecService, badMockJobMangementService); + } + + @Test + public void listFeatures() { + FeatureDetail expected = FeatureDetail.getDefaultInstance(); + FeatureDetail actual = goodUiServiceController.listFeatures().getFeaturesList().get(0); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void listFeaturesWithExecption() { + badUiServiceController.listFeatures(); + } + + @Test + public void getFeature() { + FeatureDetail expected = FeatureDetail.getDefaultInstance(); + FeatureDetail actual = goodUiServiceController.getFeature("1").getFeature(); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void getFeatureWithException() { + badUiServiceController.getFeature("1"); + } + + @Test + public void listFeatureGroups() { + FeatureGroupDetail expected = FeatureGroupDetail.getDefaultInstance(); + FeatureGroupDetail actual = goodUiServiceController.listFeatureGroups().getFeatureGroups(0); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void listFeatureGroupsWithException() { + 
badUiServiceController.listFeatureGroups(); + } + + @Test + public void getFeatureGroup() { + FeatureGroupDetail expected = FeatureGroupDetail.getDefaultInstance(); + FeatureGroupDetail actual = goodUiServiceController.getFeatureGroup("1").getFeatureGroup(); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void getFeatureGroupWithException() { + badUiServiceController.getFeatureGroup("1"); + } + + @Test + public void listEntities() { + EntityDetail expected = EntityDetail.getDefaultInstance(); + EntityDetail actual = goodUiServiceController.listEntities().getEntitiesList().get(0); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void listEntitiesWithException() { + badUiServiceController.listEntities(); + } + + @Test + public void getEntity() { + EntityDetail expected = EntityDetail.getDefaultInstance(); + EntityDetail actual = goodUiServiceController.getEntity("1").getEntity(); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void getEntityWithException() { + badUiServiceController.getEntity("1"); + } + + @Test + public void listStorage() { + StorageDetail expected = StorageDetail.getDefaultInstance(); + StorageDetail actual = goodUiServiceController.listStorage().getStorage(0); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void listStorageWithException() { + badUiServiceController.listStorage(); + } + + @Test + public void getStorage() { + StorageDetail expected = StorageDetail.getDefaultInstance(); + StorageDetail actual = goodUiServiceController.getStorage("1").getStorage(); + assertEquals(expected, actual); + } + + @Test(expected = Exception.class) + public void getStorageWithException() { + badUiServiceController.getStorage("1"); + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000000..84acd86f2b --- /dev/null +++ b/go.mod @@ -0,0 +1,14 @@ +module github.com/gojek/feast + +require ( + github.com/ghodss/yaml v1.0.0 + github.com/golang/mock v1.2.0 + github.com/golang/protobuf v1.3.1 + github.com/google/go-cmp v0.2.0 + github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/mitchellh/go-homedir v1.1.0 + github.com/spf13/cobra v0.0.3 + github.com/spf13/viper v1.3.2 + google.golang.org/grpc v1.19.1 + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000000..c169d03619 --- /dev/null +++ b/go.sum @@ -0,0 +1,76 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= +github.com/fsnotify/fsnotify v1.4.7/go.mod 
h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0 h1:28o5sBqPkBsMGnC6b4MvE2TzSr5/AT4c/1fLqVGIwlk= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d h1:g9qWBGx4puODJTMVyoPrpoxPFgVGd+z1DZwjfRu4d0I= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM= +google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/protos/Makefile b/protos/Makefile index e2f61a8c37..e49b8c32d6 100644 --- a/protos/Makefile +++ b/protos/Makefile @@ -4,7 +4,7 @@ dirs = core serving specs storage types service_dirs = core serving gen-go: - @$(foreach dir,$(dirs),protoc -I/usr/local/include -I. --go_out=plugins=grpc:$$GOPATH/src feast/$(dir)/*.proto;) + @$(foreach dir,$(dirs),protoc -I/usr/local/include -I. --go_out=plugins=grpc,paths=source_relative:generated/go feast/$(dir)/*.proto;) gen-python: pip install grpcio-tools diff --git a/protos/generated/go/feast/core/CoreService.pb.go b/protos/generated/go/feast/core/CoreService.pb.go index 4037da58af..f903333006 100644 --- a/protos/generated/go/feast/core/CoreService.pb.go +++ b/protos/generated/go/feast/core/CoreService.pb.go @@ -1,17 +1,18 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// source: feast/core/CoreService.proto -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import specs "github.com/gojek/feast/protos/generated/go/feast/specs" -import empty "github.com/golang/protobuf/ptypes/empty" +package core import ( - context "golang.org/x/net/context" + context "context" + fmt "fmt" + specs "github.com/gojek/feast/protos/generated/go/feast/specs" + proto "github.com/golang/protobuf/proto" + empty "github.com/golang/protobuf/ptypes/empty" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -23,7 +24,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type CoreServiceTypes struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -35,16 +36,17 @@ func (m *CoreServiceTypes) Reset() { *m = CoreServiceTypes{} } func (m *CoreServiceTypes) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes) ProtoMessage() {} func (*CoreServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0} + return fileDescriptor_d9be266444105411, []int{0} } + func (m *CoreServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes.Unmarshal(m, b) } func (m *CoreServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes.Merge(dst, src) +func (m *CoreServiceTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes.Merge(m, src) } func (m *CoreServiceTypes) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes.Size(m) @@ -66,16 +68,17 @@ func (m *CoreServiceTypes_GetEntitiesRequest) Reset() { *m = CoreService func (m *CoreServiceTypes_GetEntitiesRequest) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetEntitiesRequest) ProtoMessage() {} func (*CoreServiceTypes_GetEntitiesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 0} + return fileDescriptor_d9be266444105411, []int{0, 0} } + func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Unmarshal(m, b) } func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_GetEntitiesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Merge(dst, src) +func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Merge(m, src) } func (m *CoreServiceTypes_GetEntitiesRequest) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_GetEntitiesRequest.Size(m) @@ -104,16 +107,17 @@ func (m *CoreServiceTypes_GetEntitiesResponse) Reset() { *m = CoreServic func (m 
*CoreServiceTypes_GetEntitiesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetEntitiesResponse) ProtoMessage() {} func (*CoreServiceTypes_GetEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 1} + return fileDescriptor_d9be266444105411, []int{0, 1} } + func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_GetEntitiesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Merge(dst, src) +func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Merge(m, src) } func (m *CoreServiceTypes_GetEntitiesResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_GetEntitiesResponse.Size(m) @@ -142,16 +146,17 @@ func (m *CoreServiceTypes_ListEntitiesResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ListEntitiesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ListEntitiesResponse) ProtoMessage() {} func (*CoreServiceTypes_ListEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 2} + return fileDescriptor_d9be266444105411, []int{0, 2} } + func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ListEntitiesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Merge(dst, src) +func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Merge(m, src) } func (m *CoreServiceTypes_ListEntitiesResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ListEntitiesResponse.Size(m) @@ -181,16 +186,17 @@ func (m *CoreServiceTypes_GetFeaturesRequest) Reset() { *m = CoreService func (m *CoreServiceTypes_GetFeaturesRequest) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetFeaturesRequest) ProtoMessage() {} func (*CoreServiceTypes_GetFeaturesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 3} + return fileDescriptor_d9be266444105411, []int{0, 3} } + func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Unmarshal(m, b) } func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_GetFeaturesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Merge(dst, src) +func (m *CoreServiceTypes_GetFeaturesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Merge(m, src) } func (m 
*CoreServiceTypes_GetFeaturesRequest) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_GetFeaturesRequest.Size(m) @@ -219,16 +225,17 @@ func (m *CoreServiceTypes_GetFeaturesResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_GetFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetFeaturesResponse) ProtoMessage() {} func (*CoreServiceTypes_GetFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 4} + return fileDescriptor_d9be266444105411, []int{0, 4} } + func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_GetFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Merge(dst, src) +func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Merge(m, src) } func (m *CoreServiceTypes_GetFeaturesResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_GetFeaturesResponse.Size(m) @@ -257,16 +264,17 @@ func (m *CoreServiceTypes_ListFeaturesResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ListFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ListFeaturesResponse) ProtoMessage() {} func (*CoreServiceTypes_ListFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 5} + return fileDescriptor_d9be266444105411, []int{0, 5} } + func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ListFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Merge(dst, src) +func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Merge(m, src) } func (m *CoreServiceTypes_ListFeaturesResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ListFeaturesResponse.Size(m) @@ -296,16 +304,17 @@ func (m *CoreServiceTypes_GetStorageRequest) Reset() { *m = CoreServiceT func (m *CoreServiceTypes_GetStorageRequest) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetStorageRequest) ProtoMessage() {} func (*CoreServiceTypes_GetStorageRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 6} + return fileDescriptor_d9be266444105411, []int{0, 6} } + func (m *CoreServiceTypes_GetStorageRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Unmarshal(m, b) } func (m *CoreServiceTypes_GetStorageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_GetStorageRequest) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Merge(dst, src) +func (m *CoreServiceTypes_GetStorageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Merge(m, src) } func (m *CoreServiceTypes_GetStorageRequest) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_GetStorageRequest.Size(m) @@ -334,16 +343,17 @@ func (m *CoreServiceTypes_GetStorageResponse) Reset() { *m = CoreService func (m *CoreServiceTypes_GetStorageResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_GetStorageResponse) ProtoMessage() {} func (*CoreServiceTypes_GetStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 7} + return fileDescriptor_d9be266444105411, []int{0, 7} } + func (m *CoreServiceTypes_GetStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_GetStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_GetStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Merge(dst, src) +func (m *CoreServiceTypes_GetStorageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Merge(m, src) } func (m *CoreServiceTypes_GetStorageResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_GetStorageResponse.Size(m) @@ -372,16 +382,17 @@ func (m *CoreServiceTypes_ListStorageResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_ListStorageResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ListStorageResponse) ProtoMessage() {} func (*CoreServiceTypes_ListStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 8} + return fileDescriptor_d9be266444105411, []int{0, 8} } + func (m *CoreServiceTypes_ListStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ListStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ListStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Merge(dst, src) +func (m *CoreServiceTypes_ListStorageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Merge(m, src) } func (m *CoreServiceTypes_ListStorageResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ListStorageResponse.Size(m) @@ -411,16 +422,17 @@ func (m *CoreServiceTypes_ApplyEntityResponse) Reset() { *m = CoreServic func (m *CoreServiceTypes_ApplyEntityResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ApplyEntityResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyEntityResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 9} + return fileDescriptor_d9be266444105411, []int{0, 9} } + func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return 
xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ApplyEntityResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Merge(dst, src) +func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Merge(m, src) } func (m *CoreServiceTypes_ApplyEntityResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ApplyEntityResponse.Size(m) @@ -450,16 +462,17 @@ func (m *CoreServiceTypes_ApplyFeatureResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ApplyFeatureResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ApplyFeatureResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyFeatureResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 10} + return fileDescriptor_d9be266444105411, []int{0, 10} } + func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ApplyFeatureResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Merge(dst, src) +func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Merge(m, src) } func (m *CoreServiceTypes_ApplyFeatureResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureResponse.Size(m) @@ -493,16 +506,17 @@ func (m *CoreServiceTypes_ApplyFeatureGroupResponse) String() string { } func (*CoreServiceTypes_ApplyFeatureGroupResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyFeatureGroupResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 11} + return fileDescriptor_d9be266444105411, []int{0, 11} } + func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Merge(dst, src) +func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Merge(m, src) } func (m *CoreServiceTypes_ApplyFeatureGroupResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ApplyFeatureGroupResponse.Size(m) @@ -532,16 +546,17 @@ func (m *CoreServiceTypes_ApplyStorageResponse) Reset() { *m = CoreServi func (m *CoreServiceTypes_ApplyStorageResponse) String() string { return proto.CompactTextString(m) } func (*CoreServiceTypes_ApplyStorageResponse) ProtoMessage() {} func (*CoreServiceTypes_ApplyStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_CoreService_a0e9a1504f969203, []int{0, 12} + return fileDescriptor_d9be266444105411, []int{0, 12} } + func (m *CoreServiceTypes_ApplyStorageResponse) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_CoreServiceTypes_ApplyStorageResponse.Unmarshal(m, b) } func (m *CoreServiceTypes_ApplyStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_CoreServiceTypes_ApplyStorageResponse.Marshal(b, m, deterministic) } -func (dst *CoreServiceTypes_ApplyStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CoreServiceTypes_ApplyStorageResponse.Merge(dst, src) +func (m *CoreServiceTypes_ApplyStorageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreServiceTypes_ApplyStorageResponse.Merge(m, src) } func (m *CoreServiceTypes_ApplyStorageResponse) XXX_Size() int { return xxx_messageInfo_CoreServiceTypes_ApplyStorageResponse.Size(m) @@ -576,6 +591,50 @@ func init() { proto.RegisterType((*CoreServiceTypes_ApplyStorageResponse)(nil), "feast.core.CoreServiceTypes.ApplyStorageResponse") } +func init() { proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_d9be266444105411) } + +var fileDescriptor_d9be266444105411 = []byte{ + // 602 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5b, 0x6f, 0x12, 0x41, + 0x14, 0x86, 0x34, 0x69, 0xe0, 0x40, 0x4c, 0x3b, 0x34, 0x8a, 0x23, 0x35, 0xcd, 0x26, 0x36, 0x7d, + 0x9a, 0xa9, 0xbd, 0xf8, 0xe4, 0x8b, 0x6d, 0x2a, 0xd1, 0x36, 0xc6, 0x2c, 0xfa, 0x60, 0x8d, 0x0f, + 0x5c, 0x0e, 0xeb, 0x2a, 0x30, 0xeb, 0xce, 0x60, 0xc2, 0xb3, 0xff, 0xcf, 0xdf, 0x64, 0x98, 0x1d, + 0x66, 0x07, 0xd8, 0xed, 0x12, 0xc3, 0x1b, 0x9c, 0xdb, 0x37, 0xdf, 0x39, 0xf3, 0x9d, 0x59, 0x68, + 0x0d, 0xb1, 0x2b, 0x15, 0xef, 0x8b, 0x18, 0xf9, 0xb5, 0x88, 0xb1, 0x83, 0xf1, 0xef, 0xb0, 0x8f, + 0x2c, 0x8a, 0x85, 0x12, 0x04, 0xb4, 0x97, 0xcd, 0xbd, 0xd4, 0x44, 0xca, 0x08, 0xfb, 0x92, 0xdf, + 0x4c, 0x54, 0xa8, 0x66, 0x9d, 0x08, 0xfb, 0x49, 0x24, 0x3d, 0x74, 0xbd, 0x6f, 0xb1, 0xab, 0xa6, + 0x31, 0x3a, 0x6e, 0x2f, 0xc3, 0xdd, 0x8e, 0xc5, 0x34, 0xca, 0x2b, 0xd1, 0x51, 0x22, 0xee, 0x06, + 0x6e, 0x89, 0x67, 0x81, 0x10, 0xc1, 0x08, 0xb9, 0xfe, 0xd7, 0x9b, 0x0e, 0x39, 0x8e, 0x23, 0x35, + 0x4b, 0x9c, 0xde, 0xdf, 0x5d, 0xd8, 0x73, 0x8e, 0xff, 0x69, 0x16, 0xa1, 0xa4, 0xc7, 0x40, 0xda, + 0xa8, 0xf4, 0x51, 0x43, 0x94, 0x3e, 0xfe, 0x9a, 0xa2, 0x54, 0x64, 0x0f, 0x76, 0xc2, 0x81, 0x6c, + 0x96, 0x8f, 0x76, 0x4e, 0xaa, 0xfe, 0xfc, 0x27, 0x7d, 0x0f, 0x8d, 0xa5, 0x38, 0x19, 0x89, 0x89, + 0x44, 0x72, 0x0e, 0x15, 0x34, 0x36, 0x1d, 0x5d, 0x3b, 0x7b, 0xc2, 0x92, 0x7e, 0xe8, 0x23, 0xb2, + 0xb4, 0x07, 0xbe, 0x0d, 0xa4, 0xb7, 0x70, 0x70, 0x17, 0xca, 0x2d, 0x15, 0x4b, 0x08, 0x98, 0x76, + 0x3d, 0x40, 0xe0, 0x56, 0x13, 0x48, 0xe3, 0x0c, 0xe6, 0x05, 0x54, 0x86, 0xc6, 0x66, 0x30, 0x9b, + 0x4b, 0x98, 0xce, 0x98, 0x7c, 0x1b, 0x49, 0xef, 0x12, 0x06, 0x5b, 0xaa, 0xf6, 0x02, 0xf6, 0xdb, + 0xa8, 0xcc, 0x34, 0xf3, 0x19, 0xf8, 0x9a, 0xa9, 0x0d, 0x33, 0x90, 0xaf, 0xa1, 0x2e, 0xd3, 0x7b, + 0x90, 0x0d, 0xeb, 0x5c, 0x14, 0x7f, 0x29, 0x9a, 0x76, 0xa0, 0x31, 0x27, 0xb2, 0xdd, 0xa2, 0x97, + 0xd0, 0x78, 0x13, 0x45, 0xa3, 0x59, 0x32, 0x2f, 0x5b, 0xf4, 0x39, 0x80, 0x9e, 0xda, 0xec, 0x43, + 0x77, 0x8c, 0xcd, 0xf2, 0x51, 0xf9, 0xa4, 0xea, 0x3b, 0x16, 0x7a, 0x01, 0x07, 0x3a, 0xcd, 0x34, + 0xc9, 0xe6, 0xb5, 0xa0, 0x6a, 0x5a, 0xf5, 0x6e, 0x60, 0xd2, 0x52, 0x03, 0xbd, 0x86, 0xa7, 0x6e, + 0x96, 0x16, 0x8c, 0x4d, 0x3d, 0x86, 0x47, 0x43, 0xc7, 0x6e, 0xf3, 0x57, 0xac, 0x16, 0x7a, 0xb5, + 0x0f, 0x2d, 0xa8, 0x1a, 0x66, 0x29, 0xb4, 0x35, 0x9c, 0xfd, 0xa9, 0x40, 0xcd, 0x11, 0x14, 0x89, + 0xa1, 0xe6, 0x68, 0x84, 0x70, 0x96, 0x6e, 0x06, 0xb6, 0x2a, 0x3c, 0xb6, 0xae, 0x3a, 0x7a, 0xba, + 0x79, 0x42, 0x72, 0x3e, 0xaf, 0x44, 
0xbe, 0x42, 0xdd, 0xd5, 0x12, 0x79, 0xcc, 0x92, 0x15, 0xc0, + 0x16, 0x2b, 0x80, 0xdd, 0xcc, 0x57, 0x00, 0x7d, 0xf9, 0x60, 0xed, 0x2c, 0x39, 0x7a, 0x25, 0x43, + 0x68, 0x71, 0xcb, 0x8b, 0x09, 0xad, 0xa8, 0xb0, 0x98, 0xd0, 0xaa, 0x80, 0x52, 0x42, 0x16, 0xf4, + 0xff, 0x09, 0x65, 0x14, 0x17, 0x00, 0xa9, 0x84, 0x08, 0x2b, 0x3a, 0xde, 0xb2, 0x24, 0x29, 0xdf, + 0x38, 0xde, 0x02, 0x7e, 0x81, 0x9a, 0xa3, 0xaf, 0x5c, 0x32, 0xa7, 0x85, 0x64, 0xd6, 0x4b, 0x7f, + 0x83, 0xba, 0x7b, 0xf1, 0x49, 0xee, 0xa6, 0x29, 0x68, 0x55, 0x96, 0xe6, 0xbc, 0x12, 0x19, 0xc1, + 0xfe, 0x9a, 0xae, 0xc8, 0x61, 0x16, 0x86, 0x7d, 0xa3, 0xe8, 0xab, 0x8d, 0x81, 0x96, 0x64, 0xea, + 0x95, 0xc8, 0x3d, 0xd4, 0x9c, 0x95, 0x41, 0xf2, 0xf6, 0x7e, 0x41, 0xa3, 0x32, 0xb6, 0x8e, 0xd3, + 0xa8, 0xc5, 0x10, 0x72, 0xd7, 0xd8, 0x26, 0x8d, 0x5a, 0x9b, 0xc3, 0xd5, 0x67, 0x70, 0xbe, 0x00, + 0xae, 0xdc, 0x17, 0xf6, 0xe3, 0x7c, 0xc8, 0xf7, 0x97, 0x41, 0xa8, 0xbe, 0x4f, 0x7b, 0xac, 0x2f, + 0xc6, 0x3c, 0x10, 0x3f, 0xf0, 0x27, 0x4f, 0xde, 0x70, 0x7d, 0x05, 0x24, 0x0f, 0x70, 0x82, 0x71, + 0x57, 0xe1, 0x80, 0x07, 0x82, 0xa7, 0x1f, 0x1a, 0xbd, 0x5d, 0xed, 0x3f, 0xff, 0x17, 0x00, 0x00, + 0xff, 0xff, 0x57, 0x65, 0xdd, 0x73, 0x7d, 0x08, 0x00, 0x00, +} + // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn @@ -589,44 +648,44 @@ const _ = grpc.SupportPackageIsVersion4 // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type CoreServiceClient interface { // - // Get entities specified in request. - // This process returns a list of entity specs. + //Get entities specified in request. + //This process returns a list of entity specs. GetEntities(ctx context.Context, in *CoreServiceTypes_GetEntitiesRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetEntitiesResponse, error) // - // Get all entities - // This process returns a list of entity specs. + //Get all entities + //This process returns a list of entity specs. ListEntities(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListEntitiesResponse, error) // - // Get features specified in request. - // This process returns a list of feature specs. + //Get features specified in request. + //This process returns a list of feature specs. GetFeatures(ctx context.Context, in *CoreServiceTypes_GetFeaturesRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetFeaturesResponse, error) // - // Get all features. - // This process returns a list of entity specs. + //Get all features. + //This process returns a list of entity specs. ListFeatures(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListFeaturesResponse, error) // - // Get storage specs specified in request. - // This process returns a list of storage specs. + //Get storage specs specified in request. + //This process returns a list of storage specs. GetStorage(ctx context.Context, in *CoreServiceTypes_GetStorageRequest, opts ...grpc.CallOption) (*CoreServiceTypes_GetStorageResponse, error) // - // Get all storage specs. - // This process returns a list of storage specs. + //Get all storage specs. + //This process returns a list of storage specs. ListStorage(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*CoreServiceTypes_ListStorageResponse, error) // - // Register a new feature to the metadata store, or update an existing feature. - // If any validation errors occur, only the first encountered error will be returned. 
+ //Register a new feature to the metadata store, or update an existing feature. + //If any validation errors occur, only the first encountered error will be returned. ApplyFeature(ctx context.Context, in *specs.FeatureSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyFeatureResponse, error) // - // Register a new feature group to the metadata store, or update an existing feature group. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new feature group to the metadata store, or update an existing feature group. + //If any validation errors occur, only the first encountered error will be returned. ApplyFeatureGroup(ctx context.Context, in *specs.FeatureGroupSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyFeatureGroupResponse, error) // - // Register a new entity to the metadata store, or update an existing entity. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new entity to the metadata store, or update an existing entity. + //If any validation errors occur, only the first encountered error will be returned. ApplyEntity(ctx context.Context, in *specs.EntitySpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyEntityResponse, error) // - // Register a new storage spec to the metadata store, or update an existing storage. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new storage spec to the metadata store, or update an existing storage. + //If any validation errors occur, only the first encountered error will be returned. ApplyStorage(ctx context.Context, in *specs.StorageSpec, opts ...grpc.CallOption) (*CoreServiceTypes_ApplyStorageResponse, error) } @@ -731,47 +790,82 @@ func (c *coreServiceClient) ApplyStorage(ctx context.Context, in *specs.StorageS // CoreServiceServer is the server API for CoreService service. type CoreServiceServer interface { // - // Get entities specified in request. - // This process returns a list of entity specs. + //Get entities specified in request. + //This process returns a list of entity specs. GetEntities(context.Context, *CoreServiceTypes_GetEntitiesRequest) (*CoreServiceTypes_GetEntitiesResponse, error) // - // Get all entities - // This process returns a list of entity specs. + //Get all entities + //This process returns a list of entity specs. ListEntities(context.Context, *empty.Empty) (*CoreServiceTypes_ListEntitiesResponse, error) // - // Get features specified in request. - // This process returns a list of feature specs. + //Get features specified in request. + //This process returns a list of feature specs. GetFeatures(context.Context, *CoreServiceTypes_GetFeaturesRequest) (*CoreServiceTypes_GetFeaturesResponse, error) // - // Get all features. - // This process returns a list of entity specs. + //Get all features. + //This process returns a list of entity specs. ListFeatures(context.Context, *empty.Empty) (*CoreServiceTypes_ListFeaturesResponse, error) // - // Get storage specs specified in request. - // This process returns a list of storage specs. + //Get storage specs specified in request. + //This process returns a list of storage specs. GetStorage(context.Context, *CoreServiceTypes_GetStorageRequest) (*CoreServiceTypes_GetStorageResponse, error) // - // Get all storage specs. - // This process returns a list of storage specs. + //Get all storage specs. + //This process returns a list of storage specs. 
ListStorage(context.Context, *empty.Empty) (*CoreServiceTypes_ListStorageResponse, error) // - // Register a new feature to the metadata store, or update an existing feature. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new feature to the metadata store, or update an existing feature. + //If any validation errors occur, only the first encountered error will be returned. ApplyFeature(context.Context, *specs.FeatureSpec) (*CoreServiceTypes_ApplyFeatureResponse, error) // - // Register a new feature group to the metadata store, or update an existing feature group. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new feature group to the metadata store, or update an existing feature group. + //If any validation errors occur, only the first encountered error will be returned. ApplyFeatureGroup(context.Context, *specs.FeatureGroupSpec) (*CoreServiceTypes_ApplyFeatureGroupResponse, error) // - // Register a new entity to the metadata store, or update an existing entity. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new entity to the metadata store, or update an existing entity. + //If any validation errors occur, only the first encountered error will be returned. ApplyEntity(context.Context, *specs.EntitySpec) (*CoreServiceTypes_ApplyEntityResponse, error) // - // Register a new storage spec to the metadata store, or update an existing storage. - // If any validation errors occur, only the first encountered error will be returned. + //Register a new storage spec to the metadata store, or update an existing storage. + //If any validation errors occur, only the first encountered error will be returned. ApplyStorage(context.Context, *specs.StorageSpec) (*CoreServiceTypes_ApplyStorageResponse, error) } +// UnimplementedCoreServiceServer can be embedded to have forward compatible implementations. 
+type UnimplementedCoreServiceServer struct { +} + +func (*UnimplementedCoreServiceServer) GetEntities(ctx context.Context, req *CoreServiceTypes_GetEntitiesRequest) (*CoreServiceTypes_GetEntitiesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetEntities not implemented") +} +func (*UnimplementedCoreServiceServer) ListEntities(ctx context.Context, req *empty.Empty) (*CoreServiceTypes_ListEntitiesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListEntities not implemented") +} +func (*UnimplementedCoreServiceServer) GetFeatures(ctx context.Context, req *CoreServiceTypes_GetFeaturesRequest) (*CoreServiceTypes_GetFeaturesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeatures not implemented") +} +func (*UnimplementedCoreServiceServer) ListFeatures(ctx context.Context, req *empty.Empty) (*CoreServiceTypes_ListFeaturesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListFeatures not implemented") +} +func (*UnimplementedCoreServiceServer) GetStorage(ctx context.Context, req *CoreServiceTypes_GetStorageRequest) (*CoreServiceTypes_GetStorageResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStorage not implemented") +} +func (*UnimplementedCoreServiceServer) ListStorage(ctx context.Context, req *empty.Empty) (*CoreServiceTypes_ListStorageResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListStorage not implemented") +} +func (*UnimplementedCoreServiceServer) ApplyFeature(ctx context.Context, req *specs.FeatureSpec) (*CoreServiceTypes_ApplyFeatureResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ApplyFeature not implemented") +} +func (*UnimplementedCoreServiceServer) ApplyFeatureGroup(ctx context.Context, req *specs.FeatureGroupSpec) (*CoreServiceTypes_ApplyFeatureGroupResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ApplyFeatureGroup not implemented") +} +func (*UnimplementedCoreServiceServer) ApplyEntity(ctx context.Context, req *specs.EntitySpec) (*CoreServiceTypes_ApplyEntityResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ApplyEntity not implemented") +} +func (*UnimplementedCoreServiceServer) ApplyStorage(ctx context.Context, req *specs.StorageSpec) (*CoreServiceTypes_ApplyStorageResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ApplyStorage not implemented") +} + func RegisterCoreServiceServer(s *grpc.Server, srv CoreServiceServer) { s.RegisterService(&_CoreService_serviceDesc, srv) } @@ -1004,49 +1098,3 @@ var _CoreService_serviceDesc = grpc.ServiceDesc{ Streams: []grpc.StreamDesc{}, Metadata: "feast/core/CoreService.proto", } - -func init() { - proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_CoreService_a0e9a1504f969203) -} - -var fileDescriptor_CoreService_a0e9a1504f969203 = []byte{ - // 602 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5b, 0x6f, 0x12, 0x41, - 0x14, 0x86, 0x34, 0x69, 0xe0, 0x40, 0x4c, 0x3b, 0x34, 0x8a, 0x23, 0x35, 0xcd, 0x26, 0x36, 0x7d, - 0x9a, 0xa9, 0xbd, 0xf8, 0xe4, 0x8b, 0x6d, 0x2a, 0xd1, 0x36, 0xc6, 0x2c, 0xfa, 0x60, 0x8d, 0x0f, - 0x5c, 0x0e, 0xeb, 0x2a, 0x30, 0xeb, 0xce, 0x60, 0xc2, 0xb3, 0xff, 0xcf, 0xdf, 0x64, 0x98, 0x1d, - 0x66, 0x07, 0xd8, 0xed, 0x12, 0xc3, 0x1b, 0x9c, 0xdb, 0x37, 0xdf, 0x39, 0xf3, 0x9d, 0x59, 0x68, - 0x0d, 0xb1, 0x2b, 0x15, 0xef, 0x8b, 0x18, 0xf9, 0xb5, 0x88, 0xb1, 0x83, 0xf1, 0xef, 
0xb0, 0x8f, - 0x2c, 0x8a, 0x85, 0x12, 0x04, 0xb4, 0x97, 0xcd, 0xbd, 0xd4, 0x44, 0xca, 0x08, 0xfb, 0x92, 0xdf, - 0x4c, 0x54, 0xa8, 0x66, 0x9d, 0x08, 0xfb, 0x49, 0x24, 0x3d, 0x74, 0xbd, 0x6f, 0xb1, 0xab, 0xa6, - 0x31, 0x3a, 0x6e, 0x2f, 0xc3, 0xdd, 0x8e, 0xc5, 0x34, 0xca, 0x2b, 0xd1, 0x51, 0x22, 0xee, 0x06, - 0x6e, 0x89, 0x67, 0x81, 0x10, 0xc1, 0x08, 0xb9, 0xfe, 0xd7, 0x9b, 0x0e, 0x39, 0x8e, 0x23, 0x35, - 0x4b, 0x9c, 0xde, 0xdf, 0x5d, 0xd8, 0x73, 0x8e, 0xff, 0x69, 0x16, 0xa1, 0xa4, 0xc7, 0x40, 0xda, - 0xa8, 0xf4, 0x51, 0x43, 0x94, 0x3e, 0xfe, 0x9a, 0xa2, 0x54, 0x64, 0x0f, 0x76, 0xc2, 0x81, 0x6c, - 0x96, 0x8f, 0x76, 0x4e, 0xaa, 0xfe, 0xfc, 0x27, 0x7d, 0x0f, 0x8d, 0xa5, 0x38, 0x19, 0x89, 0x89, - 0x44, 0x72, 0x0e, 0x15, 0x34, 0x36, 0x1d, 0x5d, 0x3b, 0x7b, 0xc2, 0x92, 0x7e, 0xe8, 0x23, 0xb2, - 0xb4, 0x07, 0xbe, 0x0d, 0xa4, 0xb7, 0x70, 0x70, 0x17, 0xca, 0x2d, 0x15, 0x4b, 0x08, 0x98, 0x76, - 0x3d, 0x40, 0xe0, 0x56, 0x13, 0x48, 0xe3, 0x0c, 0xe6, 0x05, 0x54, 0x86, 0xc6, 0x66, 0x30, 0x9b, - 0x4b, 0x98, 0xce, 0x98, 0x7c, 0x1b, 0x49, 0xef, 0x12, 0x06, 0x5b, 0xaa, 0xf6, 0x02, 0xf6, 0xdb, - 0xa8, 0xcc, 0x34, 0xf3, 0x19, 0xf8, 0x9a, 0xa9, 0x0d, 0x33, 0x90, 0xaf, 0xa1, 0x2e, 0xd3, 0x7b, - 0x90, 0x0d, 0xeb, 0x5c, 0x14, 0x7f, 0x29, 0x9a, 0x76, 0xa0, 0x31, 0x27, 0xb2, 0xdd, 0xa2, 0x97, - 0xd0, 0x78, 0x13, 0x45, 0xa3, 0x59, 0x32, 0x2f, 0x5b, 0xf4, 0x39, 0x80, 0x9e, 0xda, 0xec, 0x43, - 0x77, 0x8c, 0xcd, 0xf2, 0x51, 0xf9, 0xa4, 0xea, 0x3b, 0x16, 0x7a, 0x01, 0x07, 0x3a, 0xcd, 0x34, - 0xc9, 0xe6, 0xb5, 0xa0, 0x6a, 0x5a, 0xf5, 0x6e, 0x60, 0xd2, 0x52, 0x03, 0xbd, 0x86, 0xa7, 0x6e, - 0x96, 0x16, 0x8c, 0x4d, 0x3d, 0x86, 0x47, 0x43, 0xc7, 0x6e, 0xf3, 0x57, 0xac, 0x16, 0x7a, 0xb5, - 0x0f, 0x2d, 0xa8, 0x1a, 0x66, 0x29, 0xb4, 0x35, 0x9c, 0xfd, 0xa9, 0x40, 0xcd, 0x11, 0x14, 0x89, - 0xa1, 0xe6, 0x68, 0x84, 0x70, 0x96, 0x6e, 0x06, 0xb6, 0x2a, 0x3c, 0xb6, 0xae, 0x3a, 0x7a, 0xba, - 0x79, 0x42, 0x72, 0x3e, 0xaf, 0x44, 0xbe, 0x42, 0xdd, 0xd5, 0x12, 0x79, 0xcc, 0x92, 0x15, 0xc0, - 0x16, 0x2b, 0x80, 0xdd, 0xcc, 0x57, 0x00, 0x7d, 0xf9, 0x60, 0xed, 0x2c, 0x39, 0x7a, 0x25, 0x43, - 0x68, 0x71, 0xcb, 0x8b, 0x09, 0xad, 0xa8, 0xb0, 0x98, 0xd0, 0xaa, 0x80, 0x52, 0x42, 0x16, 0xf4, - 0xff, 0x09, 0x65, 0x14, 0x17, 0x00, 0xa9, 0x84, 0x08, 0x2b, 0x3a, 0xde, 0xb2, 0x24, 0x29, 0xdf, - 0x38, 0xde, 0x02, 0x7e, 0x81, 0x9a, 0xa3, 0xaf, 0x5c, 0x32, 0xa7, 0x85, 0x64, 0xd6, 0x4b, 0x7f, - 0x83, 0xba, 0x7b, 0xf1, 0x49, 0xee, 0xa6, 0x29, 0x68, 0x55, 0x96, 0xe6, 0xbc, 0x12, 0x19, 0xc1, - 0xfe, 0x9a, 0xae, 0xc8, 0x61, 0x16, 0x86, 0x7d, 0xa3, 0xe8, 0xab, 0x8d, 0x81, 0x96, 0x64, 0xea, - 0x95, 0xc8, 0x3d, 0xd4, 0x9c, 0x95, 0x41, 0xf2, 0xf6, 0x7e, 0x41, 0xa3, 0x32, 0xb6, 0x8e, 0xd3, - 0xa8, 0xc5, 0x10, 0x72, 0xd7, 0xd8, 0x26, 0x8d, 0x5a, 0x9b, 0xc3, 0xd5, 0x67, 0x70, 0xbe, 0x00, - 0xae, 0xdc, 0x17, 0xf6, 0xe3, 0x7c, 0xc8, 0xf7, 0x97, 0x41, 0xa8, 0xbe, 0x4f, 0x7b, 0xac, 0x2f, - 0xc6, 0x3c, 0x10, 0x3f, 0xf0, 0x27, 0x4f, 0xde, 0x70, 0x7d, 0x05, 0x24, 0x0f, 0x70, 0x82, 0x71, - 0x57, 0xe1, 0x80, 0x07, 0x82, 0xa7, 0x1f, 0x1a, 0xbd, 0x5d, 0xed, 0x3f, 0xff, 0x17, 0x00, 0x00, - 0xff, 0xff, 0x57, 0x65, 0xdd, 0x73, 0x7d, 0x08, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/core/DatasetService.pb.go b/protos/generated/go/feast/core/DatasetService.pb.go index d2d3b01c93..e93f569c8e 100644 --- a/protos/generated/go/feast/core/DatasetService.pb.go +++ b/protos/generated/go/feast/core/DatasetService.pb.go @@ -1,16 +1,17 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// source: feast/core/DatasetService.proto -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +package core import ( - context "golang.org/x/net/context" + context "context" + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -22,7 +23,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type DatasetServiceTypes struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -34,16 +35,17 @@ func (m *DatasetServiceTypes) Reset() { *m = DatasetServiceTypes{} } func (m *DatasetServiceTypes) String() string { return proto.CompactTextString(m) } func (*DatasetServiceTypes) ProtoMessage() {} func (*DatasetServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0} + return fileDescriptor_3edc37a8b0d37b39, []int{0} } + func (m *DatasetServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetServiceTypes.Unmarshal(m, b) } func (m *DatasetServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DatasetServiceTypes.Marshal(b, m, deterministic) } -func (dst *DatasetServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetServiceTypes.Merge(dst, src) +func (m *DatasetServiceTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetServiceTypes.Merge(m, src) } func (m *DatasetServiceTypes) XXX_Size() int { return xxx_messageInfo_DatasetServiceTypes.Size(m) @@ -77,16 +79,17 @@ func (m *DatasetServiceTypes_CreateDatasetRequest) Reset() { func (m *DatasetServiceTypes_CreateDatasetRequest) String() string { return proto.CompactTextString(m) } func (*DatasetServiceTypes_CreateDatasetRequest) ProtoMessage() {} func (*DatasetServiceTypes_CreateDatasetRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0, 0} + return fileDescriptor_3edc37a8b0d37b39, []int{0, 0} } + func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Unmarshal(m, b) } func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Marshal(b, m, deterministic) } -func (dst *DatasetServiceTypes_CreateDatasetRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Merge(dst, src) +func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Merge(m, src) } func (m *DatasetServiceTypes_CreateDatasetRequest) XXX_Size() int { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetRequest.Size(m) @@ -146,16 +149,17 @@ func (m *DatasetServiceTypes_CreateDatasetResponse) Reset() { func (m *DatasetServiceTypes_CreateDatasetResponse) String() 
string { return proto.CompactTextString(m) } func (*DatasetServiceTypes_CreateDatasetResponse) ProtoMessage() {} func (*DatasetServiceTypes_CreateDatasetResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{0, 1} + return fileDescriptor_3edc37a8b0d37b39, []int{0, 1} } + func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Unmarshal(m, b) } func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Marshal(b, m, deterministic) } -func (dst *DatasetServiceTypes_CreateDatasetResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Merge(dst, src) +func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Merge(m, src) } func (m *DatasetServiceTypes_CreateDatasetResponse) XXX_Size() int { return xxx_messageInfo_DatasetServiceTypes_CreateDatasetResponse.Size(m) @@ -188,16 +192,17 @@ func (m *FeatureSet) Reset() { *m = FeatureSet{} } func (m *FeatureSet) String() string { return proto.CompactTextString(m) } func (*FeatureSet) ProtoMessage() {} func (*FeatureSet) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{1} + return fileDescriptor_3edc37a8b0d37b39, []int{1} } + func (m *FeatureSet) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureSet.Unmarshal(m, b) } func (m *FeatureSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureSet.Marshal(b, m, deterministic) } -func (dst *FeatureSet) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureSet.Merge(dst, src) +func (m *FeatureSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSet.Merge(m, src) } func (m *FeatureSet) XXX_Size() int { return xxx_messageInfo_FeatureSet.Size(m) @@ -237,16 +242,17 @@ func (m *DatasetInfo) Reset() { *m = DatasetInfo{} } func (m *DatasetInfo) String() string { return proto.CompactTextString(m) } func (*DatasetInfo) ProtoMessage() {} func (*DatasetInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_DatasetService_37ae639a8c7b5dd5, []int{2} + return fileDescriptor_3edc37a8b0d37b39, []int{2} } + func (m *DatasetInfo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DatasetInfo.Unmarshal(m, b) } func (m *DatasetInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DatasetInfo.Marshal(b, m, deterministic) } -func (dst *DatasetInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_DatasetInfo.Merge(dst, src) +func (m *DatasetInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetInfo.Merge(m, src) } func (m *DatasetInfo) XXX_Size() int { return xxx_messageInfo_DatasetInfo.Size(m) @@ -279,6 +285,38 @@ func init() { proto.RegisterType((*DatasetInfo)(nil), "feast.core.DatasetInfo") } +func init() { proto.RegisterFile("feast/core/DatasetService.proto", fileDescriptor_3edc37a8b0d37b39) } + +var fileDescriptor_3edc37a8b0d37b39 = []byte{ + // 414 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xc1, 0x6e, 0xd4, 0x30, + 0x10, 0x25, 0xbb, 0x2d, 0x90, 0x59, 0xc1, 0xc1, 0x14, 0x88, 0x72, 0xa0, 0x51, 0x4e, 0x7b, 0xb2, + 0xa5, 0xd2, 0x22, 0x38, 0x70, 0x29, 0x2b, 0xa4, 0x4a, 0x08, 0x55, 0x6e, 0x91, 0x10, 0x37, 0x67, + 0x33, 
0x09, 0x81, 0x24, 0x0e, 0xf6, 0x04, 0xd1, 0x0b, 0xdf, 0xc0, 0x27, 0xf2, 0x11, 0x7c, 0x00, + 0x8a, 0xb3, 0xdb, 0x78, 0xab, 0x4a, 0x88, 0x9b, 0x3d, 0xef, 0xcd, 0x78, 0xde, 0xf8, 0x0d, 0x1c, + 0x16, 0xa8, 0x2c, 0x89, 0xb5, 0x36, 0x28, 0x56, 0x8a, 0x94, 0x45, 0xba, 0x40, 0xf3, 0xbd, 0x5a, + 0x23, 0xef, 0x8c, 0x26, 0xcd, 0xc0, 0x11, 0xf8, 0x40, 0x88, 0x0f, 0x4b, 0xad, 0xcb, 0x1a, 0x85, + 0x43, 0xb2, 0xbe, 0x10, 0x54, 0x35, 0x68, 0x49, 0x35, 0xdd, 0x48, 0x4e, 0x7f, 0xcf, 0xe0, 0xd1, + 0x6e, 0x95, 0xcb, 0xab, 0x0e, 0x6d, 0xfc, 0x27, 0x80, 0x83, 0x37, 0x06, 0x15, 0xe1, 0x06, 0x95, + 0xf8, 0xad, 0x47, 0x4b, 0xec, 0x05, 0x0c, 0xf5, 0xa9, 0x37, 0x78, 0x81, 0x14, 0x05, 0x49, 0xb0, + 0x5c, 0x1c, 0x3d, 0xe1, 0xd3, 0x93, 0xfc, 0xed, 0x35, 0x2a, 0x3d, 0x26, 0x7b, 0x09, 0xa1, 0x25, + 0x65, 0x68, 0xa5, 0x08, 0xa3, 0x99, 0x4b, 0x8b, 0xf9, 0xd8, 0x1d, 0xdf, 0x76, 0xc7, 0x2f, 0xb7, + 0xdd, 0xc9, 0x89, 0xcc, 0x8e, 0xe1, 0x1e, 0xb6, 0xb9, 0xcb, 0x9b, 0xff, 0x33, 0x6f, 0x4b, 0x65, + 0x07, 0xb0, 0x5f, 0x57, 0x4d, 0x45, 0xd1, 0x5e, 0x12, 0x2c, 0xe7, 0x72, 0xbc, 0xb0, 0x67, 0x00, + 0xad, 0x6a, 0xf0, 0xdc, 0x60, 0x51, 0xfd, 0x88, 0xf6, 0x93, 0x60, 0x19, 0x4a, 0x2f, 0x12, 0x4b, + 0x78, 0x7c, 0x43, 0xb5, 0xed, 0x74, 0x6b, 0x91, 0xbd, 0x82, 0x45, 0x3e, 0x86, 0xce, 0xda, 0x42, + 0x6f, 0x74, 0x3f, 0xf5, 0x75, 0xaf, 0x26, 0x58, 0xfa, 0xdc, 0xf4, 0x1d, 0xc0, 0x34, 0x93, 0xa1, + 0x03, 0x6c, 0xa9, 0xa2, 0xab, 0xf7, 0xaa, 0x41, 0x57, 0x27, 0x94, 0x5e, 0x64, 0xc0, 0x37, 0x53, + 0x3b, 0xcb, 0x6d, 0x34, 0x4b, 0xe6, 0x03, 0x3e, 0x45, 0xd2, 0xd7, 0xb0, 0xf0, 0x5e, 0x62, 0x0c, + 0xf6, 0xda, 0xa9, 0x90, 0x3b, 0xb3, 0x18, 0xee, 0x93, 0xca, 0x6a, 0xfc, 0x60, 0x6a, 0x37, 0xe9, + 0x50, 0x5e, 0xdf, 0x8f, 0x7e, 0x05, 0xf0, 0x70, 0xf7, 0xbf, 0xd9, 0x4f, 0x78, 0xb0, 0xa3, 0x99, + 0x1d, 0xdf, 0x22, 0xcb, 0x37, 0x07, 0xbf, 0xcd, 0x18, 0xf1, 0xc9, 0x7f, 0x66, 0x8d, 0x83, 0x4d, + 0xef, 0x9c, 0x7e, 0x04, 0xcf, 0xb1, 0xa7, 0x37, 0xdc, 0x78, 0x3e, 0x7c, 0xf1, 0xa7, 0x93, 0xb2, + 0xa2, 0xcf, 0x7d, 0xc6, 0xd7, 0xba, 0x11, 0xa5, 0xfe, 0x82, 0x5f, 0xc5, 0xb8, 0x04, 0xce, 0x00, + 0x56, 0x94, 0xd8, 0xa2, 0x51, 0x84, 0xb9, 0x28, 0xb5, 0x98, 0xd6, 0x23, 0xbb, 0xeb, 0xf0, 0xe7, + 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xbb, 0x9e, 0x4d, 0xe0, 0x33, 0x03, 0x00, 0x00, +} + // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn @@ -318,6 +356,14 @@ type DatasetServiceServer interface { CreateDataset(context.Context, *DatasetServiceTypes_CreateDatasetRequest) (*DatasetServiceTypes_CreateDatasetResponse, error) } +// UnimplementedDatasetServiceServer can be embedded to have forward compatible implementations. 
+type UnimplementedDatasetServiceServer struct { +} + +func (*UnimplementedDatasetServiceServer) CreateDataset(ctx context.Context, req *DatasetServiceTypes_CreateDatasetRequest) (*DatasetServiceTypes_CreateDatasetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateDataset not implemented") +} + func RegisterDatasetServiceServer(s *grpc.Server, srv DatasetServiceServer) { s.RegisterService(&_DatasetService_serviceDesc, srv) } @@ -352,37 +398,3 @@ var _DatasetService_serviceDesc = grpc.ServiceDesc{ Streams: []grpc.StreamDesc{}, Metadata: "feast/core/DatasetService.proto", } - -func init() { - proto.RegisterFile("feast/core/DatasetService.proto", fileDescriptor_DatasetService_37ae639a8c7b5dd5) -} - -var fileDescriptor_DatasetService_37ae639a8c7b5dd5 = []byte{ - // 414 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xc1, 0x6e, 0xd4, 0x30, - 0x10, 0x25, 0xbb, 0x2d, 0x90, 0x59, 0xc1, 0xc1, 0x14, 0x88, 0x72, 0xa0, 0x51, 0x4e, 0x7b, 0xb2, - 0xa5, 0xd2, 0x22, 0x38, 0x70, 0x29, 0x2b, 0xa4, 0x4a, 0x08, 0x55, 0x6e, 0x91, 0x10, 0x37, 0x67, - 0x33, 0x09, 0x81, 0x24, 0x0e, 0xf6, 0x04, 0xd1, 0x0b, 0xdf, 0xc0, 0x27, 0xf2, 0x11, 0x7c, 0x00, - 0x8a, 0xb3, 0xdb, 0x78, 0xab, 0x4a, 0x88, 0x9b, 0x3d, 0xef, 0xcd, 0x78, 0xde, 0xf8, 0x0d, 0x1c, - 0x16, 0xa8, 0x2c, 0x89, 0xb5, 0x36, 0x28, 0x56, 0x8a, 0x94, 0x45, 0xba, 0x40, 0xf3, 0xbd, 0x5a, - 0x23, 0xef, 0x8c, 0x26, 0xcd, 0xc0, 0x11, 0xf8, 0x40, 0x88, 0x0f, 0x4b, 0xad, 0xcb, 0x1a, 0x85, - 0x43, 0xb2, 0xbe, 0x10, 0x54, 0x35, 0x68, 0x49, 0x35, 0xdd, 0x48, 0x4e, 0x7f, 0xcf, 0xe0, 0xd1, - 0x6e, 0x95, 0xcb, 0xab, 0x0e, 0x6d, 0xfc, 0x27, 0x80, 0x83, 0x37, 0x06, 0x15, 0xe1, 0x06, 0x95, - 0xf8, 0xad, 0x47, 0x4b, 0xec, 0x05, 0x0c, 0xf5, 0xa9, 0x37, 0x78, 0x81, 0x14, 0x05, 0x49, 0xb0, - 0x5c, 0x1c, 0x3d, 0xe1, 0xd3, 0x93, 0xfc, 0xed, 0x35, 0x2a, 0x3d, 0x26, 0x7b, 0x09, 0xa1, 0x25, - 0x65, 0x68, 0xa5, 0x08, 0xa3, 0x99, 0x4b, 0x8b, 0xf9, 0xd8, 0x1d, 0xdf, 0x76, 0xc7, 0x2f, 0xb7, - 0xdd, 0xc9, 0x89, 0xcc, 0x8e, 0xe1, 0x1e, 0xb6, 0xb9, 0xcb, 0x9b, 0xff, 0x33, 0x6f, 0x4b, 0x65, - 0x07, 0xb0, 0x5f, 0x57, 0x4d, 0x45, 0xd1, 0x5e, 0x12, 0x2c, 0xe7, 0x72, 0xbc, 0xb0, 0x67, 0x00, - 0xad, 0x6a, 0xf0, 0xdc, 0x60, 0x51, 0xfd, 0x88, 0xf6, 0x93, 0x60, 0x19, 0x4a, 0x2f, 0x12, 0x4b, - 0x78, 0x7c, 0x43, 0xb5, 0xed, 0x74, 0x6b, 0x91, 0xbd, 0x82, 0x45, 0x3e, 0x86, 0xce, 0xda, 0x42, - 0x6f, 0x74, 0x3f, 0xf5, 0x75, 0xaf, 0x26, 0x58, 0xfa, 0xdc, 0xf4, 0x1d, 0xc0, 0x34, 0x93, 0xa1, - 0x03, 0x6c, 0xa9, 0xa2, 0xab, 0xf7, 0xaa, 0x41, 0x57, 0x27, 0x94, 0x5e, 0x64, 0xc0, 0x37, 0x53, - 0x3b, 0xcb, 0x6d, 0x34, 0x4b, 0xe6, 0x03, 0x3e, 0x45, 0xd2, 0xd7, 0xb0, 0xf0, 0x5e, 0x62, 0x0c, - 0xf6, 0xda, 0xa9, 0x90, 0x3b, 0xb3, 0x18, 0xee, 0x93, 0xca, 0x6a, 0xfc, 0x60, 0x6a, 0x37, 0xe9, - 0x50, 0x5e, 0xdf, 0x8f, 0x7e, 0x05, 0xf0, 0x70, 0xf7, 0xbf, 0xd9, 0x4f, 0x78, 0xb0, 0xa3, 0x99, - 0x1d, 0xdf, 0x22, 0xcb, 0x37, 0x07, 0xbf, 0xcd, 0x18, 0xf1, 0xc9, 0x7f, 0x66, 0x8d, 0x83, 0x4d, - 0xef, 0x9c, 0x7e, 0x04, 0xcf, 0xb1, 0xa7, 0x37, 0xdc, 0x78, 0x3e, 0x7c, 0xf1, 0xa7, 0x93, 0xb2, - 0xa2, 0xcf, 0x7d, 0xc6, 0xd7, 0xba, 0x11, 0xa5, 0xfe, 0x82, 0x5f, 0xc5, 0xb8, 0x04, 0xce, 0x00, - 0x56, 0x94, 0xd8, 0xa2, 0x51, 0x84, 0xb9, 0x28, 0xb5, 0x98, 0xd6, 0x23, 0xbb, 0xeb, 0xf0, 0xe7, - 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xbb, 0x9e, 0x4d, 0xe0, 0x33, 0x03, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/core/JobService.pb.go b/protos/generated/go/feast/core/JobService.pb.go index 6a3a4e6dc0..2a6455e5c8 100644 --- 
a/protos/generated/go/feast/core/JobService.pb.go +++ b/protos/generated/go/feast/core/JobService.pb.go @@ -1,18 +1,19 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/core/JobService.proto -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import specs "github.com/gojek/feast/protos/generated/go/feast/specs" -import empty "github.com/golang/protobuf/ptypes/empty" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +package core import ( - context "golang.org/x/net/context" + context "context" + fmt "fmt" + specs "github.com/gojek/feast/protos/generated/go/feast/specs" + proto "github.com/golang/protobuf/proto" + empty "github.com/golang/protobuf/ptypes/empty" + timestamp "github.com/golang/protobuf/ptypes/timestamp" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -24,7 +25,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type JobServiceTypes struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -36,16 +37,17 @@ func (m *JobServiceTypes) Reset() { *m = JobServiceTypes{} } func (m *JobServiceTypes) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes) ProtoMessage() {} func (*JobServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0} + return fileDescriptor_7115affcfc7885c4, []int{0} } + func (m *JobServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes.Unmarshal(m, b) } func (m *JobServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes.Merge(dst, src) +func (m *JobServiceTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes.Merge(m, src) } func (m *JobServiceTypes) XXX_Size() int { return xxx_messageInfo_JobServiceTypes.Size(m) @@ -70,16 +72,17 @@ func (m *JobServiceTypes_SubmitImportJobRequest) Reset() { func (m *JobServiceTypes_SubmitImportJobRequest) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_SubmitImportJobRequest) ProtoMessage() {} func (*JobServiceTypes_SubmitImportJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 0} + return fileDescriptor_7115affcfc7885c4, []int{0, 0} } + func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Unmarshal(m, b) } func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_SubmitImportJobRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Merge(dst, src) +func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Merge(m, src) } func (m *JobServiceTypes_SubmitImportJobRequest) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_SubmitImportJobRequest.Size(m) @@ -117,16 +120,17 @@ func (m *JobServiceTypes_SubmitImportJobResponse) Reset() { func (m *JobServiceTypes_SubmitImportJobResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_SubmitImportJobResponse) ProtoMessage() {} func (*JobServiceTypes_SubmitImportJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 1} + return fileDescriptor_7115affcfc7885c4, []int{0, 1} } + func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Unmarshal(m, b) } func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_SubmitImportJobResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Merge(dst, src) +func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Merge(m, src) } func (m *JobServiceTypes_SubmitImportJobResponse) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_SubmitImportJobResponse.Size(m) @@ -155,16 +159,17 @@ func (m *JobServiceTypes_ListJobsResponse) Reset() { *m = JobServiceType func (m *JobServiceTypes_ListJobsResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_ListJobsResponse) ProtoMessage() {} func (*JobServiceTypes_ListJobsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 2} + return fileDescriptor_7115affcfc7885c4, []int{0, 2} } + func (m *JobServiceTypes_ListJobsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Unmarshal(m, b) } func (m *JobServiceTypes_ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_ListJobsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_ListJobsResponse.Merge(dst, src) +func (m *JobServiceTypes_ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_ListJobsResponse.Merge(m, src) } func (m *JobServiceTypes_ListJobsResponse) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_ListJobsResponse.Size(m) @@ -193,16 +198,17 @@ func (m *JobServiceTypes_GetJobRequest) Reset() { *m = JobServiceTypes_G func (m *JobServiceTypes_GetJobRequest) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_GetJobRequest) ProtoMessage() {} func (*JobServiceTypes_GetJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 3} + return fileDescriptor_7115affcfc7885c4, []int{0, 3} } + func (m *JobServiceTypes_GetJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_GetJobRequest.Unmarshal(m, b) } func (m *JobServiceTypes_GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_GetJobRequest.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_GetJobRequest) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_JobServiceTypes_GetJobRequest.Merge(dst, src) +func (m *JobServiceTypes_GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_GetJobRequest.Merge(m, src) } func (m *JobServiceTypes_GetJobRequest) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_GetJobRequest.Size(m) @@ -231,16 +237,17 @@ func (m *JobServiceTypes_GetJobResponse) Reset() { *m = JobServiceTypes_ func (m *JobServiceTypes_GetJobResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_GetJobResponse) ProtoMessage() {} func (*JobServiceTypes_GetJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 4} + return fileDescriptor_7115affcfc7885c4, []int{0, 4} } + func (m *JobServiceTypes_GetJobResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_GetJobResponse.Unmarshal(m, b) } func (m *JobServiceTypes_GetJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_GetJobResponse.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_GetJobResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_GetJobResponse.Merge(dst, src) +func (m *JobServiceTypes_GetJobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_GetJobResponse.Merge(m, src) } func (m *JobServiceTypes_GetJobResponse) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_GetJobResponse.Size(m) @@ -269,16 +276,17 @@ func (m *JobServiceTypes_AbortJobRequest) Reset() { *m = JobServiceTypes func (m *JobServiceTypes_AbortJobRequest) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_AbortJobRequest) ProtoMessage() {} func (*JobServiceTypes_AbortJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 5} + return fileDescriptor_7115affcfc7885c4, []int{0, 5} } + func (m *JobServiceTypes_AbortJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Unmarshal(m, b) } func (m *JobServiceTypes_AbortJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_AbortJobRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_AbortJobRequest.Merge(dst, src) +func (m *JobServiceTypes_AbortJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_AbortJobRequest.Merge(m, src) } func (m *JobServiceTypes_AbortJobRequest) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_AbortJobRequest.Size(m) @@ -307,16 +315,17 @@ func (m *JobServiceTypes_AbortJobResponse) Reset() { *m = JobServiceType func (m *JobServiceTypes_AbortJobResponse) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_AbortJobResponse) ProtoMessage() {} func (*JobServiceTypes_AbortJobResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 6} + return fileDescriptor_7115affcfc7885c4, []int{0, 6} } + func (m *JobServiceTypes_AbortJobResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Unmarshal(m, b) } func (m *JobServiceTypes_AbortJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_AbortJobResponse) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_JobServiceTypes_AbortJobResponse.Merge(dst, src) +func (m *JobServiceTypes_AbortJobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_AbortJobResponse.Merge(m, src) } func (m *JobServiceTypes_AbortJobResponse) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_AbortJobResponse.Size(m) @@ -356,16 +365,17 @@ func (m *JobServiceTypes_JobDetail) Reset() { *m = JobServiceTypes_JobDe func (m *JobServiceTypes_JobDetail) String() string { return proto.CompactTextString(m) } func (*JobServiceTypes_JobDetail) ProtoMessage() {} func (*JobServiceTypes_JobDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_JobService_edcd183b773c9f62, []int{0, 7} + return fileDescriptor_7115affcfc7885c4, []int{0, 7} } + func (m *JobServiceTypes_JobDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_JobServiceTypes_JobDetail.Unmarshal(m, b) } func (m *JobServiceTypes_JobDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_JobServiceTypes_JobDetail.Marshal(b, m, deterministic) } -func (dst *JobServiceTypes_JobDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobServiceTypes_JobDetail.Merge(dst, src) +func (m *JobServiceTypes_JobDetail) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobServiceTypes_JobDetail.Merge(m, src) } func (m *JobServiceTypes_JobDetail) XXX_Size() int { return xxx_messageInfo_JobServiceTypes_JobDetail.Size(m) @@ -459,6 +469,51 @@ func init() { proto.RegisterMapType((map[string]float64)(nil), "feast.core.JobServiceTypes.JobDetail.MetricsEntry") } +func init() { proto.RegisterFile("feast/core/JobService.proto", fileDescriptor_7115affcfc7885c4) } + +var fileDescriptor_7115affcfc7885c4 = []byte{ + // 621 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x4e, 0xdb, 0x4c, + 0x10, 0x55, 0x62, 0x08, 0xf1, 0xf0, 0x7d, 0x80, 0x56, 0x15, 0x58, 0x4b, 0x25, 0x52, 0xa4, 0x4a, + 0xe9, 0x8f, 0x6c, 0x29, 0xb4, 0xa2, 0x45, 0xbd, 0x29, 0x2a, 0xaa, 0x82, 0x40, 0x42, 0x86, 0xde, + 0xf4, 0xa6, 0xb2, 0x9d, 0xc1, 0xdd, 0x10, 0x7b, 0x5d, 0xef, 0x1a, 0x35, 0xef, 0xd2, 0x37, 0xe8, + 0x13, 0xf5, 0x6d, 0xaa, 0xdd, 0xb5, 0x63, 0x63, 0xaa, 0x94, 0xde, 0xf9, 0xec, 0x9c, 0xd9, 0xb3, + 0x73, 0x3c, 0x33, 0xb0, 0x7b, 0x8d, 0x81, 0x90, 0x5e, 0xc4, 0x73, 0xf4, 0x4e, 0x79, 0x78, 0x89, + 0xf9, 0x2d, 0x8b, 0xd0, 0xcd, 0x72, 0x2e, 0x39, 0x01, 0x1d, 0x74, 0x55, 0x90, 0x3e, 0x36, 0x44, + 0x91, 0x61, 0x24, 0xbc, 0x71, 0x92, 0xf1, 0x5c, 0x5e, 0x66, 0x18, 0x19, 0x26, 0xdd, 0x8d, 0x39, + 0x8f, 0x67, 0xe8, 0x69, 0x14, 0x16, 0xd7, 0x1e, 0x26, 0x99, 0x9c, 0x97, 0xc1, 0xbd, 0x76, 0x50, + 0xb2, 0x04, 0x85, 0x0c, 0x92, 0xcc, 0x10, 0xf6, 0x7f, 0xf5, 0x60, 0xb3, 0x16, 0xbf, 0x9a, 0x67, + 0x28, 0x28, 0xc2, 0xf6, 0x65, 0x11, 0x26, 0x4c, 0x1a, 0xad, 0x53, 0x1e, 0xfa, 0xf8, 0xad, 0x40, + 0x21, 0xc9, 0x21, 0x00, 0x5b, 0xe8, 0x3b, 0x9d, 0x41, 0x67, 0xb8, 0x3e, 0xda, 0x71, 0xcd, 0x53, + 0xf5, 0xf3, 0xdc, 0xfa, 0x79, 0x7e, 0x83, 0x4a, 0x08, 0xac, 0xa4, 0x41, 0x82, 0x4e, 0x77, 0xd0, + 0x19, 0xda, 0xbe, 0xfe, 0xa6, 0x1e, 0xec, 0xdc, 0x93, 0x11, 0x19, 0x4f, 0x05, 0x92, 0x47, 0xb0, + 0x3a, 0xe5, 0xe1, 0x78, 0xa2, 0x25, 0x6c, 0xdf, 0x00, 0x7a, 0x0e, 0x5b, 0x67, 0x4c, 0x28, 0xa2, + 0x58, 0x30, 0xdf, 0xc2, 0xca, 0x94, 0x87, 0xc2, 0xe9, 0x0c, 0xac, 0xe1, 0xfa, 0xe8, 0xa9, 0x5b, + 0xdb, 0xe6, 0xb6, 0xca, 0x52, 0xf8, 0x03, 0xca, 0x80, 0xcd, 0x7c, 0x9d, 0x42, 0xf7, 0xe0, 0xff, + 0x8f, 0xd8, 0xac, 0x6e, 0x03, 0xba, 0xac, 0x92, 0xec, 0xb2, 0x09, 0x1d, 0xc3, 0x46, 0x45, 0x28, + 0xd5, 0x0e, 0xc1, 
0x9a, 0xf2, 0xb0, 0x2c, 0xfc, 0x81, 0x62, 0x2a, 0x83, 0x3e, 0x81, 0xcd, 0xf7, + 0xe1, 0x5d, 0x2f, 0xdb, 0x6a, 0xfb, 0xb0, 0x55, 0x53, 0x4a, 0xbd, 0x36, 0xe7, 0xa7, 0x05, 0xf6, + 0xe2, 0xe6, 0x76, 0x54, 0xb9, 0x86, 0xdf, 0xe5, 0x78, 0x52, 0xba, 0x6c, 0x80, 0xb2, 0x5e, 0xce, + 0x33, 0x74, 0x2c, 0x63, 0xbd, 0xfa, 0x26, 0xdb, 0xd0, 0xcb, 0x8b, 0x34, 0xc5, 0xdc, 0x59, 0xd1, + 0xa7, 0x25, 0x52, 0xe7, 0x42, 0x06, 0xb2, 0x10, 0xce, 0xaa, 0x39, 0x37, 0x88, 0x50, 0xe8, 0x63, + 0x2a, 0x99, 0x64, 0x28, 0x9c, 0xde, 0xc0, 0x1a, 0xda, 0xfe, 0x02, 0xab, 0xd8, 0x35, 0x06, 0xb2, + 0xc8, 0x51, 0x38, 0x6b, 0x26, 0x56, 0x61, 0x72, 0x06, 0x6b, 0x09, 0xca, 0x9c, 0x45, 0xc2, 0xe9, + 0xeb, 0x1f, 0x34, 0x7a, 0x90, 0x67, 0xee, 0xb9, 0x49, 0x3a, 0x49, 0x65, 0x3e, 0xf7, 0xab, 0x2b, + 0xc8, 0x3b, 0x58, 0x9f, 0x05, 0x42, 0x7e, 0xca, 0x26, 0x81, 0xc4, 0x89, 0x63, 0xeb, 0xbf, 0x40, + 0x5d, 0xd3, 0xe2, 0x6e, 0xd5, 0xe2, 0xee, 0x55, 0xd5, 0xe2, 0x7e, 0x93, 0x4e, 0x5e, 0xc1, 0x5a, + 0x94, 0xa3, 0xce, 0x84, 0xbf, 0x66, 0x56, 0x54, 0x7a, 0x04, 0xff, 0x35, 0x1f, 0x43, 0xb6, 0xc0, + 0xba, 0xc1, 0x79, 0x69, 0xba, 0xfa, 0x54, 0xae, 0xdf, 0x06, 0xb3, 0xc2, 0xf4, 0x76, 0xc7, 0x37, + 0xe0, 0xa8, 0xfb, 0xa6, 0x33, 0xfa, 0x61, 0x01, 0xd4, 0x35, 0x12, 0x09, 0xb6, 0xe9, 0xf7, 0x53, + 0x1e, 0x92, 0xa5, 0x46, 0xfc, 0x79, 0xfa, 0xe8, 0xc1, 0x3f, 0xe5, 0x94, 0x2d, 0x74, 0x01, 0xfd, + 0x6a, 0x68, 0xc8, 0xf6, 0xbd, 0x8a, 0x4f, 0xd4, 0xae, 0xa0, 0x2f, 0x97, 0x5d, 0x7c, 0x6f, 0xe4, + 0xbe, 0x40, 0xcf, 0x8c, 0x05, 0x79, 0xb6, 0x2c, 0xef, 0xce, 0x6c, 0xd1, 0xe7, 0x0f, 0xa1, 0x96, + 0x02, 0x08, 0xfd, 0x6a, 0x12, 0xc8, 0x8b, 0x65, 0x79, 0xad, 0x91, 0x5a, 0x5e, 0x47, 0x7b, 0xb8, + 0x8e, 0xaf, 0xa0, 0xb1, 0x64, 0x8f, 0x1b, 0x5b, 0xf0, 0x42, 0xb9, 0xf3, 0xf9, 0x75, 0xcc, 0xe4, + 0xd7, 0x22, 0x74, 0x23, 0x9e, 0x78, 0x31, 0x9f, 0xe2, 0x8d, 0x67, 0xd6, 0xb0, 0xf6, 0x4e, 0x78, + 0x31, 0xa6, 0x98, 0xab, 0x16, 0xf1, 0x62, 0xee, 0xd5, 0x9b, 0x3c, 0xec, 0xe9, 0xf8, 0xc1, 0xef, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xc7, 0xf4, 0xb6, 0x7c, 0xde, 0x05, 0x00, 0x00, +} + // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn @@ -537,6 +592,23 @@ type JobServiceServer interface { AbortJob(context.Context, *JobServiceTypes_AbortJobRequest) (*JobServiceTypes_AbortJobResponse, error) } +// UnimplementedJobServiceServer can be embedded to have forward compatible implementations. 
+type UnimplementedJobServiceServer struct { +} + +func (*UnimplementedJobServiceServer) SubmitJob(ctx context.Context, req *JobServiceTypes_SubmitImportJobRequest) (*JobServiceTypes_SubmitImportJobResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SubmitJob not implemented") +} +func (*UnimplementedJobServiceServer) ListJobs(ctx context.Context, req *empty.Empty) (*JobServiceTypes_ListJobsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListJobs not implemented") +} +func (*UnimplementedJobServiceServer) GetJob(ctx context.Context, req *JobServiceTypes_GetJobRequest) (*JobServiceTypes_GetJobResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetJob not implemented") +} +func (*UnimplementedJobServiceServer) AbortJob(ctx context.Context, req *JobServiceTypes_AbortJobRequest) (*JobServiceTypes_AbortJobResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AbortJob not implemented") +} + func RegisterJobServiceServer(s *grpc.Server, srv JobServiceServer) { s.RegisterService(&_JobService_serviceDesc, srv) } @@ -637,50 +709,3 @@ var _JobService_serviceDesc = grpc.ServiceDesc{ Streams: []grpc.StreamDesc{}, Metadata: "feast/core/JobService.proto", } - -func init() { - proto.RegisterFile("feast/core/JobService.proto", fileDescriptor_JobService_edcd183b773c9f62) -} - -var fileDescriptor_JobService_edcd183b773c9f62 = []byte{ - // 621 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x4e, 0xdb, 0x4c, - 0x10, 0x55, 0x62, 0x08, 0xf1, 0xf0, 0x7d, 0x80, 0x56, 0x15, 0x58, 0x4b, 0x25, 0x52, 0xa4, 0x4a, - 0xe9, 0x8f, 0x6c, 0x29, 0xb4, 0xa2, 0x45, 0xbd, 0x29, 0x2a, 0xaa, 0x82, 0x40, 0x42, 0x86, 0xde, - 0xf4, 0xa6, 0xb2, 0x9d, 0xc1, 0xdd, 0x10, 0x7b, 0x5d, 0xef, 0x1a, 0x35, 0xef, 0xd2, 0x37, 0xe8, - 0x13, 0xf5, 0x6d, 0xaa, 0xdd, 0xb5, 0x63, 0x63, 0xaa, 0x94, 0xde, 0xf9, 0xec, 0x9c, 0xd9, 0xb3, - 0x73, 0x3c, 0x33, 0xb0, 0x7b, 0x8d, 0x81, 0x90, 0x5e, 0xc4, 0x73, 0xf4, 0x4e, 0x79, 0x78, 0x89, - 0xf9, 0x2d, 0x8b, 0xd0, 0xcd, 0x72, 0x2e, 0x39, 0x01, 0x1d, 0x74, 0x55, 0x90, 0x3e, 0x36, 0x44, - 0x91, 0x61, 0x24, 0xbc, 0x71, 0x92, 0xf1, 0x5c, 0x5e, 0x66, 0x18, 0x19, 0x26, 0xdd, 0x8d, 0x39, - 0x8f, 0x67, 0xe8, 0x69, 0x14, 0x16, 0xd7, 0x1e, 0x26, 0x99, 0x9c, 0x97, 0xc1, 0xbd, 0x76, 0x50, - 0xb2, 0x04, 0x85, 0x0c, 0x92, 0xcc, 0x10, 0xf6, 0x7f, 0xf5, 0x60, 0xb3, 0x16, 0xbf, 0x9a, 0x67, - 0x28, 0x28, 0xc2, 0xf6, 0x65, 0x11, 0x26, 0x4c, 0x1a, 0xad, 0x53, 0x1e, 0xfa, 0xf8, 0xad, 0x40, - 0x21, 0xc9, 0x21, 0x00, 0x5b, 0xe8, 0x3b, 0x9d, 0x41, 0x67, 0xb8, 0x3e, 0xda, 0x71, 0xcd, 0x53, - 0xf5, 0xf3, 0xdc, 0xfa, 0x79, 0x7e, 0x83, 0x4a, 0x08, 0xac, 0xa4, 0x41, 0x82, 0x4e, 0x77, 0xd0, - 0x19, 0xda, 0xbe, 0xfe, 0xa6, 0x1e, 0xec, 0xdc, 0x93, 0x11, 0x19, 0x4f, 0x05, 0x92, 0x47, 0xb0, - 0x3a, 0xe5, 0xe1, 0x78, 0xa2, 0x25, 0x6c, 0xdf, 0x00, 0x7a, 0x0e, 0x5b, 0x67, 0x4c, 0x28, 0xa2, - 0x58, 0x30, 0xdf, 0xc2, 0xca, 0x94, 0x87, 0xc2, 0xe9, 0x0c, 0xac, 0xe1, 0xfa, 0xe8, 0xa9, 0x5b, - 0xdb, 0xe6, 0xb6, 0xca, 0x52, 0xf8, 0x03, 0xca, 0x80, 0xcd, 0x7c, 0x9d, 0x42, 0xf7, 0xe0, 0xff, - 0x8f, 0xd8, 0xac, 0x6e, 0x03, 0xba, 0xac, 0x92, 0xec, 0xb2, 0x09, 0x1d, 0xc3, 0x46, 0x45, 0x28, - 0xd5, 0x0e, 0xc1, 0x9a, 0xf2, 0xb0, 0x2c, 0xfc, 0x81, 0x62, 0x2a, 0x83, 0x3e, 0x81, 0xcd, 0xf7, - 0xe1, 0x5d, 0x2f, 0xdb, 0x6a, 0xfb, 0xb0, 0x55, 0x53, 0x4a, 0xbd, 0x36, 0xe7, 0xa7, 0x05, 0xf6, - 0xe2, 0xe6, 0x76, 0x54, 0xb9, 0x86, 0xdf, 0xe5, 0x78, 0x52, 0xba, 0x6c, 0x80, 0xb2, 0x5e, 0xce, - 0x33, 0x74, 
0x2c, 0x63, 0xbd, 0xfa, 0x26, 0xdb, 0xd0, 0xcb, 0x8b, 0x34, 0xc5, 0xdc, 0x59, 0xd1, - 0xa7, 0x25, 0x52, 0xe7, 0x42, 0x06, 0xb2, 0x10, 0xce, 0xaa, 0x39, 0x37, 0x88, 0x50, 0xe8, 0x63, - 0x2a, 0x99, 0x64, 0x28, 0x9c, 0xde, 0xc0, 0x1a, 0xda, 0xfe, 0x02, 0xab, 0xd8, 0x35, 0x06, 0xb2, - 0xc8, 0x51, 0x38, 0x6b, 0x26, 0x56, 0x61, 0x72, 0x06, 0x6b, 0x09, 0xca, 0x9c, 0x45, 0xc2, 0xe9, - 0xeb, 0x1f, 0x34, 0x7a, 0x90, 0x67, 0xee, 0xb9, 0x49, 0x3a, 0x49, 0x65, 0x3e, 0xf7, 0xab, 0x2b, - 0xc8, 0x3b, 0x58, 0x9f, 0x05, 0x42, 0x7e, 0xca, 0x26, 0x81, 0xc4, 0x89, 0x63, 0xeb, 0xbf, 0x40, - 0x5d, 0xd3, 0xe2, 0x6e, 0xd5, 0xe2, 0xee, 0x55, 0xd5, 0xe2, 0x7e, 0x93, 0x4e, 0x5e, 0xc1, 0x5a, - 0x94, 0xa3, 0xce, 0x84, 0xbf, 0x66, 0x56, 0x54, 0x7a, 0x04, 0xff, 0x35, 0x1f, 0x43, 0xb6, 0xc0, - 0xba, 0xc1, 0x79, 0x69, 0xba, 0xfa, 0x54, 0xae, 0xdf, 0x06, 0xb3, 0xc2, 0xf4, 0x76, 0xc7, 0x37, - 0xe0, 0xa8, 0xfb, 0xa6, 0x33, 0xfa, 0x61, 0x01, 0xd4, 0x35, 0x12, 0x09, 0xb6, 0xe9, 0xf7, 0x53, - 0x1e, 0x92, 0xa5, 0x46, 0xfc, 0x79, 0xfa, 0xe8, 0xc1, 0x3f, 0xe5, 0x94, 0x2d, 0x74, 0x01, 0xfd, - 0x6a, 0x68, 0xc8, 0xf6, 0xbd, 0x8a, 0x4f, 0xd4, 0xae, 0xa0, 0x2f, 0x97, 0x5d, 0x7c, 0x6f, 0xe4, - 0xbe, 0x40, 0xcf, 0x8c, 0x05, 0x79, 0xb6, 0x2c, 0xef, 0xce, 0x6c, 0xd1, 0xe7, 0x0f, 0xa1, 0x96, - 0x02, 0x08, 0xfd, 0x6a, 0x12, 0xc8, 0x8b, 0x65, 0x79, 0xad, 0x91, 0x5a, 0x5e, 0x47, 0x7b, 0xb8, - 0x8e, 0xaf, 0xa0, 0xb1, 0x64, 0x8f, 0x1b, 0x5b, 0xf0, 0x42, 0xb9, 0xf3, 0xf9, 0x75, 0xcc, 0xe4, - 0xd7, 0x22, 0x74, 0x23, 0x9e, 0x78, 0x31, 0x9f, 0xe2, 0x8d, 0x67, 0xd6, 0xb0, 0xf6, 0x4e, 0x78, - 0x31, 0xa6, 0x98, 0xab, 0x16, 0xf1, 0x62, 0xee, 0xd5, 0x9b, 0x3c, 0xec, 0xe9, 0xf8, 0xc1, 0xef, - 0x00, 0x00, 0x00, 0xff, 0xff, 0xc7, 0xf4, 0xb6, 0x7c, 0xde, 0x05, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/core/UIService.pb.go b/protos/generated/go/feast/core/UIService.pb.go index 772c548c87..1d06d0c5ef 100644 --- a/protos/generated/go/feast/core/UIService.pb.go +++ b/protos/generated/go/feast/core/UIService.pb.go @@ -1,18 +1,19 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/core/UIService.proto -package core // import "github.com/gojek/feast/protos/generated/go/feast/core" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import specs "github.com/gojek/feast/protos/generated/go/feast/specs" -import empty "github.com/golang/protobuf/ptypes/empty" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +package core import ( - context "golang.org/x/net/context" + context "context" + fmt "fmt" + specs "github.com/gojek/feast/protos/generated/go/feast/specs" + proto "github.com/golang/protobuf/proto" + empty "github.com/golang/protobuf/ptypes/empty" + timestamp "github.com/golang/protobuf/ptypes/timestamp" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -24,7 +25,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type UIServiceTypes struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -36,16 +37,17 @@ func (m *UIServiceTypes) Reset() { *m = UIServiceTypes{} } func (m *UIServiceTypes) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes) ProtoMessage() {} func (*UIServiceTypes) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0} + return fileDescriptor_c13b3edb35d457e8, []int{0} } + func (m *UIServiceTypes) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes.Unmarshal(m, b) } func (m *UIServiceTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes.Merge(dst, src) +func (m *UIServiceTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes.Merge(m, src) } func (m *UIServiceTypes) XXX_Size() int { return xxx_messageInfo_UIServiceTypes.Size(m) @@ -70,16 +72,17 @@ func (m *UIServiceTypes_EntityDetail) Reset() { *m = UIServiceTypes_Enti func (m *UIServiceTypes_EntityDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_EntityDetail) ProtoMessage() {} func (*UIServiceTypes_EntityDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 0} + return fileDescriptor_c13b3edb35d457e8, []int{0, 0} } + func (m *UIServiceTypes_EntityDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_EntityDetail.Unmarshal(m, b) } func (m *UIServiceTypes_EntityDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_EntityDetail.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_EntityDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_EntityDetail.Merge(dst, src) +func (m *UIServiceTypes_EntityDetail) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_EntityDetail.Merge(m, src) } func (m *UIServiceTypes_EntityDetail) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_EntityDetail.Size(m) @@ -122,16 +125,17 @@ func (m *UIServiceTypes_GetEntityRequest) Reset() { *m = UIServiceTypes_ func (m *UIServiceTypes_GetEntityRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetEntityRequest) ProtoMessage() {} func (*UIServiceTypes_GetEntityRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 1} + return fileDescriptor_c13b3edb35d457e8, []int{0, 1} } + func (m *UIServiceTypes_GetEntityRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Unmarshal(m, b) } func (m *UIServiceTypes_GetEntityRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetEntityRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetEntityRequest.Merge(dst, src) +func (m *UIServiceTypes_GetEntityRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetEntityRequest.Merge(m, src) } func (m *UIServiceTypes_GetEntityRequest) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetEntityRequest.Size(m) @@ -160,16 +164,17 @@ func (m *UIServiceTypes_GetEntityResponse) Reset() { *m = 
UIServiceTypes func (m *UIServiceTypes_GetEntityResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetEntityResponse) ProtoMessage() {} func (*UIServiceTypes_GetEntityResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 2} + return fileDescriptor_c13b3edb35d457e8, []int{0, 2} } + func (m *UIServiceTypes_GetEntityResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Unmarshal(m, b) } func (m *UIServiceTypes_GetEntityResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetEntityResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetEntityResponse.Merge(dst, src) +func (m *UIServiceTypes_GetEntityResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetEntityResponse.Merge(m, src) } func (m *UIServiceTypes_GetEntityResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetEntityResponse.Size(m) @@ -198,16 +203,17 @@ func (m *UIServiceTypes_ListEntitiesResponse) Reset() { *m = UIServiceTy func (m *UIServiceTypes_ListEntitiesResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListEntitiesResponse) ProtoMessage() {} func (*UIServiceTypes_ListEntitiesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 3} + return fileDescriptor_c13b3edb35d457e8, []int{0, 3} } + func (m *UIServiceTypes_ListEntitiesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Unmarshal(m, b) } func (m *UIServiceTypes_ListEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_ListEntitiesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Merge(dst, src) +func (m *UIServiceTypes_ListEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Merge(m, src) } func (m *UIServiceTypes_ListEntitiesResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_ListEntitiesResponse.Size(m) @@ -242,16 +248,17 @@ func (m *UIServiceTypes_FeatureDetail) Reset() { *m = UIServiceTypes_Fea func (m *UIServiceTypes_FeatureDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_FeatureDetail) ProtoMessage() {} func (*UIServiceTypes_FeatureDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 4} + return fileDescriptor_c13b3edb35d457e8, []int{0, 4} } + func (m *UIServiceTypes_FeatureDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_FeatureDetail.Unmarshal(m, b) } func (m *UIServiceTypes_FeatureDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_FeatureDetail.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_FeatureDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_FeatureDetail.Merge(dst, src) +func (m *UIServiceTypes_FeatureDetail) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_FeatureDetail.Merge(m, src) } func (m *UIServiceTypes_FeatureDetail) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_FeatureDetail.Size(m) @@ -315,16 +322,17 @@ func (m *UIServiceTypes_GetFeatureRequest) 
Reset() { *m = UIServiceTypes func (m *UIServiceTypes_GetFeatureRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureRequest) ProtoMessage() {} func (*UIServiceTypes_GetFeatureRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 5} + return fileDescriptor_c13b3edb35d457e8, []int{0, 5} } + func (m *UIServiceTypes_GetFeatureRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Unmarshal(m, b) } func (m *UIServiceTypes_GetFeatureRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetFeatureRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Merge(dst, src) +func (m *UIServiceTypes_GetFeatureRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Merge(m, src) } func (m *UIServiceTypes_GetFeatureRequest) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetFeatureRequest.Size(m) @@ -354,16 +362,17 @@ func (m *UIServiceTypes_GetFeatureResponse) Reset() { *m = UIServiceType func (m *UIServiceTypes_GetFeatureResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureResponse) ProtoMessage() {} func (*UIServiceTypes_GetFeatureResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 6} + return fileDescriptor_c13b3edb35d457e8, []int{0, 6} } + func (m *UIServiceTypes_GetFeatureResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Unmarshal(m, b) } func (m *UIServiceTypes_GetFeatureResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetFeatureResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Merge(dst, src) +func (m *UIServiceTypes_GetFeatureResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Merge(m, src) } func (m *UIServiceTypes_GetFeatureResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetFeatureResponse.Size(m) @@ -399,16 +408,17 @@ func (m *UIServiceTypes_ListFeaturesResponse) Reset() { *m = UIServiceTy func (m *UIServiceTypes_ListFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListFeaturesResponse) ProtoMessage() {} func (*UIServiceTypes_ListFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 7} + return fileDescriptor_c13b3edb35d457e8, []int{0, 7} } + func (m *UIServiceTypes_ListFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Unmarshal(m, b) } func (m *UIServiceTypes_ListFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_ListFeaturesResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Merge(dst, src) +func (m *UIServiceTypes_ListFeaturesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Merge(m, src) } func (m *UIServiceTypes_ListFeaturesResponse) XXX_Size() int { return 
xxx_messageInfo_UIServiceTypes_ListFeaturesResponse.Size(m) @@ -439,16 +449,17 @@ func (m *UIServiceTypes_FeatureGroupDetail) Reset() { *m = UIServiceType func (m *UIServiceTypes_FeatureGroupDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_FeatureGroupDetail) ProtoMessage() {} func (*UIServiceTypes_FeatureGroupDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 8} + return fileDescriptor_c13b3edb35d457e8, []int{0, 8} } + func (m *UIServiceTypes_FeatureGroupDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Unmarshal(m, b) } func (m *UIServiceTypes_FeatureGroupDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_FeatureGroupDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Merge(dst, src) +func (m *UIServiceTypes_FeatureGroupDetail) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Merge(m, src) } func (m *UIServiceTypes_FeatureGroupDetail) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_FeatureGroupDetail.Size(m) @@ -484,16 +495,17 @@ func (m *UIServiceTypes_GetFeatureGroupRequest) Reset() { *m = UIService func (m *UIServiceTypes_GetFeatureGroupRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureGroupRequest) ProtoMessage() {} func (*UIServiceTypes_GetFeatureGroupRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 9} + return fileDescriptor_c13b3edb35d457e8, []int{0, 9} } + func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Unmarshal(m, b) } func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetFeatureGroupRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Merge(dst, src) +func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Merge(m, src) } func (m *UIServiceTypes_GetFeatureGroupRequest) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupRequest.Size(m) @@ -524,16 +536,17 @@ func (m *UIServiceTypes_GetFeatureGroupResponse) Reset() { func (m *UIServiceTypes_GetFeatureGroupResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetFeatureGroupResponse) ProtoMessage() {} func (*UIServiceTypes_GetFeatureGroupResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 10} + return fileDescriptor_c13b3edb35d457e8, []int{0, 10} } + func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Unmarshal(m, b) } func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetFeatureGroupResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Merge(dst, src) +func (m *UIServiceTypes_GetFeatureGroupResponse) 
XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Merge(m, src) } func (m *UIServiceTypes_GetFeatureGroupResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetFeatureGroupResponse.Size(m) @@ -564,16 +577,17 @@ func (m *UIServiceTypes_ListFeatureGroupsResponse) Reset() { func (m *UIServiceTypes_ListFeatureGroupsResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListFeatureGroupsResponse) ProtoMessage() {} func (*UIServiceTypes_ListFeatureGroupsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 11} + return fileDescriptor_c13b3edb35d457e8, []int{0, 11} } + func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Unmarshal(m, b) } func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_ListFeatureGroupsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Merge(dst, src) +func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Merge(m, src) } func (m *UIServiceTypes_ListFeatureGroupsResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_ListFeatureGroupsResponse.Size(m) @@ -604,16 +618,17 @@ func (m *UIServiceTypes_StorageDetail) Reset() { *m = UIServiceTypes_Sto func (m *UIServiceTypes_StorageDetail) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_StorageDetail) ProtoMessage() {} func (*UIServiceTypes_StorageDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 12} + return fileDescriptor_c13b3edb35d457e8, []int{0, 12} } + func (m *UIServiceTypes_StorageDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_StorageDetail.Unmarshal(m, b) } func (m *UIServiceTypes_StorageDetail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_StorageDetail.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_StorageDetail) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_StorageDetail.Merge(dst, src) +func (m *UIServiceTypes_StorageDetail) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_StorageDetail.Merge(m, src) } func (m *UIServiceTypes_StorageDetail) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_StorageDetail.Size(m) @@ -649,16 +664,17 @@ func (m *UIServiceTypes_GetStorageRequest) Reset() { *m = UIServiceTypes func (m *UIServiceTypes_GetStorageRequest) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetStorageRequest) ProtoMessage() {} func (*UIServiceTypes_GetStorageRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 13} + return fileDescriptor_c13b3edb35d457e8, []int{0, 13} } + func (m *UIServiceTypes_GetStorageRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Unmarshal(m, b) } func (m *UIServiceTypes_GetStorageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetStorageRequest) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_UIServiceTypes_GetStorageRequest.Merge(dst, src) +func (m *UIServiceTypes_GetStorageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetStorageRequest.Merge(m, src) } func (m *UIServiceTypes_GetStorageRequest) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetStorageRequest.Size(m) @@ -687,16 +703,17 @@ func (m *UIServiceTypes_GetStorageResponse) Reset() { *m = UIServiceType func (m *UIServiceTypes_GetStorageResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_GetStorageResponse) ProtoMessage() {} func (*UIServiceTypes_GetStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 14} + return fileDescriptor_c13b3edb35d457e8, []int{0, 14} } + func (m *UIServiceTypes_GetStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Unmarshal(m, b) } func (m *UIServiceTypes_GetStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_GetStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_GetStorageResponse.Merge(dst, src) +func (m *UIServiceTypes_GetStorageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_GetStorageResponse.Merge(m, src) } func (m *UIServiceTypes_GetStorageResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_GetStorageResponse.Size(m) @@ -725,16 +742,17 @@ func (m *UIServiceTypes_ListStorageResponse) Reset() { *m = UIServiceTyp func (m *UIServiceTypes_ListStorageResponse) String() string { return proto.CompactTextString(m) } func (*UIServiceTypes_ListStorageResponse) ProtoMessage() {} func (*UIServiceTypes_ListStorageResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_UIService_04866529701c634c, []int{0, 15} + return fileDescriptor_c13b3edb35d457e8, []int{0, 15} } + func (m *UIServiceTypes_ListStorageResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Unmarshal(m, b) } func (m *UIServiceTypes_ListStorageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Marshal(b, m, deterministic) } -func (dst *UIServiceTypes_ListStorageResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_UIServiceTypes_ListStorageResponse.Merge(dst, src) +func (m *UIServiceTypes_ListStorageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UIServiceTypes_ListStorageResponse.Merge(m, src) } func (m *UIServiceTypes_ListStorageResponse) XXX_Size() int { return xxx_messageInfo_UIServiceTypes_ListStorageResponse.Size(m) @@ -772,6 +790,61 @@ func init() { proto.RegisterType((*UIServiceTypes_ListStorageResponse)(nil), "feast.core.UIServiceTypes.ListStorageResponse") } +func init() { proto.RegisterFile("feast/core/UIService.proto", fileDescriptor_c13b3edb35d457e8) } + +var fileDescriptor_c13b3edb35d457e8 = []byte{ + // 784 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0xcd, 0x47, 0x49, 0x9a, 0x69, 0x5a, 0xe8, 0x82, 0xda, 0xb0, 0x50, 0x51, 0x99, 0x03, 0x91, + 0xda, 0xda, 0x6a, 0x28, 0x37, 0x24, 0xa4, 0xd2, 0x52, 0x21, 0x71, 0x00, 0xa7, 0x05, 0x0a, 0x5c, + 0x9c, 0x64, 0xe2, 0xba, 0x4d, 0x62, 0xd7, 0xde, 0x50, 0x05, 0x8e, 0x88, 0x1b, 0xfc, 0x26, 0xfe, + 0x16, 0x47, 0x94, 0xdd, 0xb5, 0xbd, 0x4e, 0x9a, 0xc4, 
0xa9, 0xb8, 0x25, 0x3b, 0x33, 0x6f, 0x67, + 0xde, 0x9b, 0x19, 0x2f, 0xd0, 0x36, 0x5a, 0x01, 0x33, 0x9a, 0xae, 0x8f, 0xc6, 0xc9, 0xeb, 0x3a, + 0xfa, 0x5f, 0x9d, 0x26, 0xea, 0x9e, 0xef, 0x32, 0x97, 0x00, 0xb7, 0xe9, 0x43, 0x1b, 0x7d, 0x28, + 0xfc, 0x02, 0x0f, 0x9b, 0x81, 0x71, 0xd8, 0x63, 0x0e, 0x1b, 0xd4, 0x3d, 0x6c, 0x0a, 0x4f, 0xba, + 0xa1, 0x5a, 0x5f, 0xa1, 0xc5, 0xfa, 0x3e, 0x2a, 0x66, 0xed, 0x1a, 0xf3, 0x91, 0xef, 0xf6, 0xbd, + 0x49, 0x10, 0x75, 0xe6, 0xfa, 0x96, 0xad, 0x42, 0x3c, 0xb0, 0x5d, 0xd7, 0xee, 0xa0, 0xc1, 0xff, + 0x35, 0xfa, 0x6d, 0x03, 0xbb, 0x1e, 0x1b, 0x48, 0xe3, 0xa3, 0x51, 0x23, 0x73, 0xba, 0x18, 0x30, + 0xab, 0xeb, 0x09, 0x07, 0xed, 0x6f, 0x19, 0x56, 0xa2, 0xea, 0x8e, 0x07, 0x1e, 0x06, 0xf4, 0x77, + 0x16, 0xca, 0xa2, 0x8e, 0x03, 0x64, 0x96, 0xd3, 0x21, 0x5b, 0xb0, 0x30, 0xbc, 0xbc, 0x92, 0xdd, + 0xcc, 0x56, 0x97, 0x6a, 0xeb, 0xba, 0x28, 0x9e, 0xe7, 0xa3, 0xc7, 0x05, 0x9b, 0xdc, 0x89, 0x10, + 0x58, 0x38, 0x77, 0x1b, 0x41, 0x25, 0xb7, 0x99, 0xaf, 0x96, 0x4c, 0xfe, 0x9b, 0x3c, 0x87, 0xa5, + 0x8e, 0x15, 0xb0, 0x13, 0xaf, 0x65, 0x31, 0x6c, 0x55, 0xf2, 0x1c, 0x87, 0xea, 0x22, 0x37, 0x3d, + 0xcc, 0x4d, 0x3f, 0x0e, 0x73, 0x33, 0x55, 0x77, 0xaa, 0xc1, 0x9d, 0x23, 0x64, 0xe2, 0x22, 0x13, + 0x2f, 0xfb, 0x18, 0x30, 0xb2, 0x02, 0x39, 0xa7, 0xc5, 0x13, 0x2a, 0x99, 0x39, 0xa7, 0x45, 0x8f, + 0x61, 0x55, 0xf1, 0x09, 0x3c, 0xb7, 0x17, 0x20, 0x79, 0x01, 0x05, 0xe4, 0x27, 0x32, 0xf3, 0x27, + 0x7a, 0x2c, 0x9b, 0x9e, 0x2c, 0x5a, 0x57, 0x0b, 0x36, 0x65, 0x18, 0xfd, 0x0c, 0xf7, 0xde, 0x38, + 0x81, 0x80, 0x75, 0x30, 0x88, 0x80, 0x5f, 0xc2, 0x22, 0xca, 0xb3, 0x4a, 0x76, 0x33, 0x3f, 0x0f, + 0x74, 0x14, 0x48, 0x7f, 0xe4, 0x60, 0x59, 0x2a, 0x2e, 0x79, 0xde, 0x4e, 0xf0, 0x5c, 0x49, 0xf0, + 0xac, 0xb4, 0x8e, 0x24, 0x5a, 0x83, 0x72, 0xc3, 0xb1, 0x2f, 0xfb, 0xe8, 0x0f, 0xde, 0x3b, 0x78, + 0x55, 0xc9, 0x71, 0x32, 0x12, 0x67, 0xa4, 0x02, 0x45, 0xec, 0x59, 0x8d, 0x8e, 0x24, 0x7d, 0xd1, + 0x0c, 0xff, 0x46, 0x32, 0x2d, 0x4c, 0x96, 0xe9, 0xd6, 0x5c, 0x32, 0x91, 0x3d, 0x28, 0x36, 0x7d, + 0xe4, 0x91, 0x85, 0x99, 0x91, 0xa1, 0x2b, 0x7d, 0xcc, 0x85, 0x93, 0xd5, 0x4d, 0x52, 0xf7, 0x57, + 0x16, 0x88, 0xea, 0x25, 0x65, 0xd8, 0x87, 0x62, 0x5b, 0x1c, 0x49, 0xca, 0xaa, 0x53, 0x54, 0x48, + 0x50, 0x6d, 0x86, 0x81, 0xa4, 0x06, 0x45, 0xdf, 0xba, 0x1a, 0xd2, 0xca, 0x09, 0x9c, 0x46, 0x7b, + 0xe8, 0x48, 0xbf, 0x88, 0xb6, 0x90, 0xb6, 0xb8, 0x2d, 0x0e, 0x60, 0x51, 0xc2, 0x86, 0x6d, 0x91, + 0x3e, 0xa1, 0x28, 0x92, 0xfe, 0xcc, 0x02, 0x51, 0x37, 0x81, 0x6c, 0x8e, 0xdd, 0x44, 0x73, 0x6c, + 0x5c, 0x97, 0x65, 0xb4, 0x38, 0x64, 0x87, 0x8c, 0xe8, 0x99, 0x9b, 0x6f, 0xec, 0xaa, 0xb0, 0x16, + 0x73, 0xce, 0xa1, 0x27, 0xc9, 0xd3, 0x81, 0xf5, 0x31, 0x4f, 0x49, 0xc9, 0x3b, 0x28, 0xb7, 0x95, + 0x73, 0x99, 0xfd, 0xce, 0x6c, 0x5a, 0x94, 0xd2, 0xcd, 0x04, 0x04, 0xf5, 0xe0, 0xbe, 0xc2, 0x3e, + 0x3f, 0x8b, 0x25, 0xa8, 0xc3, 0xb2, 0xea, 0x1c, 0xea, 0x30, 0xe7, 0x85, 0x49, 0x0c, 0xfa, 0x1d, + 0x96, 0xe5, 0xda, 0x4d, 0x31, 0xa8, 0xca, 0x82, 0xfe, 0x2f, 0x32, 0x88, 0x01, 0x91, 0xa8, 0x93, + 0x14, 0xf8, 0xc8, 0xe7, 0x23, 0x72, 0x8a, 0xe7, 0x23, 0x10, 0x47, 0x29, 0xe6, 0x23, 0x51, 0xa1, + 0x19, 0x06, 0xd2, 0x53, 0xb8, 0x3b, 0x64, 0x7b, 0x2a, 0x74, 0xfe, 0x46, 0xd0, 0xb5, 0x3f, 0x05, + 0x28, 0x45, 0x9e, 0xe4, 0x0c, 0x4a, 0xd1, 0x06, 0x27, 0x5b, 0x53, 0xd0, 0x46, 0xbf, 0x05, 0x74, + 0x3b, 0x9d, 0xb3, 0xc8, 0x5c, 0xcb, 0x90, 0x53, 0x28, 0xab, 0x5b, 0x9d, 0xac, 0x8d, 0x49, 0x71, + 0x38, 0xfc, 0x82, 0x52, 0x63, 0x0a, 0xee, 0x75, 0x9f, 0x05, 0x2d, 0x43, 0x2e, 0x00, 0xe2, 0x49, + 0x20, 0x33, 0x12, 0x4b, 0x2e, 0x3d, 0xba, 0x93, 0xd2, 0x7b, 0xb4, 0x8e, 0x70, 
0x0d, 0xdd, 0xb8, + 0x8e, 0xd1, 0x3d, 0xa6, 0x65, 0xc8, 0x37, 0xb8, 0x3d, 0x32, 0xd1, 0x64, 0x37, 0x55, 0x7a, 0xea, + 0x9e, 0xa0, 0xb5, 0x79, 0x42, 0xa2, 0xbb, 0x9b, 0xb0, 0x3a, 0x36, 0xdf, 0x13, 0x6b, 0xdb, 0x4b, + 0x57, 0x5b, 0x72, 0x4b, 0x44, 0x42, 0xc9, 0xc6, 0x9c, 0x25, 0x54, 0x72, 0xf8, 0x66, 0x09, 0x35, + 0x32, 0x2a, 0x5a, 0x86, 0x7c, 0x80, 0x25, 0x65, 0x86, 0x26, 0xd6, 0xa2, 0xcf, 0xa8, 0x65, 0x0c, + 0x78, 0xbf, 0x0e, 0xca, 0x43, 0x74, 0x3f, 0x7e, 0xc7, 0xbd, 0x1d, 0x02, 0x7f, 0x7a, 0x66, 0x3b, + 0xec, 0xac, 0xdf, 0xd0, 0x9b, 0x6e, 0xd7, 0xb0, 0xdd, 0x73, 0xbc, 0x30, 0xc4, 0x43, 0x92, 0x5f, + 0x1b, 0x18, 0x36, 0xf6, 0xd0, 0x1f, 0xae, 0x18, 0xc3, 0x76, 0x8d, 0xf8, 0xad, 0xdb, 0x28, 0x70, + 0xfb, 0xd3, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x90, 0x76, 0x0d, 0x62, 0x00, 0x0b, 0x00, 0x00, +} + // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn @@ -785,34 +858,34 @@ const _ = grpc.SupportPackageIsVersion4 // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type UIServiceClient interface { // - // Get entity specified in request. - // This process returns a single of entity specs. + //Get entity specified in request. + //This process returns a single of entity specs. GetEntity(ctx context.Context, in *UIServiceTypes_GetEntityRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetEntityResponse, error) // - // Get all entities. - // This process returns a list of entity specs. + //Get all entities. + //This process returns a list of entity specs. ListEntities(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListEntitiesResponse, error) // - // Get feature specified in request. + //Get feature specified in request. GetFeature(ctx context.Context, in *UIServiceTypes_GetFeatureRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetFeatureResponse, error) // - // Get all features. - // This process returns a list of feature specs. + //Get all features. + //This process returns a list of feature specs. ListFeatures(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListFeaturesResponse, error) // - // Get feature group specified in request. + //Get feature group specified in request. GetFeatureGroup(ctx context.Context, in *UIServiceTypes_GetFeatureGroupRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetFeatureGroupResponse, error) // - // Get all feature groups. - // This process returns a list of feature group specs. + //Get all feature groups. + //This process returns a list of feature group specs. ListFeatureGroups(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListFeatureGroupsResponse, error) // - // Get storage spec specified in request. + //Get storage spec specified in request. // GetStorage(ctx context.Context, in *UIServiceTypes_GetStorageRequest, opts ...grpc.CallOption) (*UIServiceTypes_GetStorageResponse, error) // - // Get all storage specs. - // This process returns a list of storage specs. + //Get all storage specs. + //This process returns a list of storage specs. ListStorage(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UIServiceTypes_ListStorageResponse, error) } @@ -899,37 +972,66 @@ func (c *uIServiceClient) ListStorage(ctx context.Context, in *empty.Empty, opts // UIServiceServer is the server API for UIService service. type UIServiceServer interface { // - // Get entity specified in request. 
- // This process returns a single of entity specs. + //Get entity specified in request. + //This process returns a single of entity specs. GetEntity(context.Context, *UIServiceTypes_GetEntityRequest) (*UIServiceTypes_GetEntityResponse, error) // - // Get all entities. - // This process returns a list of entity specs. + //Get all entities. + //This process returns a list of entity specs. ListEntities(context.Context, *empty.Empty) (*UIServiceTypes_ListEntitiesResponse, error) // - // Get feature specified in request. + //Get feature specified in request. GetFeature(context.Context, *UIServiceTypes_GetFeatureRequest) (*UIServiceTypes_GetFeatureResponse, error) // - // Get all features. - // This process returns a list of feature specs. + //Get all features. + //This process returns a list of feature specs. ListFeatures(context.Context, *empty.Empty) (*UIServiceTypes_ListFeaturesResponse, error) // - // Get feature group specified in request. + //Get feature group specified in request. GetFeatureGroup(context.Context, *UIServiceTypes_GetFeatureGroupRequest) (*UIServiceTypes_GetFeatureGroupResponse, error) // - // Get all feature groups. - // This process returns a list of feature group specs. + //Get all feature groups. + //This process returns a list of feature group specs. ListFeatureGroups(context.Context, *empty.Empty) (*UIServiceTypes_ListFeatureGroupsResponse, error) // - // Get storage spec specified in request. + //Get storage spec specified in request. // GetStorage(context.Context, *UIServiceTypes_GetStorageRequest) (*UIServiceTypes_GetStorageResponse, error) // - // Get all storage specs. - // This process returns a list of storage specs. + //Get all storage specs. + //This process returns a list of storage specs. ListStorage(context.Context, *empty.Empty) (*UIServiceTypes_ListStorageResponse, error) } +// UnimplementedUIServiceServer can be embedded to have forward compatible implementations. 
+type UnimplementedUIServiceServer struct { +} + +func (*UnimplementedUIServiceServer) GetEntity(ctx context.Context, req *UIServiceTypes_GetEntityRequest) (*UIServiceTypes_GetEntityResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetEntity not implemented") +} +func (*UnimplementedUIServiceServer) ListEntities(ctx context.Context, req *empty.Empty) (*UIServiceTypes_ListEntitiesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListEntities not implemented") +} +func (*UnimplementedUIServiceServer) GetFeature(ctx context.Context, req *UIServiceTypes_GetFeatureRequest) (*UIServiceTypes_GetFeatureResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeature not implemented") +} +func (*UnimplementedUIServiceServer) ListFeatures(ctx context.Context, req *empty.Empty) (*UIServiceTypes_ListFeaturesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListFeatures not implemented") +} +func (*UnimplementedUIServiceServer) GetFeatureGroup(ctx context.Context, req *UIServiceTypes_GetFeatureGroupRequest) (*UIServiceTypes_GetFeatureGroupResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeatureGroup not implemented") +} +func (*UnimplementedUIServiceServer) ListFeatureGroups(ctx context.Context, req *empty.Empty) (*UIServiceTypes_ListFeatureGroupsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListFeatureGroups not implemented") +} +func (*UnimplementedUIServiceServer) GetStorage(ctx context.Context, req *UIServiceTypes_GetStorageRequest) (*UIServiceTypes_GetStorageResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStorage not implemented") +} +func (*UnimplementedUIServiceServer) ListStorage(ctx context.Context, req *empty.Empty) (*UIServiceTypes_ListStorageResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListStorage not implemented") +} + func RegisterUIServiceServer(s *grpc.Server, srv UIServiceServer) { s.RegisterService(&_UIService_serviceDesc, srv) } @@ -1118,60 +1220,3 @@ var _UIService_serviceDesc = grpc.ServiceDesc{ Streams: []grpc.StreamDesc{}, Metadata: "feast/core/UIService.proto", } - -func init() { - proto.RegisterFile("feast/core/UIService.proto", fileDescriptor_UIService_04866529701c634c) -} - -var fileDescriptor_UIService_04866529701c634c = []byte{ - // 784 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4d, 0x6f, 0xd3, 0x40, - 0x10, 0xcd, 0x47, 0x49, 0x9a, 0x69, 0x5a, 0xe8, 0x82, 0xda, 0xb0, 0x50, 0x51, 0x99, 0x03, 0x91, - 0xda, 0xda, 0x6a, 0x28, 0x37, 0x24, 0xa4, 0xd2, 0x52, 0x21, 0x71, 0x00, 0xa7, 0x05, 0x0a, 0x5c, - 0x9c, 0x64, 0xe2, 0xba, 0x4d, 0x62, 0xd7, 0xde, 0x50, 0x05, 0x8e, 0x88, 0x1b, 0xfc, 0x26, 0xfe, - 0x16, 0x47, 0x94, 0xdd, 0xb5, 0xbd, 0x4e, 0x9a, 0xc4, 0xa9, 0xb8, 0x25, 0x3b, 0x33, 0x6f, 0x67, - 0xde, 0x9b, 0x19, 0x2f, 0xd0, 0x36, 0x5a, 0x01, 0x33, 0x9a, 0xae, 0x8f, 0xc6, 0xc9, 0xeb, 0x3a, - 0xfa, 0x5f, 0x9d, 0x26, 0xea, 0x9e, 0xef, 0x32, 0x97, 0x00, 0xb7, 0xe9, 0x43, 0x1b, 0x7d, 0x28, - 0xfc, 0x02, 0x0f, 0x9b, 0x81, 0x71, 0xd8, 0x63, 0x0e, 0x1b, 0xd4, 0x3d, 0x6c, 0x0a, 0x4f, 0xba, - 0xa1, 0x5a, 0x5f, 0xa1, 0xc5, 0xfa, 0x3e, 0x2a, 0x66, 0xed, 0x1a, 0xf3, 0x91, 0xef, 0xf6, 0xbd, - 0x49, 0x10, 0x75, 0xe6, 0xfa, 0x96, 0xad, 0x42, 0x3c, 0xb0, 0x5d, 0xd7, 0xee, 0xa0, 0xc1, 0xff, - 0x35, 0xfa, 0x6d, 0x03, 0xbb, 0x1e, 0x1b, 0x48, 0xe3, 0xa3, 0x51, 0x23, 0x73, 0xba, 0x18, 0x30, - 0xab, 0xeb, 0x09, 
0x07, 0xed, 0x6f, 0x19, 0x56, 0xa2, 0xea, 0x8e, 0x07, 0x1e, 0x06, 0xf4, 0x77, - 0x16, 0xca, 0xa2, 0x8e, 0x03, 0x64, 0x96, 0xd3, 0x21, 0x5b, 0xb0, 0x30, 0xbc, 0xbc, 0x92, 0xdd, - 0xcc, 0x56, 0x97, 0x6a, 0xeb, 0xba, 0x28, 0x9e, 0xe7, 0xa3, 0xc7, 0x05, 0x9b, 0xdc, 0x89, 0x10, - 0x58, 0x38, 0x77, 0x1b, 0x41, 0x25, 0xb7, 0x99, 0xaf, 0x96, 0x4c, 0xfe, 0x9b, 0x3c, 0x87, 0xa5, - 0x8e, 0x15, 0xb0, 0x13, 0xaf, 0x65, 0x31, 0x6c, 0x55, 0xf2, 0x1c, 0x87, 0xea, 0x22, 0x37, 0x3d, - 0xcc, 0x4d, 0x3f, 0x0e, 0x73, 0x33, 0x55, 0x77, 0xaa, 0xc1, 0x9d, 0x23, 0x64, 0xe2, 0x22, 0x13, - 0x2f, 0xfb, 0x18, 0x30, 0xb2, 0x02, 0x39, 0xa7, 0xc5, 0x13, 0x2a, 0x99, 0x39, 0xa7, 0x45, 0x8f, - 0x61, 0x55, 0xf1, 0x09, 0x3c, 0xb7, 0x17, 0x20, 0x79, 0x01, 0x05, 0xe4, 0x27, 0x32, 0xf3, 0x27, - 0x7a, 0x2c, 0x9b, 0x9e, 0x2c, 0x5a, 0x57, 0x0b, 0x36, 0x65, 0x18, 0xfd, 0x0c, 0xf7, 0xde, 0x38, - 0x81, 0x80, 0x75, 0x30, 0x88, 0x80, 0x5f, 0xc2, 0x22, 0xca, 0xb3, 0x4a, 0x76, 0x33, 0x3f, 0x0f, - 0x74, 0x14, 0x48, 0x7f, 0xe4, 0x60, 0x59, 0x2a, 0x2e, 0x79, 0xde, 0x4e, 0xf0, 0x5c, 0x49, 0xf0, - 0xac, 0xb4, 0x8e, 0x24, 0x5a, 0x83, 0x72, 0xc3, 0xb1, 0x2f, 0xfb, 0xe8, 0x0f, 0xde, 0x3b, 0x78, - 0x55, 0xc9, 0x71, 0x32, 0x12, 0x67, 0xa4, 0x02, 0x45, 0xec, 0x59, 0x8d, 0x8e, 0x24, 0x7d, 0xd1, - 0x0c, 0xff, 0x46, 0x32, 0x2d, 0x4c, 0x96, 0xe9, 0xd6, 0x5c, 0x32, 0x91, 0x3d, 0x28, 0x36, 0x7d, - 0xe4, 0x91, 0x85, 0x99, 0x91, 0xa1, 0x2b, 0x7d, 0xcc, 0x85, 0x93, 0xd5, 0x4d, 0x52, 0xf7, 0x57, - 0x16, 0x88, 0xea, 0x25, 0x65, 0xd8, 0x87, 0x62, 0x5b, 0x1c, 0x49, 0xca, 0xaa, 0x53, 0x54, 0x48, - 0x50, 0x6d, 0x86, 0x81, 0xa4, 0x06, 0x45, 0xdf, 0xba, 0x1a, 0xd2, 0xca, 0x09, 0x9c, 0x46, 0x7b, - 0xe8, 0x48, 0xbf, 0x88, 0xb6, 0x90, 0xb6, 0xb8, 0x2d, 0x0e, 0x60, 0x51, 0xc2, 0x86, 0x6d, 0x91, - 0x3e, 0xa1, 0x28, 0x92, 0xfe, 0xcc, 0x02, 0x51, 0x37, 0x81, 0x6c, 0x8e, 0xdd, 0x44, 0x73, 0x6c, - 0x5c, 0x97, 0x65, 0xb4, 0x38, 0x64, 0x87, 0x8c, 0xe8, 0x99, 0x9b, 0x6f, 0xec, 0xaa, 0xb0, 0x16, - 0x73, 0xce, 0xa1, 0x27, 0xc9, 0xd3, 0x81, 0xf5, 0x31, 0x4f, 0x49, 0xc9, 0x3b, 0x28, 0xb7, 0x95, - 0x73, 0x99, 0xfd, 0xce, 0x6c, 0x5a, 0x94, 0xd2, 0xcd, 0x04, 0x04, 0xf5, 0xe0, 0xbe, 0xc2, 0x3e, - 0x3f, 0x8b, 0x25, 0xa8, 0xc3, 0xb2, 0xea, 0x1c, 0xea, 0x30, 0xe7, 0x85, 0x49, 0x0c, 0xfa, 0x1d, - 0x96, 0xe5, 0xda, 0x4d, 0x31, 0xa8, 0xca, 0x82, 0xfe, 0x2f, 0x32, 0x88, 0x01, 0x91, 0xa8, 0x93, - 0x14, 0xf8, 0xc8, 0xe7, 0x23, 0x72, 0x8a, 0xe7, 0x23, 0x10, 0x47, 0x29, 0xe6, 0x23, 0x51, 0xa1, - 0x19, 0x06, 0xd2, 0x53, 0xb8, 0x3b, 0x64, 0x7b, 0x2a, 0x74, 0xfe, 0x46, 0xd0, 0xb5, 0x3f, 0x05, - 0x28, 0x45, 0x9e, 0xe4, 0x0c, 0x4a, 0xd1, 0x06, 0x27, 0x5b, 0x53, 0xd0, 0x46, 0xbf, 0x05, 0x74, - 0x3b, 0x9d, 0xb3, 0xc8, 0x5c, 0xcb, 0x90, 0x53, 0x28, 0xab, 0x5b, 0x9d, 0xac, 0x8d, 0x49, 0x71, - 0x38, 0xfc, 0x82, 0x52, 0x63, 0x0a, 0xee, 0x75, 0x9f, 0x05, 0x2d, 0x43, 0x2e, 0x00, 0xe2, 0x49, - 0x20, 0x33, 0x12, 0x4b, 0x2e, 0x3d, 0xba, 0x93, 0xd2, 0x7b, 0xb4, 0x8e, 0x70, 0x0d, 0xdd, 0xb8, - 0x8e, 0xd1, 0x3d, 0xa6, 0x65, 0xc8, 0x37, 0xb8, 0x3d, 0x32, 0xd1, 0x64, 0x37, 0x55, 0x7a, 0xea, - 0x9e, 0xa0, 0xb5, 0x79, 0x42, 0xa2, 0xbb, 0x9b, 0xb0, 0x3a, 0x36, 0xdf, 0x13, 0x6b, 0xdb, 0x4b, - 0x57, 0x5b, 0x72, 0x4b, 0x44, 0x42, 0xc9, 0xc6, 0x9c, 0x25, 0x54, 0x72, 0xf8, 0x66, 0x09, 0x35, - 0x32, 0x2a, 0x5a, 0x86, 0x7c, 0x80, 0x25, 0x65, 0x86, 0x26, 0xd6, 0xa2, 0xcf, 0xa8, 0x65, 0x0c, - 0x78, 0xbf, 0x0e, 0xca, 0x43, 0x74, 0x3f, 0x7e, 0xc7, 0xbd, 0x1d, 0x02, 0x7f, 0x7a, 0x66, 0x3b, - 0xec, 0xac, 0xdf, 0xd0, 0x9b, 0x6e, 0xd7, 0xb0, 0xdd, 0x73, 0xbc, 0x30, 0xc4, 0x43, 0x92, 0x5f, - 0x1b, 0x18, 0x36, 0xf6, 0xd0, 0x1f, 0xae, 
0x18, 0xc3, 0x76, 0x8d, 0xf8, 0xad, 0xdb, 0x28, 0x70, - 0xfb, 0xd3, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x90, 0x76, 0x0d, 0x62, 0x00, 0x0b, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/serving/Serving.pb.go b/protos/generated/go/feast/serving/Serving.pb.go index 6f61517bee..c2366dafa9 100644 --- a/protos/generated/go/feast/serving/Serving.pb.go +++ b/protos/generated/go/feast/serving/Serving.pb.go @@ -1,17 +1,18 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/serving/Serving.proto -package serving // import "github.com/gojek/feast/protos/generated/go/feast/serving" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import types "github.com/gojek/feast/protos/generated/go/feast/types" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +package serving import ( - context "golang.org/x/net/context" + context "context" + fmt "fmt" + types "github.com/gojek/feast/protos/generated/go/feast/types" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + math "math" ) // Reference imports to suppress errors if they are not otherwise used. @@ -23,7 +24,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type QueryFeaturesRequest struct { // e.g. "driver", "customer", "city". @@ -44,16 +45,17 @@ func (m *QueryFeaturesRequest) Reset() { *m = QueryFeaturesRequest{} } func (m *QueryFeaturesRequest) String() string { return proto.CompactTextString(m) } func (*QueryFeaturesRequest) ProtoMessage() {} func (*QueryFeaturesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{0} + return fileDescriptor_7609de6de542e6f0, []int{0} } + func (m *QueryFeaturesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_QueryFeaturesRequest.Unmarshal(m, b) } func (m *QueryFeaturesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_QueryFeaturesRequest.Marshal(b, m, deterministic) } -func (dst *QueryFeaturesRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_QueryFeaturesRequest.Merge(dst, src) +func (m *QueryFeaturesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryFeaturesRequest.Merge(m, src) } func (m *QueryFeaturesRequest) XXX_Size() int { return xxx_messageInfo_QueryFeaturesRequest.Size(m) @@ -99,16 +101,17 @@ func (m *QueryFeaturesResponse) Reset() { *m = QueryFeaturesResponse{} } func (m *QueryFeaturesResponse) String() string { return proto.CompactTextString(m) } func (*QueryFeaturesResponse) ProtoMessage() {} func (*QueryFeaturesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{1} + return fileDescriptor_7609de6de542e6f0, []int{1} } + func (m *QueryFeaturesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_QueryFeaturesResponse.Unmarshal(m, b) } func (m *QueryFeaturesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_QueryFeaturesResponse.Marshal(b, m, deterministic) } -func (dst *QueryFeaturesResponse) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_QueryFeaturesResponse.Merge(dst, src) +func (m *QueryFeaturesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryFeaturesResponse.Merge(m, src) } func (m *QueryFeaturesResponse) XXX_Size() int { return xxx_messageInfo_QueryFeaturesResponse.Size(m) @@ -145,16 +148,17 @@ func (m *Entity) Reset() { *m = Entity{} } func (m *Entity) String() string { return proto.CompactTextString(m) } func (*Entity) ProtoMessage() {} func (*Entity) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{2} + return fileDescriptor_7609de6de542e6f0, []int{2} } + func (m *Entity) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Entity.Unmarshal(m, b) } func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Entity.Marshal(b, m, deterministic) } -func (dst *Entity) XXX_Merge(src proto.Message) { - xxx_messageInfo_Entity.Merge(dst, src) +func (m *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(m, src) } func (m *Entity) XXX_Size() int { return xxx_messageInfo_Entity.Size(m) @@ -186,16 +190,17 @@ func (m *FeatureValue) Reset() { *m = FeatureValue{} } func (m *FeatureValue) String() string { return proto.CompactTextString(m) } func (*FeatureValue) ProtoMessage() {} func (*FeatureValue) Descriptor() ([]byte, []int) { - return fileDescriptor_Serving_f91320f9a3f0c4cf, []int{3} + return fileDescriptor_7609de6de542e6f0, []int{3} } + func (m *FeatureValue) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureValue.Unmarshal(m, b) } func (m *FeatureValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureValue.Marshal(b, m, deterministic) } -func (dst *FeatureValue) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureValue.Merge(dst, src) +func (m *FeatureValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureValue.Merge(m, src) } func (m *FeatureValue) XXX_Size() int { return xxx_messageInfo_FeatureValue.Size(m) @@ -229,6 +234,39 @@ func init() { proto.RegisterType((*FeatureValue)(nil), "feast.serving.FeatureValue") } +func init() { proto.RegisterFile("feast/serving/Serving.proto", fileDescriptor_7609de6de542e6f0) } + +var fileDescriptor_7609de6de542e6f0 = []byte{ + // 429 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x8a, 0xd3, 0x40, + 0x14, 0x75, 0x5a, 0x5c, 0x36, 0x77, 0x0d, 0xca, 0xe0, 0x62, 0xc8, 0x8a, 0x96, 0xac, 0x0f, 0x01, + 0x61, 0x06, 0xe3, 0x4b, 0xf1, 0x45, 0x5c, 0x58, 0xa1, 0x2f, 0x4b, 0x8d, 0x22, 0x52, 0x7c, 0x49, + 0xed, 0x6d, 0x4c, 0xdb, 0x64, 0x62, 0x66, 0x52, 0xc8, 0xf7, 0xf8, 0x59, 0xfe, 0x8c, 0x74, 0x66, + 0xd2, 0x26, 0xa5, 0xe8, 0x3e, 0x25, 0x33, 0xe7, 0xde, 0x73, 0xce, 0xcc, 0x3d, 0x03, 0x57, 0x4b, + 0x4c, 0xa4, 0xe2, 0x12, 0xab, 0x6d, 0x56, 0xa4, 0xfc, 0xb3, 0xf9, 0xb2, 0xb2, 0x12, 0x4a, 0x50, + 0x57, 0x83, 0xcc, 0x82, 0xfe, 0xcb, 0x54, 0x88, 0x74, 0x83, 0x5c, 0x83, 0xf3, 0x7a, 0xc9, 0x55, + 0x96, 0xa3, 0x54, 0x49, 0x5e, 0x9a, 0x7a, 0xff, 0x99, 0x21, 0x53, 0x4d, 0x89, 0x92, 0x7f, 0x4d, + 0x36, 0x35, 0x1a, 0x20, 0x28, 0xe1, 0xe9, 0xa7, 0x1a, 0xab, 0xe6, 0x23, 0x26, 0xaa, 0xae, 0x50, + 0xc6, 0xf8, 0xab, 0x46, 0xa9, 0xe8, 0x0b, 0x00, 0x2c, 0x54, 0xa6, 0x9a, 0xbb, 0x24, 0x47, 0x8f, + 0x8c, 0x48, 0xe8, 0xc4, 0x9d, 0x1d, 0xea, 0xc3, 0xb9, 0x59, 0x4d, 0x16, 0xde, 0x60, 0x34, 0x0c, + 0x9d, 0x78, 0xbf, 0xa6, 0xcf, 0xc1, 0x59, 0x1a, 0xba, 0xc9, 0xc2, 0x1b, 0x6a, 0xf0, 0xb0, 0x11, + 0xfc, 0x21, 0x70, 0x79, 0x24, 0x29, 0x4b, 0x51, 0x48, 0xfc, 0xaf, 
0xe6, 0x9d, 0xd5, 0xcc, 0x50, + 0x6a, 0xcd, 0x8b, 0x28, 0x62, 0xbd, 0x7b, 0x60, 0x27, 0x79, 0xd9, 0xad, 0x6d, 0xba, 0x2d, 0x54, + 0xd5, 0xc4, 0x7b, 0x0e, 0x3f, 0x06, 0xb7, 0x07, 0xd1, 0x27, 0x30, 0x5c, 0x63, 0x63, 0x95, 0x77, + 0xbf, 0xf4, 0x35, 0x3c, 0xdc, 0xee, 0x6e, 0xcb, 0x1b, 0x8c, 0x48, 0x78, 0x11, 0x5d, 0x1e, 0xe9, + 0xe9, 0xf6, 0x26, 0x36, 0x35, 0xef, 0x06, 0x63, 0x12, 0xfc, 0x26, 0x70, 0x66, 0x76, 0xe9, 0x7b, + 0x38, 0xb7, 0xa7, 0x96, 0x1e, 0xd1, 0x76, 0xaf, 0x4f, 0xb6, 0xb3, 0xd6, 0xb0, 0xf5, 0xd7, 0x36, + 0xf9, 0xdf, 0xc0, 0xed, 0x41, 0x27, 0xfc, 0xbd, 0xe9, 0xfb, 0xbb, 0x3a, 0x12, 0xb0, 0xed, 0x7a, + 0xe0, 0x5d, 0x97, 0x15, 0x3c, 0xea, 0x42, 0x34, 0x6c, 0x69, 0x88, 0xa6, 0xa1, 0x96, 0x46, 0xc7, + 0x85, 0x75, 0xbb, 0xe9, 0x18, 0x9c, 0x7d, 0xb6, 0xac, 0xa8, 0xcf, 0x4c, 0xfa, 0x58, 0x9b, 0x3e, + 0xf6, 0xa5, 0xad, 0x88, 0x0f, 0xc5, 0xd1, 0x0a, 0xc0, 0x66, 0xf8, 0xc3, 0x74, 0x42, 0xbf, 0x83, + 0xdb, 0x1b, 0x16, 0xbd, 0xfe, 0xf7, 0x28, 0x75, 0x2a, 0xfd, 0x57, 0xf7, 0x99, 0x77, 0xf0, 0xe0, + 0x66, 0x06, 0xfd, 0x07, 0x72, 0xf3, 0xf8, 0x20, 0x3d, 0xdd, 0xb9, 0x9c, 0x8d, 0xd3, 0x4c, 0xfd, + 0xac, 0xe7, 0xec, 0x87, 0xc8, 0x79, 0x2a, 0x56, 0xb8, 0xe6, 0xe6, 0x85, 0xe8, 0x33, 0x48, 0x9e, + 0x62, 0x81, 0x55, 0xa2, 0x70, 0xc1, 0x53, 0xc1, 0x7b, 0x0f, 0x71, 0x7e, 0xa6, 0x4b, 0xde, 0xfe, + 0x0d, 0x00, 0x00, 0xff, 0xff, 0x77, 0xb5, 0x0e, 0xd1, 0xa0, 0x03, 0x00, 0x00, +} + // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn @@ -268,6 +306,14 @@ type ServingAPIServer interface { QueryFeatures(context.Context, *QueryFeaturesRequest) (*QueryFeaturesResponse, error) } +// UnimplementedServingAPIServer can be embedded to have forward compatible implementations. +type UnimplementedServingAPIServer struct { +} + +func (*UnimplementedServingAPIServer) QueryFeatures(ctx context.Context, req *QueryFeaturesRequest) (*QueryFeaturesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method QueryFeatures not implemented") +} + func RegisterServingAPIServer(s *grpc.Server, srv ServingAPIServer) { s.RegisterService(&_ServingAPI_serviceDesc, srv) } @@ -302,38 +348,3 @@ var _ServingAPI_serviceDesc = grpc.ServiceDesc{ Streams: []grpc.StreamDesc{}, Metadata: "feast/serving/Serving.proto", } - -func init() { - proto.RegisterFile("feast/serving/Serving.proto", fileDescriptor_Serving_f91320f9a3f0c4cf) -} - -var fileDescriptor_Serving_f91320f9a3f0c4cf = []byte{ - // 429 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x8a, 0xd3, 0x40, - 0x14, 0x75, 0x5a, 0x5c, 0x36, 0x77, 0x0d, 0xca, 0xe0, 0x62, 0xc8, 0x8a, 0x96, 0xac, 0x0f, 0x01, - 0x61, 0x06, 0xe3, 0x4b, 0xf1, 0x45, 0x5c, 0x58, 0xa1, 0x2f, 0x4b, 0x8d, 0x22, 0x52, 0x7c, 0x49, - 0xed, 0x6d, 0x4c, 0xdb, 0x64, 0x62, 0x66, 0x52, 0xc8, 0xf7, 0xf8, 0x59, 0xfe, 0x8c, 0x74, 0x66, - 0xd2, 0x26, 0xa5, 0xe8, 0x3e, 0x25, 0x33, 0xe7, 0xde, 0x73, 0xce, 0xcc, 0x3d, 0x03, 0x57, 0x4b, - 0x4c, 0xa4, 0xe2, 0x12, 0xab, 0x6d, 0x56, 0xa4, 0xfc, 0xb3, 0xf9, 0xb2, 0xb2, 0x12, 0x4a, 0x50, - 0x57, 0x83, 0xcc, 0x82, 0xfe, 0xcb, 0x54, 0x88, 0x74, 0x83, 0x5c, 0x83, 0xf3, 0x7a, 0xc9, 0x55, - 0x96, 0xa3, 0x54, 0x49, 0x5e, 0x9a, 0x7a, 0xff, 0x99, 0x21, 0x53, 0x4d, 0x89, 0x92, 0x7f, 0x4d, - 0x36, 0x35, 0x1a, 0x20, 0x28, 0xe1, 0xe9, 0xa7, 0x1a, 0xab, 0xe6, 0x23, 0x26, 0xaa, 0xae, 0x50, - 0xc6, 0xf8, 0xab, 0x46, 0xa9, 0xe8, 0x0b, 0x00, 0x2c, 0x54, 0xa6, 0x9a, 0xbb, 0x24, 0x47, 0x8f, - 0x8c, 0x48, 0xe8, 0xc4, 0x9d, 0x1d, 0xea, 0xc3, 0xb9, 0x59, 0x4d, 
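// Illustrative sketch (not taken from the generated diff, assumes it sits alongside
// the generated serving package code): wiring the regenerated ServingAPI service into
// a gRPC server. servingImpl embeds the UnimplementedServingAPIServer added above for
// forward compatibility; the QueryFeatures body, the "net" import and the listen
// address are assumptions, not from the diff.
type servingImpl struct {
	UnimplementedServingAPIServer
}

func (s *servingImpl) QueryFeatures(ctx context.Context, req *QueryFeaturesRequest) (*QueryFeaturesResponse, error) {
	// A real implementation would fan out to the configured storage backends.
	return &QueryFeaturesResponse{}, nil
}

func serve() error {
	lis, err := net.Listen("tcp", ":6566") // port chosen for illustration only
	if err != nil {
		return err
	}
	s := grpc.NewServer()
	RegisterServingAPIServer(s, &servingImpl{})
	return s.Serve(lis)
}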
0x16, 0xde, 0x60, 0x34, 0x0c, - 0x9d, 0x78, 0xbf, 0xa6, 0xcf, 0xc1, 0x59, 0x1a, 0xba, 0xc9, 0xc2, 0x1b, 0x6a, 0xf0, 0xb0, 0x11, - 0xfc, 0x21, 0x70, 0x79, 0x24, 0x29, 0x4b, 0x51, 0x48, 0xfc, 0xaf, 0xe6, 0x9d, 0xd5, 0xcc, 0x50, - 0x6a, 0xcd, 0x8b, 0x28, 0x62, 0xbd, 0x7b, 0x60, 0x27, 0x79, 0xd9, 0xad, 0x6d, 0xba, 0x2d, 0x54, - 0xd5, 0xc4, 0x7b, 0x0e, 0x3f, 0x06, 0xb7, 0x07, 0xd1, 0x27, 0x30, 0x5c, 0x63, 0x63, 0x95, 0x77, - 0xbf, 0xf4, 0x35, 0x3c, 0xdc, 0xee, 0x6e, 0xcb, 0x1b, 0x8c, 0x48, 0x78, 0x11, 0x5d, 0x1e, 0xe9, - 0xe9, 0xf6, 0x26, 0x36, 0x35, 0xef, 0x06, 0x63, 0x12, 0xfc, 0x26, 0x70, 0x66, 0x76, 0xe9, 0x7b, - 0x38, 0xb7, 0xa7, 0x96, 0x1e, 0xd1, 0x76, 0xaf, 0x4f, 0xb6, 0xb3, 0xd6, 0xb0, 0xf5, 0xd7, 0x36, - 0xf9, 0xdf, 0xc0, 0xed, 0x41, 0x27, 0xfc, 0xbd, 0xe9, 0xfb, 0xbb, 0x3a, 0x12, 0xb0, 0xed, 0x7a, - 0xe0, 0x5d, 0x97, 0x15, 0x3c, 0xea, 0x42, 0x34, 0x6c, 0x69, 0x88, 0xa6, 0xa1, 0x96, 0x46, 0xc7, - 0x85, 0x75, 0xbb, 0xe9, 0x18, 0x9c, 0x7d, 0xb6, 0xac, 0xa8, 0xcf, 0x4c, 0xfa, 0x58, 0x9b, 0x3e, - 0xf6, 0xa5, 0xad, 0x88, 0x0f, 0xc5, 0xd1, 0x0a, 0xc0, 0x66, 0xf8, 0xc3, 0x74, 0x42, 0xbf, 0x83, - 0xdb, 0x1b, 0x16, 0xbd, 0xfe, 0xf7, 0x28, 0x75, 0x2a, 0xfd, 0x57, 0xf7, 0x99, 0x77, 0xf0, 0xe0, - 0x66, 0x06, 0xfd, 0x07, 0x72, 0xf3, 0xf8, 0x20, 0x3d, 0xdd, 0xb9, 0x9c, 0x8d, 0xd3, 0x4c, 0xfd, - 0xac, 0xe7, 0xec, 0x87, 0xc8, 0x79, 0x2a, 0x56, 0xb8, 0xe6, 0xe6, 0x85, 0xe8, 0x33, 0x48, 0x9e, - 0x62, 0x81, 0x55, 0xa2, 0x70, 0xc1, 0x53, 0xc1, 0x7b, 0x0f, 0x71, 0x7e, 0xa6, 0x4b, 0xde, 0xfe, - 0x0d, 0x00, 0x00, 0xff, 0xff, 0x77, 0xb5, 0x0e, 0xd1, 0xa0, 0x03, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/specs/EntitySpec.pb.go b/protos/generated/go/feast/specs/EntitySpec.pb.go index 0f4374c679..75ef9aa318 100644 --- a/protos/generated/go/feast/specs/EntitySpec.pb.go +++ b/protos/generated/go/feast/specs/EntitySpec.pb.go @@ -1,11 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/specs/EntitySpec.proto -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" +package specs -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -16,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type EntitySpec struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` @@ -31,16 +33,17 @@ func (m *EntitySpec) Reset() { *m = EntitySpec{} } func (m *EntitySpec) String() string { return proto.CompactTextString(m) } func (*EntitySpec) ProtoMessage() {} func (*EntitySpec) Descriptor() ([]byte, []int) { - return fileDescriptor_EntitySpec_b8950ded39b854cb, []int{0} + return fileDescriptor_a3230229c2278d5e, []int{0} } + func (m *EntitySpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EntitySpec.Unmarshal(m, b) } func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_EntitySpec.Marshal(b, m, deterministic) } -func (dst *EntitySpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_EntitySpec.Merge(dst, src) +func (m *EntitySpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntitySpec.Merge(m, src) } func (m *EntitySpec) XXX_Size() int { return xxx_messageInfo_EntitySpec.Size(m) @@ -76,11 +79,9 @@ func init() { proto.RegisterType((*EntitySpec)(nil), "feast.specs.EntitySpec") } -func init() { - proto.RegisterFile("feast/specs/EntitySpec.proto", fileDescriptor_EntitySpec_b8950ded39b854cb) -} +func init() { proto.RegisterFile("feast/specs/EntitySpec.proto", fileDescriptor_a3230229c2278d5e) } -var fileDescriptor_EntitySpec_b8950ded39b854cb = []byte{ +var fileDescriptor_a3230229c2278d5e = []byte{ // 177 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x77, 0xcd, 0x2b, 0xc9, 0x2c, 0xa9, 0x0c, 0x2e, diff --git a/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go b/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go index 8349e5a53a..ed04332693 100644 --- a/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go +++ b/protos/generated/go/feast/specs/FeatureGroupSpec.pb.go @@ -1,11 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/specs/FeatureGroupSpec.proto -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" +package specs -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -16,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type FeatureGroupSpec struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` @@ -31,16 +33,17 @@ func (m *FeatureGroupSpec) Reset() { *m = FeatureGroupSpec{} } func (m *FeatureGroupSpec) String() string { return proto.CompactTextString(m) } func (*FeatureGroupSpec) ProtoMessage() {} func (*FeatureGroupSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureGroupSpec_21c67ee01edd412c, []int{0} + return fileDescriptor_df3b6a9e736b5719, []int{0} } + func (m *FeatureGroupSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureGroupSpec.Unmarshal(m, b) } func (m *FeatureGroupSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureGroupSpec.Marshal(b, m, deterministic) } -func (dst *FeatureGroupSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureGroupSpec.Merge(dst, src) +func (m *FeatureGroupSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureGroupSpec.Merge(m, src) } func (m *FeatureGroupSpec) XXX_Size() int { return xxx_messageInfo_FeatureGroupSpec.Size(m) @@ -76,11 +79,9 @@ func init() { proto.RegisterType((*FeatureGroupSpec)(nil), "feast.specs.FeatureGroupSpec") } -func init() { - proto.RegisterFile("feast/specs/FeatureGroupSpec.proto", fileDescriptor_FeatureGroupSpec_21c67ee01edd412c) -} +func init() { proto.RegisterFile("feast/specs/FeatureGroupSpec.proto", fileDescriptor_df3b6a9e736b5719) } -var fileDescriptor_FeatureGroupSpec_21c67ee01edd412c = []byte{ +var fileDescriptor_df3b6a9e736b5719 = []byte{ // 203 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4a, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x77, 0x4b, 0x4d, 0x2c, 0x29, 0x2d, 0x4a, 0x75, diff --git a/protos/generated/go/feast/specs/FeatureSpec.pb.go b/protos/generated/go/feast/specs/FeatureSpec.pb.go index 34ca43cc6b..a8cbb886a8 100644 --- a/protos/generated/go/feast/specs/FeatureSpec.pb.go +++ b/protos/generated/go/feast/specs/FeatureSpec.pb.go @@ -1,12 +1,14 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/specs/FeatureSpec.proto -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" +package specs -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import types "github.com/gojek/feast/protos/generated/go/feast/types" +import ( + fmt "fmt" + types "github.com/gojek/feast/protos/generated/go/feast/types" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -17,7 +19,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type FeatureSpec struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` @@ -40,16 +42,17 @@ func (m *FeatureSpec) Reset() { *m = FeatureSpec{} } func (m *FeatureSpec) String() string { return proto.CompactTextString(m) } func (*FeatureSpec) ProtoMessage() {} func (*FeatureSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_de24bbc3c8f2bed6, []int{0} + return fileDescriptor_b8f1468f11147fbe, []int{0} } + func (m *FeatureSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureSpec.Unmarshal(m, b) } func (m *FeatureSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureSpec.Marshal(b, m, deterministic) } -func (dst *FeatureSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureSpec.Merge(dst, src) +func (m *FeatureSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSpec.Merge(m, src) } func (m *FeatureSpec) XXX_Size() int { return xxx_messageInfo_FeatureSpec.Size(m) @@ -149,16 +152,17 @@ func (m *DataStores) Reset() { *m = DataStores{} } func (m *DataStores) String() string { return proto.CompactTextString(m) } func (*DataStores) ProtoMessage() {} func (*DataStores) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_de24bbc3c8f2bed6, []int{1} + return fileDescriptor_b8f1468f11147fbe, []int{1} } + func (m *DataStores) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DataStores.Unmarshal(m, b) } func (m *DataStores) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DataStores.Marshal(b, m, deterministic) } -func (dst *DataStores) XXX_Merge(src proto.Message) { - xxx_messageInfo_DataStores.Merge(dst, src) +func (m *DataStores) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataStores.Merge(m, src) } func (m *DataStores) XXX_Size() int { return xxx_messageInfo_DataStores.Size(m) @@ -195,16 +199,17 @@ func (m *DataStore) Reset() { *m = DataStore{} } func (m *DataStore) String() string { return proto.CompactTextString(m) } func (*DataStore) ProtoMessage() {} func (*DataStore) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureSpec_de24bbc3c8f2bed6, []int{2} + return fileDescriptor_b8f1468f11147fbe, []int{2} } + func (m *DataStore) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DataStore.Unmarshal(m, b) } func (m *DataStore) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DataStore.Marshal(b, m, deterministic) } -func (dst *DataStore) XXX_Merge(src proto.Message) { - xxx_messageInfo_DataStore.Merge(dst, src) +func (m *DataStore) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataStore.Merge(m, src) } func (m *DataStore) XXX_Size() int { return xxx_messageInfo_DataStore.Size(m) @@ -237,11 +242,9 @@ func init() { proto.RegisterMapType((map[string]string)(nil), "feast.specs.DataStore.OptionsEntry") } -func init() { - proto.RegisterFile("feast/specs/FeatureSpec.proto", fileDescriptor_FeatureSpec_de24bbc3c8f2bed6) -} +func init() { proto.RegisterFile("feast/specs/FeatureSpec.proto", fileDescriptor_b8f1468f11147fbe) } -var fileDescriptor_FeatureSpec_de24bbc3c8f2bed6 = []byte{ +var fileDescriptor_b8f1468f11147fbe = []byte{ // 451 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x53, 0xdf, 0x6b, 0xd4, 0x40, 0x10, 0x26, 0x49, 0xdb, 0x33, 0x93, 0x52, 0xca, 0x22, 0xed, 0x72, 0x2a, 
0x84, 0x13, 0xe1, 0x9e, diff --git a/protos/generated/go/feast/specs/ImportJobSpecs.pb.go b/protos/generated/go/feast/specs/ImportJobSpecs.pb.go index d4c940e947..924175e7bd 100644 --- a/protos/generated/go/feast/specs/ImportJobSpecs.pb.go +++ b/protos/generated/go/feast/specs/ImportJobSpecs.pb.go @@ -1,11 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/specs/ImportJobSpecs.proto -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" +package specs -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -16,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type ImportJobSpecs struct { JobId string `protobuf:"bytes,1,opt,name=jobId,proto3" json:"jobId,omitempty"` @@ -35,16 +37,17 @@ func (m *ImportJobSpecs) Reset() { *m = ImportJobSpecs{} } func (m *ImportJobSpecs) String() string { return proto.CompactTextString(m) } func (*ImportJobSpecs) ProtoMessage() {} func (*ImportJobSpecs) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportJobSpecs_c867f2e818ce78e5, []int{0} + return fileDescriptor_ca149f837727a759, []int{0} } + func (m *ImportJobSpecs) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ImportJobSpecs.Unmarshal(m, b) } func (m *ImportJobSpecs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ImportJobSpecs.Marshal(b, m, deterministic) } -func (dst *ImportJobSpecs) XXX_Merge(src proto.Message) { - xxx_messageInfo_ImportJobSpecs.Merge(dst, src) +func (m *ImportJobSpecs) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportJobSpecs.Merge(m, src) } func (m *ImportJobSpecs) XXX_Size() int { return xxx_messageInfo_ImportJobSpecs.Size(m) @@ -108,11 +111,9 @@ func init() { proto.RegisterType((*ImportJobSpecs)(nil), "feast.specs.ImportJobSpecs") } -func init() { - proto.RegisterFile("feast/specs/ImportJobSpecs.proto", fileDescriptor_ImportJobSpecs_c867f2e818ce78e5) -} +func init() { proto.RegisterFile("feast/specs/ImportJobSpecs.proto", fileDescriptor_ca149f837727a759) } -var fileDescriptor_ImportJobSpecs_c867f2e818ce78e5 = []byte{ +var fileDescriptor_ca149f837727a759 = []byte{ // 306 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x41, 0x4b, 0xc3, 0x40, 0x10, 0x85, 0xa9, 0xb5, 0x15, 0xb7, 0x22, 0xb8, 0x55, 0x5c, 0x44, 0x21, 0x78, 0xea, 0x29, 0x0b, diff --git a/protos/generated/go/feast/specs/ImportSpec.pb.go b/protos/generated/go/feast/specs/ImportSpec.pb.go index d16f9fa07b..d7788c1372 100644 --- a/protos/generated/go/feast/specs/ImportSpec.pb.go +++ b/protos/generated/go/feast/specs/ImportSpec.pb.go @@ -1,12 +1,14 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
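// Illustrative sketch (not taken from the generated diff, assumes it sits alongside
// the generated specs package code): the regenerated spec messages still round-trip
// through the standard golang/protobuf API; only the descriptor and XXX_* plumbing
// changed. Shown with EntitySpec's Name field, which appears in the struct above;
// "driver" is just an example value.
func roundTripEntitySpec() (*EntitySpec, error) {
	in := &EntitySpec{Name: "driver"}
	b, err := proto.Marshal(in)
	if err != nil {
		return nil, err
	}
	out := &EntitySpec{}
	if err := proto.Unmarshal(b, out); err != nil {
		return nil, err
	}
	return out, nil
}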
// source: feast/specs/ImportSpec.proto -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" +package specs -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -17,7 +19,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type ImportSpec struct { Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` @@ -34,16 +36,17 @@ func (m *ImportSpec) Reset() { *m = ImportSpec{} } func (m *ImportSpec) String() string { return proto.CompactTextString(m) } func (*ImportSpec) ProtoMessage() {} func (*ImportSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_673bc4f248a91137, []int{0} + return fileDescriptor_bf21bfd2215b3e0c, []int{0} } + func (m *ImportSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ImportSpec.Unmarshal(m, b) } func (m *ImportSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ImportSpec.Marshal(b, m, deterministic) } -func (dst *ImportSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_ImportSpec.Merge(dst, src) +func (m *ImportSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSpec.Merge(m, src) } func (m *ImportSpec) XXX_Size() int { return xxx_messageInfo_ImportSpec.Size(m) @@ -107,16 +110,17 @@ func (m *Schema) Reset() { *m = Schema{} } func (m *Schema) String() string { return proto.CompactTextString(m) } func (*Schema) ProtoMessage() {} func (*Schema) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_673bc4f248a91137, []int{1} + return fileDescriptor_bf21bfd2215b3e0c, []int{1} } + func (m *Schema) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Schema.Unmarshal(m, b) } func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Schema.Marshal(b, m, deterministic) } -func (dst *Schema) XXX_Merge(src proto.Message) { - xxx_messageInfo_Schema.Merge(dst, src) +func (m *Schema) XXX_Merge(src proto.Message) { + xxx_messageInfo_Schema.Merge(m, src) } func (m *Schema) XXX_Size() int { return xxx_messageInfo_Schema.Size(m) @@ -178,76 +182,14 @@ func (m *Schema) GetEntityIdColumn() string { return "" } -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Schema) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Schema_OneofMarshaler, _Schema_OneofUnmarshaler, _Schema_OneofSizer, []interface{}{ +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*Schema) XXX_OneofWrappers() []interface{} { + return []interface{}{ (*Schema_TimestampColumn)(nil), (*Schema_TimestampValue)(nil), } } -func _Schema_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Schema) - // timestamp - switch x := m.Timestamp.(type) { - case *Schema_TimestampColumn: - b.EncodeVarint(5<<3 | proto.WireBytes) - b.EncodeStringBytes(x.TimestampColumn) - case *Schema_TimestampValue: - b.EncodeVarint(6<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.TimestampValue); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("Schema.Timestamp has unexpected type %T", x) - } - return nil -} - -func _Schema_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Schema) - switch tag { - case 5: // timestamp.timestampColumn - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Timestamp = &Schema_TimestampColumn{x} - return true, err - case 6: // timestamp.timestampValue - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(timestamp.Timestamp) - err := b.DecodeMessage(msg) - m.Timestamp = &Schema_TimestampValue{msg} - return true, err - default: - return false, nil - } -} - -func _Schema_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Schema) - // timestamp - switch x := m.Timestamp.(type) { - case *Schema_TimestampColumn: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(len(x.TimestampColumn))) - n += len(x.TimestampColumn) - case *Schema_TimestampValue: - s := proto.Size(x.TimestampValue) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - type Field struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` FeatureId string `protobuf:"bytes,2,opt,name=featureId,proto3" json:"featureId,omitempty"` @@ -260,16 +202,17 @@ func (m *Field) Reset() { *m = Field{} } func (m *Field) String() string { return proto.CompactTextString(m) } func (*Field) ProtoMessage() {} func (*Field) Descriptor() ([]byte, []int) { - return fileDescriptor_ImportSpec_673bc4f248a91137, []int{2} + return fileDescriptor_bf21bfd2215b3e0c, []int{2} } + func (m *Field) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Field.Unmarshal(m, b) } func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Field.Marshal(b, m, deterministic) } -func (dst *Field) XXX_Merge(src proto.Message) { - xxx_messageInfo_Field.Merge(dst, src) +func (m *Field) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field.Merge(m, src) } func (m *Field) XXX_Size() int { return xxx_messageInfo_Field.Size(m) @@ -302,11 +245,9 @@ func init() { proto.RegisterType((*Field)(nil), "feast.specs.Field") } -func init() { - proto.RegisterFile("feast/specs/ImportSpec.proto", fileDescriptor_ImportSpec_673bc4f248a91137) -} +func init() { proto.RegisterFile("feast/specs/ImportSpec.proto", fileDescriptor_bf21bfd2215b3e0c) } -var fileDescriptor_ImportSpec_673bc4f248a91137 = []byte{ +var fileDescriptor_bf21bfd2215b3e0c = []byte{ // 440 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x5d, 0x8f, 0x93, 0x40, 0x14, 0x5d, 0xca, 0x16, 0xed, 0x6d, 0xdc, 0x9a, 0xab, 0x0f, 0x84, 0x6c, 0x62, 0xd3, 0x07, 0x6d, diff --git a/protos/generated/go/feast/specs/StorageSpec.pb.go 
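// Illustrative sketch (not taken from the generated diff): the hand-rolled oneof
// marshaler/unmarshaler/sizer removed above is replaced by XXX_OneofWrappers, but
// callers keep using the same wrapper types. A Schema's timestamp source is either a
// column name or a fixed value, and reading it back is a plain type switch. The
// "event_timestamp" column name is an example, not from the diff.
func describeTimestamp(s *Schema) string {
	switch x := s.Timestamp.(type) {
	case *Schema_TimestampColumn:
		return "column: " + x.TimestampColumn
	case *Schema_TimestampValue:
		return "fixed value: " + x.TimestampValue.String()
	default:
		return "no timestamp source set"
	}
}

// Setting the oneof uses the same wrapper types:
//   s := &Schema{Timestamp: &Schema_TimestampColumn{TimestampColumn: "event_timestamp"}}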
b/protos/generated/go/feast/specs/StorageSpec.pb.go index fefe56a1be..180fcf14c5 100644 --- a/protos/generated/go/feast/specs/StorageSpec.pb.go +++ b/protos/generated/go/feast/specs/StorageSpec.pb.go @@ -1,11 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/specs/StorageSpec.proto -package specs // import "github.com/gojek/feast/protos/generated/go/feast/specs" +package specs -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -16,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type StorageSpec struct { // unique identifier for this instance @@ -36,16 +38,17 @@ func (m *StorageSpec) Reset() { *m = StorageSpec{} } func (m *StorageSpec) String() string { return proto.CompactTextString(m) } func (*StorageSpec) ProtoMessage() {} func (*StorageSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_StorageSpec_bfb8a5e5cf34de95, []int{0} + return fileDescriptor_7783b72a0d689614, []int{0} } + func (m *StorageSpec) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StorageSpec.Unmarshal(m, b) } func (m *StorageSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StorageSpec.Marshal(b, m, deterministic) } -func (dst *StorageSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_StorageSpec.Merge(dst, src) +func (m *StorageSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_StorageSpec.Merge(m, src) } func (m *StorageSpec) XXX_Size() int { return xxx_messageInfo_StorageSpec.Size(m) @@ -82,11 +85,9 @@ func init() { proto.RegisterMapType((map[string]string)(nil), "feast.specs.StorageSpec.OptionsEntry") } -func init() { - proto.RegisterFile("feast/specs/StorageSpec.proto", fileDescriptor_StorageSpec_bfb8a5e5cf34de95) -} +func init() { proto.RegisterFile("feast/specs/StorageSpec.proto", fileDescriptor_7783b72a0d689614) } -var fileDescriptor_StorageSpec_bfb8a5e5cf34de95 = []byte{ +var fileDescriptor_7783b72a0d689614 = []byte{ // 227 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0x2e, 0x48, 0x4d, 0x2e, 0xd6, 0x0f, 0x2e, 0xc9, 0x2f, 0x4a, 0x4c, 0x4f, 0x0d, diff --git a/protos/generated/go/feast/storage/BigTable.pb.go b/protos/generated/go/feast/storage/BigTable.pb.go index e65b5e35fc..7ad1e0d5b9 100644 --- a/protos/generated/go/feast/storage/BigTable.pb.go +++ b/protos/generated/go/feast/storage/BigTable.pb.go @@ -1,11 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/storage/BigTable.proto -package storage // import "github.com/gojek/feast/protos/generated/go/feast/storage" +package storage -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -16,13 +18,12 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. 
// A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type BigTableRowKey struct { // This should be the first 7 characters of a sha1 of the entityKey proto encoded - Sha1Prefix string `protobuf:"bytes,1,opt,name=sha1Prefix,proto3" json:"sha1Prefix,omitempty"` - EntityKey string `protobuf:"bytes,2,opt,name=entityKey,proto3" json:"entityKey,omitempty"` - // This should be String.valueOf(int64.MAX_VALUE - roundToEntityGranularity(eventTimestamp)) + Sha1Prefix string `protobuf:"bytes,1,opt,name=sha1Prefix,proto3" json:"sha1Prefix,omitempty"` + EntityKey string `protobuf:"bytes,2,opt,name=entityKey,proto3" json:"entityKey,omitempty"` ReversedMillis string `protobuf:"bytes,3,opt,name=reversedMillis,proto3" json:"reversedMillis,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -33,16 +34,17 @@ func (m *BigTableRowKey) Reset() { *m = BigTableRowKey{} } func (m *BigTableRowKey) String() string { return proto.CompactTextString(m) } func (*BigTableRowKey) ProtoMessage() {} func (*BigTableRowKey) Descriptor() ([]byte, []int) { - return fileDescriptor_BigTable_e25bdeffe0669ae7, []int{0} + return fileDescriptor_1d33ec3bd45c712c, []int{0} } + func (m *BigTableRowKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BigTableRowKey.Unmarshal(m, b) } func (m *BigTableRowKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BigTableRowKey.Marshal(b, m, deterministic) } -func (dst *BigTableRowKey) XXX_Merge(src proto.Message) { - xxx_messageInfo_BigTableRowKey.Merge(dst, src) +func (m *BigTableRowKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigTableRowKey.Merge(m, src) } func (m *BigTableRowKey) XXX_Size() int { return xxx_messageInfo_BigTableRowKey.Size(m) @@ -78,11 +80,9 @@ func init() { proto.RegisterType((*BigTableRowKey)(nil), "feast.storage.BigTableRowKey") } -func init() { - proto.RegisterFile("feast/storage/BigTable.proto", fileDescriptor_BigTable_e25bdeffe0669ae7) -} +func init() { proto.RegisterFile("feast/storage/BigTable.proto", fileDescriptor_1d33ec3bd45c712c) } -var fileDescriptor_BigTable_e25bdeffe0669ae7 = []byte{ +var fileDescriptor_1d33ec3bd45c712c = []byte{ // 193 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0xb1, 0x0b, 0x82, 0x40, 0x18, 0x47, 0xb1, 0x20, 0xf0, 0x40, 0x87, 0x9b, 0x1c, 0x24, 0xa2, 0x21, 0x9a, 0x3c, 0xa2, 0xa5, diff --git a/protos/generated/go/feast/storage/Redis.pb.go b/protos/generated/go/feast/storage/Redis.pb.go index 49f80a061e..4d42fa2fcc 100644 --- a/protos/generated/go/feast/storage/Redis.pb.go +++ b/protos/generated/go/feast/storage/Redis.pb.go @@ -1,13 +1,15 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
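// Illustrative sketch (not taken from the generated diff, assumes it sits alongside the
// generated storage package code): building a BigTableRowKey as the comments above
// describe it. The sha1 prefix spreads writes across tablets and the reversed-millis
// component makes newer rows sort first. The file does not show how the entity key is
// proto-encoded before hashing or how the event timestamp is rounded, so the raw string
// is hashed and the rounding is omitted here; crypto/sha1, encoding/hex and strconv
// imports are assumed.
func makeBigTableRowKey(entityKey string, eventMillis int64) *BigTableRowKey {
	sum := sha1.Sum([]byte(entityKey))
	prefix := hex.EncodeToString(sum[:])[:7] // first 7 characters of the sha1
	return &BigTableRowKey{
		Sha1Prefix:     prefix,
		EntityKey:      entityKey,
		ReversedMillis: strconv.FormatInt(math.MaxInt64-eventMillis, 10),
	}
}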
// source: feast/storage/Redis.proto -package storage // import "github.com/gojek/feast/protos/generated/go/feast/storage" +package storage -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import types "github.com/gojek/feast/protos/generated/go/feast/types" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import ( + fmt "fmt" + types "github.com/gojek/feast/protos/generated/go/feast/types" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -18,16 +20,16 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type RedisBucketKey struct { // Entity key from the FeatureRow EntityKey string `protobuf:"bytes,2,opt,name=entityKey,proto3" json:"entityKey,omitempty"` - // * + //* // This should be the first 7 characters of a sha1 of the featureId // This is just to save storage space as it's kept in memory. FeatureIdSha1Prefix string `protobuf:"bytes,3,opt,name=featureIdSha1Prefix,proto3" json:"featureIdSha1Prefix,omitempty"` - // * + //* // This groups a feature's values (for different eventTimestamps), // into buckets so many can be retrieved together. // @@ -43,16 +45,17 @@ func (m *RedisBucketKey) Reset() { *m = RedisBucketKey{} } func (m *RedisBucketKey) String() string { return proto.CompactTextString(m) } func (*RedisBucketKey) ProtoMessage() {} func (*RedisBucketKey) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_cef62c817c1622ce, []int{0} + return fileDescriptor_64e898a359fc9e5d, []int{0} } + func (m *RedisBucketKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RedisBucketKey.Unmarshal(m, b) } func (m *RedisBucketKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_RedisBucketKey.Marshal(b, m, deterministic) } -func (dst *RedisBucketKey) XXX_Merge(src proto.Message) { - xxx_messageInfo_RedisBucketKey.Merge(dst, src) +func (m *RedisBucketKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedisBucketKey.Merge(m, src) } func (m *RedisBucketKey) XXX_Size() int { return xxx_messageInfo_RedisBucketKey.Size(m) @@ -84,7 +87,7 @@ func (m *RedisBucketKey) GetBucketId() uint64 { return 0 } -// * +//* // Because in redis features are stored as a key per feature not per // feature row, we need the event timestamp in the value. 
type RedisBucketValue struct { @@ -99,16 +102,17 @@ func (m *RedisBucketValue) Reset() { *m = RedisBucketValue{} } func (m *RedisBucketValue) String() string { return proto.CompactTextString(m) } func (*RedisBucketValue) ProtoMessage() {} func (*RedisBucketValue) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_cef62c817c1622ce, []int{1} + return fileDescriptor_64e898a359fc9e5d, []int{1} } + func (m *RedisBucketValue) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RedisBucketValue.Unmarshal(m, b) } func (m *RedisBucketValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_RedisBucketValue.Marshal(b, m, deterministic) } -func (dst *RedisBucketValue) XXX_Merge(src proto.Message) { - xxx_messageInfo_RedisBucketValue.Merge(dst, src) +func (m *RedisBucketValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedisBucketValue.Merge(m, src) } func (m *RedisBucketValue) XXX_Size() int { return xxx_messageInfo_RedisBucketValue.Size(m) @@ -133,7 +137,7 @@ func (m *RedisBucketValue) GetEventTimestamp() *timestamp.Timestamp { return nil } -// * +//* // This allows us to group multiple bucket values together in a // single list to make it easier to keep sets together type RedisBucketValueList struct { @@ -147,16 +151,17 @@ func (m *RedisBucketValueList) Reset() { *m = RedisBucketValueList{} } func (m *RedisBucketValueList) String() string { return proto.CompactTextString(m) } func (*RedisBucketValueList) ProtoMessage() {} func (*RedisBucketValueList) Descriptor() ([]byte, []int) { - return fileDescriptor_Redis_cef62c817c1622ce, []int{2} + return fileDescriptor_64e898a359fc9e5d, []int{2} } + func (m *RedisBucketValueList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RedisBucketValueList.Unmarshal(m, b) } func (m *RedisBucketValueList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_RedisBucketValueList.Marshal(b, m, deterministic) } -func (dst *RedisBucketValueList) XXX_Merge(src proto.Message) { - xxx_messageInfo_RedisBucketValueList.Merge(dst, src) +func (m *RedisBucketValueList) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedisBucketValueList.Merge(m, src) } func (m *RedisBucketValueList) XXX_Size() int { return xxx_messageInfo_RedisBucketValueList.Size(m) @@ -180,9 +185,9 @@ func init() { proto.RegisterType((*RedisBucketValueList)(nil), "feast.storage.RedisBucketValueList") } -func init() { proto.RegisterFile("feast/storage/Redis.proto", fileDescriptor_Redis_cef62c817c1622ce) } +func init() { proto.RegisterFile("feast/storage/Redis.proto", fileDescriptor_64e898a359fc9e5d) } -var fileDescriptor_Redis_cef62c817c1622ce = []byte{ +var fileDescriptor_64e898a359fc9e5d = []byte{ // 325 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xcd, 0x4f, 0xf2, 0x40, 0x10, 0xc6, 0xd3, 0x97, 0x57, 0x22, 0x4b, 0x24, 0x66, 0x35, 0xb1, 0x36, 0x26, 0x34, 0x9c, 0x7a, diff --git a/protos/generated/go/feast/types/Feature.pb.go b/protos/generated/go/feast/types/Feature.pb.go index 51ed41fb54..0ef6e65d55 100644 --- a/protos/generated/go/feast/types/Feature.pb.go +++ b/protos/generated/go/feast/types/Feature.pb.go @@ -1,11 +1,13 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
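// Illustrative sketch (not taken from the generated diff, assumes it sits alongside the
// generated storage package code): assembling a RedisBucketKey as the comments above
// describe it. The feature id is hashed and truncated to seven characters to keep the
// in-memory key small; how event timestamps map to bucket ids is not defined in this
// file, so the caller supplies the bucket id. crypto/sha1 and encoding/hex imports are assumed.
func makeRedisBucketKey(entityKey, featureId string, bucketId uint64) *RedisBucketKey {
	sum := sha1.Sum([]byte(featureId))
	return &RedisBucketKey{
		EntityKey:           entityKey,
		FeatureIdSha1Prefix: hex.EncodeToString(sum[:])[:7],
		BucketId:            bucketId,
	}
}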
// source: feast/types/Feature.proto -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" +package types -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -16,7 +18,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type Feature struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` @@ -30,16 +32,17 @@ func (m *Feature) Reset() { *m = Feature{} } func (m *Feature) String() string { return proto.CompactTextString(m) } func (*Feature) ProtoMessage() {} func (*Feature) Descriptor() ([]byte, []int) { - return fileDescriptor_Feature_c2a5d99d9bf3ca9c, []int{0} + return fileDescriptor_4e19474999533fc9, []int{0} } + func (m *Feature) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Feature.Unmarshal(m, b) } func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Feature.Marshal(b, m, deterministic) } -func (dst *Feature) XXX_Merge(src proto.Message) { - xxx_messageInfo_Feature.Merge(dst, src) +func (m *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(m, src) } func (m *Feature) XXX_Size() int { return xxx_messageInfo_Feature.Size(m) @@ -68,9 +71,9 @@ func init() { proto.RegisterType((*Feature)(nil), "feast.types.Feature") } -func init() { proto.RegisterFile("feast/types/Feature.proto", fileDescriptor_Feature_c2a5d99d9bf3ca9c) } +func init() { proto.RegisterFile("feast/types/Feature.proto", fileDescriptor_4e19474999533fc9) } -var fileDescriptor_Feature_c2a5d99d9bf3ca9c = []byte{ +var fileDescriptor_4e19474999533fc9 = []byte{ // 173 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0x4b, 0x4d, 0x2c, 0x2e, 0xd1, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x77, 0x4b, 0x4d, 0x2c, 0x29, 0x2d, 0x4a, 0xd5, diff --git a/protos/generated/go/feast/types/FeatureRow.pb.go b/protos/generated/go/feast/types/FeatureRow.pb.go index dc6099ba75..9d482da4a4 100644 --- a/protos/generated/go/feast/types/FeatureRow.pb.go +++ b/protos/generated/go/feast/types/FeatureRow.pb.go @@ -1,12 +1,14 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/types/FeatureRow.proto -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" +package types -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -17,7 +19,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type FeatureRowKey struct { EntityKey string `protobuf:"bytes,1,opt,name=entityKey,proto3" json:"entityKey,omitempty"` @@ -32,16 +34,17 @@ func (m *FeatureRowKey) Reset() { *m = FeatureRowKey{} } func (m *FeatureRowKey) String() string { return proto.CompactTextString(m) } func (*FeatureRowKey) ProtoMessage() {} func (*FeatureRowKey) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRow_3072498459678ede, []int{0} + return fileDescriptor_fbbea9c89787d1c7, []int{0} } + func (m *FeatureRowKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRowKey.Unmarshal(m, b) } func (m *FeatureRowKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureRowKey.Marshal(b, m, deterministic) } -func (dst *FeatureRowKey) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureRowKey.Merge(dst, src) +func (m *FeatureRowKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureRowKey.Merge(m, src) } func (m *FeatureRowKey) XXX_Size() int { return xxx_messageInfo_FeatureRowKey.Size(m) @@ -87,16 +90,17 @@ func (m *FeatureRow) Reset() { *m = FeatureRow{} } func (m *FeatureRow) String() string { return proto.CompactTextString(m) } func (*FeatureRow) ProtoMessage() {} func (*FeatureRow) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRow_3072498459678ede, []int{1} + return fileDescriptor_fbbea9c89787d1c7, []int{1} } + func (m *FeatureRow) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRow.Unmarshal(m, b) } func (m *FeatureRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureRow.Marshal(b, m, deterministic) } -func (dst *FeatureRow) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureRow.Merge(dst, src) +func (m *FeatureRow) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureRow.Merge(m, src) } func (m *FeatureRow) XXX_Size() int { return xxx_messageInfo_FeatureRow.Size(m) @@ -140,11 +144,9 @@ func init() { proto.RegisterType((*FeatureRow)(nil), "feast.types.FeatureRow") } -func init() { - proto.RegisterFile("feast/types/FeatureRow.proto", fileDescriptor_FeatureRow_3072498459678ede) -} +func init() { proto.RegisterFile("feast/types/FeatureRow.proto", fileDescriptor_fbbea9c89787d1c7) } -var fileDescriptor_FeatureRow_3072498459678ede = []byte{ +var fileDescriptor_fbbea9c89787d1c7 = []byte{ // 266 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x91, 0xc1, 0x4a, 0xc4, 0x30, 0x10, 0x86, 0x89, 0x2b, 0xe2, 0xce, 0xa2, 0x42, 0xf1, 0x50, 0xcb, 0xa2, 0x65, 0x4f, 0x3d, 0x65, diff --git a/protos/generated/go/feast/types/FeatureRowExtended.pb.go b/protos/generated/go/feast/types/FeatureRowExtended.pb.go index ad89f27db2..e8371fcfcd 100644 --- a/protos/generated/go/feast/types/FeatureRowExtended.pb.go +++ b/protos/generated/go/feast/types/FeatureRowExtended.pb.go @@ -1,12 +1,14 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// source: feast/types/FeatureRowExtended.proto -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" +package types -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -17,7 +19,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type Error struct { Cause string `protobuf:"bytes,1,opt,name=cause,proto3" json:"cause,omitempty"` @@ -33,16 +35,17 @@ func (m *Error) Reset() { *m = Error{} } func (m *Error) String() string { return proto.CompactTextString(m) } func (*Error) ProtoMessage() {} func (*Error) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{0} + return fileDescriptor_7823aa2c72575793, []int{0} } + func (m *Error) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Error.Unmarshal(m, b) } func (m *Error) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Error.Marshal(b, m, deterministic) } -func (dst *Error) XXX_Merge(src proto.Message) { - xxx_messageInfo_Error.Merge(dst, src) +func (m *Error) XXX_Merge(src proto.Message) { + xxx_messageInfo_Error.Merge(m, src) } func (m *Error) XXX_Size() int { return xxx_messageInfo_Error.Size(m) @@ -93,16 +96,17 @@ func (m *Attempt) Reset() { *m = Attempt{} } func (m *Attempt) String() string { return proto.CompactTextString(m) } func (*Attempt) ProtoMessage() {} func (*Attempt) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{1} + return fileDescriptor_7823aa2c72575793, []int{1} } + func (m *Attempt) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Attempt.Unmarshal(m, b) } func (m *Attempt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Attempt.Marshal(b, m, deterministic) } -func (dst *Attempt) XXX_Merge(src proto.Message) { - xxx_messageInfo_Attempt.Merge(dst, src) +func (m *Attempt) XXX_Merge(src proto.Message) { + xxx_messageInfo_Attempt.Merge(m, src) } func (m *Attempt) XXX_Size() int { return xxx_messageInfo_Attempt.Size(m) @@ -140,16 +144,17 @@ func (m *FeatureRowExtended) Reset() { *m = FeatureRowExtended{} } func (m *FeatureRowExtended) String() string { return proto.CompactTextString(m) } func (*FeatureRowExtended) ProtoMessage() {} func (*FeatureRowExtended) Descriptor() ([]byte, []int) { - return fileDescriptor_FeatureRowExtended_bfd3c37956d1a040, []int{2} + return fileDescriptor_7823aa2c72575793, []int{2} } + func (m *FeatureRowExtended) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FeatureRowExtended.Unmarshal(m, b) } func (m *FeatureRowExtended) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FeatureRowExtended.Marshal(b, m, deterministic) } -func (dst *FeatureRowExtended) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureRowExtended.Merge(dst, src) +func (m *FeatureRowExtended) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureRowExtended.Merge(m, src) 
} func (m *FeatureRowExtended) XXX_Size() int { return xxx_messageInfo_FeatureRowExtended.Size(m) @@ -188,10 +193,10 @@ func init() { } func init() { - proto.RegisterFile("feast/types/FeatureRowExtended.proto", fileDescriptor_FeatureRowExtended_bfd3c37956d1a040) + proto.RegisterFile("feast/types/FeatureRowExtended.proto", fileDescriptor_7823aa2c72575793) } -var fileDescriptor_FeatureRowExtended_bfd3c37956d1a040 = []byte{ +var fileDescriptor_7823aa2c72575793 = []byte{ // 338 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xc1, 0x6b, 0xea, 0x40, 0x10, 0xc6, 0xf1, 0xf9, 0xf2, 0x7c, 0x4e, 0x6e, 0x8b, 0x60, 0x08, 0xd2, 0x16, 0xe9, 0xc1, 0x5e, diff --git a/protos/generated/go/feast/types/Granularity.pb.go b/protos/generated/go/feast/types/Granularity.pb.go deleted file mode 100644 index 5166f7bbc2..0000000000 --- a/protos/generated/go/feast/types/Granularity.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: feast/types/Granularity.proto - -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type Granularity_Enum int32 - -const ( - Granularity_NONE Granularity_Enum = 0 - Granularity_DAY Granularity_Enum = 1 - Granularity_HOUR Granularity_Enum = 2 - Granularity_MINUTE Granularity_Enum = 3 - Granularity_SECOND Granularity_Enum = 4 -) - -var Granularity_Enum_name = map[int32]string{ - 0: "NONE", - 1: "DAY", - 2: "HOUR", - 3: "MINUTE", - 4: "SECOND", -} -var Granularity_Enum_value = map[string]int32{ - "NONE": 0, - "DAY": 1, - "HOUR": 2, - "MINUTE": 3, - "SECOND": 4, -} - -func (x Granularity_Enum) String() string { - return proto.EnumName(Granularity_Enum_name, int32(x)) -} -func (Granularity_Enum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_Granularity_4b57756f7a751fdb, []int{0, 0} -} - -type Granularity struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Granularity) Reset() { *m = Granularity{} } -func (m *Granularity) String() string { return proto.CompactTextString(m) } -func (*Granularity) ProtoMessage() {} -func (*Granularity) Descriptor() ([]byte, []int) { - return fileDescriptor_Granularity_4b57756f7a751fdb, []int{0} -} -func (m *Granularity) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Granularity.Unmarshal(m, b) -} -func (m *Granularity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Granularity.Marshal(b, m, deterministic) -} -func (dst *Granularity) XXX_Merge(src proto.Message) { - xxx_messageInfo_Granularity.Merge(dst, src) -} -func (m *Granularity) XXX_Size() int { - return xxx_messageInfo_Granularity.Size(m) -} -func (m *Granularity) XXX_DiscardUnknown() { - xxx_messageInfo_Granularity.DiscardUnknown(m) -} - -var xxx_messageInfo_Granularity proto.InternalMessageInfo - -func init() { - proto.RegisterType((*Granularity)(nil), "feast.types.Granularity") - 
proto.RegisterEnum("feast.types.Granularity_Enum", Granularity_Enum_name, Granularity_Enum_value) -} - -func init() { - proto.RegisterFile("feast/types/Granularity.proto", fileDescriptor_Granularity_4b57756f7a751fdb) -} - -var fileDescriptor_Granularity_4b57756f7a751fdb = []byte{ - // 183 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x4b, 0x4d, 0x2c, - 0x2e, 0xd1, 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x77, 0x2f, 0x4a, 0xcc, 0x2b, 0xcd, 0x49, 0x2c, - 0xca, 0x2c, 0xa9, 0xd4, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x06, 0x4b, 0xeb, 0x81, 0xa5, - 0x95, 0xbc, 0xb8, 0xb8, 0x91, 0x54, 0x28, 0x59, 0x73, 0xb1, 0xb8, 0xe6, 0x95, 0xe6, 0x0a, 0x71, - 0x70, 0xb1, 0xf8, 0xf9, 0xfb, 0xb9, 0x0a, 0x30, 0x08, 0xb1, 0x73, 0x31, 0xbb, 0x38, 0x46, 0x0a, - 0x30, 0x82, 0x84, 0x3c, 0xfc, 0x43, 0x83, 0x04, 0x98, 0x84, 0xb8, 0xb8, 0xd8, 0x7c, 0x3d, 0xfd, - 0x42, 0x43, 0x5c, 0x05, 0x98, 0x41, 0xec, 0x60, 0x57, 0x67, 0x7f, 0x3f, 0x17, 0x01, 0x16, 0xa7, - 0x70, 0x2e, 0x64, 0xa3, 0x9d, 0x04, 0x90, 0x0c, 0x0e, 0x00, 0xd9, 0x1c, 0x65, 0x96, 0x9e, 0x59, - 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x9f, 0x9e, 0x9f, 0x95, 0x9a, 0xad, 0x0f, 0x71, - 0x29, 0xd8, 0x5d, 0xc5, 0xfa, 0xe9, 0xa9, 0x79, 0xa9, 0x45, 0x89, 0x25, 0xa9, 0x29, 0xfa, 0xe9, - 0xf9, 0xfa, 0x48, 0x7e, 0x48, 0x62, 0x03, 0x2b, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xe9, - 0x56, 0x0c, 0xf2, 0xd9, 0x00, 0x00, 0x00, -} diff --git a/protos/generated/go/feast/types/Value.pb.go b/protos/generated/go/feast/types/Value.pb.go index b9c26f8c4b..a7ef038e1d 100644 --- a/protos/generated/go/feast/types/Value.pb.go +++ b/protos/generated/go/feast/types/Value.pb.go @@ -1,12 +1,14 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: feast/types/Value.proto -package types // import "github.com/gojek/feast/protos/generated/go/feast/types" +package types -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" + math "math" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -17,7 +19,7 @@ var _ = math.Inf // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type ValueType_Enum int32 @@ -44,6 +46,7 @@ var ValueType_Enum_name = map[int32]string{ 7: "BOOL", 8: "TIMESTAMP", } + var ValueType_Enum_value = map[string]int32{ "UNKNOWN": 0, "BYTES": 1, @@ -59,8 +62,9 @@ var ValueType_Enum_value = map[string]int32{ func (x ValueType_Enum) String() string { return proto.EnumName(ValueType_Enum_name, int32(x)) } + func (ValueType_Enum) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{0, 0} + return fileDescriptor_47c504407d284ecc, []int{0, 0} } type ValueType struct { @@ -73,16 +77,17 @@ func (m *ValueType) Reset() { *m = ValueType{} } func (m *ValueType) String() string { return proto.CompactTextString(m) } func (*ValueType) ProtoMessage() {} func (*ValueType) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{0} + return fileDescriptor_47c504407d284ecc, []int{0} } + func (m *ValueType) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ValueType.Unmarshal(m, b) } func (m *ValueType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ValueType.Marshal(b, m, deterministic) } -func (dst *ValueType) XXX_Merge(src proto.Message) { - xxx_messageInfo_ValueType.Merge(dst, src) +func (m *ValueType) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueType.Merge(m, src) } func (m *ValueType) XXX_Size() int { return xxx_messageInfo_ValueType.Size(m) @@ -113,16 +118,17 @@ func (m *Value) Reset() { *m = Value{} } func (m *Value) String() string { return proto.CompactTextString(m) } func (*Value) ProtoMessage() {} func (*Value) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{1} + return fileDescriptor_47c504407d284ecc, []int{1} } + func (m *Value) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Value.Unmarshal(m, b) } func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Value.Marshal(b, m, deterministic) } -func (dst *Value) XXX_Merge(src proto.Message) { - xxx_messageInfo_Value.Merge(dst, src) +func (m *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(m, src) } func (m *Value) XXX_Size() int { return xxx_messageInfo_Value.Size(m) @@ -248,9 +254,9 @@ func (m *Value) GetTimestampVal() *timestamp.Timestamp { return nil } -// XXX_OneofFuncs is for the internal use of the proto package. -func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*Value) XXX_OneofWrappers() []interface{} { + return []interface{}{ (*Value_BytesVal)(nil), (*Value_StringVal)(nil), (*Value_Int32Val)(nil), @@ -262,151 +268,6 @@ func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, } } -func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*Value) - // val - switch x := m.Val.(type) { - case *Value_BytesVal: - b.EncodeVarint(1<<3 | proto.WireBytes) - b.EncodeRawBytes(x.BytesVal) - case *Value_StringVal: - b.EncodeVarint(2<<3 | proto.WireBytes) - b.EncodeStringBytes(x.StringVal) - case *Value_Int32Val: - b.EncodeVarint(3<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Int32Val)) - case *Value_Int64Val: - b.EncodeVarint(4<<3 | proto.WireVarint) - b.EncodeVarint(uint64(x.Int64Val)) - case *Value_DoubleVal: - b.EncodeVarint(5<<3 | proto.WireFixed64) - b.EncodeFixed64(math.Float64bits(x.DoubleVal)) - case *Value_FloatVal: - b.EncodeVarint(6<<3 | proto.WireFixed32) - b.EncodeFixed32(uint64(math.Float32bits(x.FloatVal))) - case *Value_BoolVal: - t := uint64(0) - if x.BoolVal { - t = 1 - } - b.EncodeVarint(7<<3 | proto.WireVarint) - b.EncodeVarint(t) - case *Value_TimestampVal: - b.EncodeVarint(8<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.TimestampVal); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("Value.Val has unexpected type %T", x) - } - return nil -} - -func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*Value) - switch tag { - case 1: // val.bytesVal - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeRawBytes(true) - m.Val = &Value_BytesVal{x} - return true, err - case 2: // val.stringVal - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Val = &Value_StringVal{x} - return true, err - case 3: // val.int32Val - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Val = &Value_Int32Val{int32(x)} - return true, err - case 4: // val.int64Val - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Val = &Value_Int64Val{int64(x)} - return true, err - case 5: // val.doubleVal - if wire != proto.WireFixed64 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed64() - m.Val = &Value_DoubleVal{math.Float64frombits(x)} - return true, err - case 6: // val.floatVal - if wire != proto.WireFixed32 { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeFixed32() - m.Val = &Value_FloatVal{math.Float32frombits(uint32(x))} - return true, err - case 7: // val.boolVal - if wire != proto.WireVarint { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeVarint() - m.Val = &Value_BoolVal{x != 0} - return true, err - case 8: // val.timestampVal - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(timestamp.Timestamp) - err := b.DecodeMessage(msg) - m.Val = &Value_TimestampVal{msg} - return true, err - default: - return false, nil - } -} - -func _Value_OneofSizer(msg proto.Message) (n int) { - m := msg.(*Value) - // val - switch x := m.Val.(type) { - case *Value_BytesVal: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(len(x.BytesVal))) - n += len(x.BytesVal) - case *Value_StringVal: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(len(x.StringVal))) - n += len(x.StringVal) - case *Value_Int32Val: - 
n += 1 // tag and wire - n += proto.SizeVarint(uint64(x.Int32Val)) - case *Value_Int64Val: - n += 1 // tag and wire - n += proto.SizeVarint(uint64(x.Int64Val)) - case *Value_DoubleVal: - n += 1 // tag and wire - n += 8 - case *Value_FloatVal: - n += 1 // tag and wire - n += 4 - case *Value_BoolVal: - n += 1 // tag and wire - n += 1 - case *Value_TimestampVal: - s := proto.Size(x.TimestampVal) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - type ValueList struct { // Types that are valid to be assigned to ValueList: // *ValueList_BytesList @@ -427,16 +288,17 @@ func (m *ValueList) Reset() { *m = ValueList{} } func (m *ValueList) String() string { return proto.CompactTextString(m) } func (*ValueList) ProtoMessage() {} func (*ValueList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{2} + return fileDescriptor_47c504407d284ecc, []int{2} } + func (m *ValueList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ValueList.Unmarshal(m, b) } func (m *ValueList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ValueList.Marshal(b, m, deterministic) } -func (dst *ValueList) XXX_Merge(src proto.Message) { - xxx_messageInfo_ValueList.Merge(dst, src) +func (m *ValueList) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueList.Merge(m, src) } func (m *ValueList) XXX_Size() int { return xxx_messageInfo_ValueList.Size(m) @@ -562,9 +424,9 @@ func (m *ValueList) GetTimestampList() *TimestampList { return nil } -// XXX_OneofFuncs is for the internal use of the proto package. -func (*ValueList) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _ValueList_OneofMarshaler, _ValueList_OneofUnmarshaler, _ValueList_OneofSizer, []interface{}{ +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*ValueList) XXX_OneofWrappers() []interface{} { + return []interface{}{ (*ValueList_BytesList)(nil), (*ValueList_StringList)(nil), (*ValueList_Int32List)(nil), @@ -576,180 +438,6 @@ func (*ValueList) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) err } } -func _ValueList_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*ValueList) - // valueList - switch x := m.ValueList.(type) { - case *ValueList_BytesList: - b.EncodeVarint(1<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.BytesList); err != nil { - return err - } - case *ValueList_StringList: - b.EncodeVarint(2<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.StringList); err != nil { - return err - } - case *ValueList_Int32List: - b.EncodeVarint(3<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Int32List); err != nil { - return err - } - case *ValueList_Int64List: - b.EncodeVarint(4<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Int64List); err != nil { - return err - } - case *ValueList_DoubleList: - b.EncodeVarint(5<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.DoubleList); err != nil { - return err - } - case *ValueList_FloatList: - b.EncodeVarint(6<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.FloatList); err != nil { - return err - } - case *ValueList_BoolList: - b.EncodeVarint(7<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.BoolList); err != nil { - return err - } - case *ValueList_TimestampList: - b.EncodeVarint(8<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.TimestampList); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("ValueList.ValueList has unexpected type %T", x) - } - return nil -} - -func _ValueList_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*ValueList) - switch tag { - case 1: // valueList.bytesList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(BytesList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_BytesList{msg} - return true, err - case 2: // valueList.stringList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(StringList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_StringList{msg} - return true, err - case 3: // valueList.int32List - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Int32List) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_Int32List{msg} - return true, err - case 4: // valueList.int64List - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(Int64List) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_Int64List{msg} - return true, err - case 5: // valueList.doubleList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(DoubleList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_DoubleList{msg} - return true, err - case 6: // valueList.floatList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(FloatList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_FloatList{msg} - return true, err - case 7: // valueList.boolList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(BoolList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_BoolList{msg} - return true, err - case 8: // valueList.timestampList - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := 
new(TimestampList) - err := b.DecodeMessage(msg) - m.ValueList = &ValueList_TimestampList{msg} - return true, err - default: - return false, nil - } -} - -func _ValueList_OneofSizer(msg proto.Message) (n int) { - m := msg.(*ValueList) - // valueList - switch x := m.ValueList.(type) { - case *ValueList_BytesList: - s := proto.Size(x.BytesList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_StringList: - s := proto.Size(x.StringList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_Int32List: - s := proto.Size(x.Int32List) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_Int64List: - s := proto.Size(x.Int64List) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_DoubleList: - s := proto.Size(x.DoubleList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_FloatList: - s := proto.Size(x.FloatList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_BoolList: - s := proto.Size(x.BoolList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case *ValueList_TimestampList: - s := proto.Size(x.TimestampList) - n += 1 // tag and wire - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - type BytesList struct { Val [][]byte `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -761,16 +449,17 @@ func (m *BytesList) Reset() { *m = BytesList{} } func (m *BytesList) String() string { return proto.CompactTextString(m) } func (*BytesList) ProtoMessage() {} func (*BytesList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{3} + return fileDescriptor_47c504407d284ecc, []int{3} } + func (m *BytesList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BytesList.Unmarshal(m, b) } func (m *BytesList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BytesList.Marshal(b, m, deterministic) } -func (dst *BytesList) XXX_Merge(src proto.Message) { - xxx_messageInfo_BytesList.Merge(dst, src) +func (m *BytesList) XXX_Merge(src proto.Message) { + xxx_messageInfo_BytesList.Merge(m, src) } func (m *BytesList) XXX_Size() int { return xxx_messageInfo_BytesList.Size(m) @@ -799,16 +488,17 @@ func (m *StringList) Reset() { *m = StringList{} } func (m *StringList) String() string { return proto.CompactTextString(m) } func (*StringList) ProtoMessage() {} func (*StringList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{4} + return fileDescriptor_47c504407d284ecc, []int{4} } + func (m *StringList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StringList.Unmarshal(m, b) } func (m *StringList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StringList.Marshal(b, m, deterministic) } -func (dst *StringList) XXX_Merge(src proto.Message) { - xxx_messageInfo_StringList.Merge(dst, src) +func (m *StringList) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringList.Merge(m, src) } func (m *StringList) XXX_Size() int { return xxx_messageInfo_StringList.Size(m) @@ -837,16 +527,17 @@ func (m *Int32List) Reset() { *m = Int32List{} } func (m *Int32List) String() string { return proto.CompactTextString(m) } func (*Int32List) ProtoMessage() {} func (*Int32List) Descriptor() ([]byte, []int) { - return 
fileDescriptor_Value_0680a2f024df1112, []int{5} + return fileDescriptor_47c504407d284ecc, []int{5} } + func (m *Int32List) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Int32List.Unmarshal(m, b) } func (m *Int32List) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Int32List.Marshal(b, m, deterministic) } -func (dst *Int32List) XXX_Merge(src proto.Message) { - xxx_messageInfo_Int32List.Merge(dst, src) +func (m *Int32List) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int32List.Merge(m, src) } func (m *Int32List) XXX_Size() int { return xxx_messageInfo_Int32List.Size(m) @@ -875,16 +566,17 @@ func (m *Int64List) Reset() { *m = Int64List{} } func (m *Int64List) String() string { return proto.CompactTextString(m) } func (*Int64List) ProtoMessage() {} func (*Int64List) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{6} + return fileDescriptor_47c504407d284ecc, []int{6} } + func (m *Int64List) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Int64List.Unmarshal(m, b) } func (m *Int64List) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Int64List.Marshal(b, m, deterministic) } -func (dst *Int64List) XXX_Merge(src proto.Message) { - xxx_messageInfo_Int64List.Merge(dst, src) +func (m *Int64List) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int64List.Merge(m, src) } func (m *Int64List) XXX_Size() int { return xxx_messageInfo_Int64List.Size(m) @@ -913,16 +605,17 @@ func (m *DoubleList) Reset() { *m = DoubleList{} } func (m *DoubleList) String() string { return proto.CompactTextString(m) } func (*DoubleList) ProtoMessage() {} func (*DoubleList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{7} + return fileDescriptor_47c504407d284ecc, []int{7} } + func (m *DoubleList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DoubleList.Unmarshal(m, b) } func (m *DoubleList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DoubleList.Marshal(b, m, deterministic) } -func (dst *DoubleList) XXX_Merge(src proto.Message) { - xxx_messageInfo_DoubleList.Merge(dst, src) +func (m *DoubleList) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleList.Merge(m, src) } func (m *DoubleList) XXX_Size() int { return xxx_messageInfo_DoubleList.Size(m) @@ -951,16 +644,17 @@ func (m *FloatList) Reset() { *m = FloatList{} } func (m *FloatList) String() string { return proto.CompactTextString(m) } func (*FloatList) ProtoMessage() {} func (*FloatList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{8} + return fileDescriptor_47c504407d284ecc, []int{8} } + func (m *FloatList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FloatList.Unmarshal(m, b) } func (m *FloatList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FloatList.Marshal(b, m, deterministic) } -func (dst *FloatList) XXX_Merge(src proto.Message) { - xxx_messageInfo_FloatList.Merge(dst, src) +func (m *FloatList) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatList.Merge(m, src) } func (m *FloatList) XXX_Size() int { return xxx_messageInfo_FloatList.Size(m) @@ -989,16 +683,17 @@ func (m *BoolList) Reset() { *m = BoolList{} } func (m *BoolList) String() string { return proto.CompactTextString(m) } func (*BoolList) ProtoMessage() {} func (*BoolList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{9} + return fileDescriptor_47c504407d284ecc, []int{9} } 
+ func (m *BoolList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BoolList.Unmarshal(m, b) } func (m *BoolList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BoolList.Marshal(b, m, deterministic) } -func (dst *BoolList) XXX_Merge(src proto.Message) { - xxx_messageInfo_BoolList.Merge(dst, src) +func (m *BoolList) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoolList.Merge(m, src) } func (m *BoolList) XXX_Size() int { return xxx_messageInfo_BoolList.Size(m) @@ -1027,16 +722,17 @@ func (m *TimestampList) Reset() { *m = TimestampList{} } func (m *TimestampList) String() string { return proto.CompactTextString(m) } func (*TimestampList) ProtoMessage() {} func (*TimestampList) Descriptor() ([]byte, []int) { - return fileDescriptor_Value_0680a2f024df1112, []int{10} + return fileDescriptor_47c504407d284ecc, []int{10} } + func (m *TimestampList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TimestampList.Unmarshal(m, b) } func (m *TimestampList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_TimestampList.Marshal(b, m, deterministic) } -func (dst *TimestampList) XXX_Merge(src proto.Message) { - xxx_messageInfo_TimestampList.Merge(dst, src) +func (m *TimestampList) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampList.Merge(m, src) } func (m *TimestampList) XXX_Size() int { return xxx_messageInfo_TimestampList.Size(m) @@ -1055,6 +751,7 @@ func (m *TimestampList) GetVal() []*timestamp.Timestamp { } func init() { + proto.RegisterEnum("feast.types.ValueType_Enum", ValueType_Enum_name, ValueType_Enum_value) proto.RegisterType((*ValueType)(nil), "feast.types.ValueType") proto.RegisterType((*Value)(nil), "feast.types.Value") proto.RegisterType((*ValueList)(nil), "feast.types.ValueList") @@ -1066,12 +763,11 @@ func init() { proto.RegisterType((*FloatList)(nil), "feast.types.FloatList") proto.RegisterType((*BoolList)(nil), "feast.types.BoolList") proto.RegisterType((*TimestampList)(nil), "feast.types.TimestampList") - proto.RegisterEnum("feast.types.ValueType_Enum", ValueType_Enum_name, ValueType_Enum_value) } -func init() { proto.RegisterFile("feast/types/Value.proto", fileDescriptor_Value_0680a2f024df1112) } +func init() { proto.RegisterFile("feast/types/Value.proto", fileDescriptor_47c504407d284ecc) } -var fileDescriptor_Value_0680a2f024df1112 = []byte{ +var fileDescriptor_47c504407d284ecc = []byte{ // 626 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xd1, 0x6f, 0x9a, 0x50, 0x14, 0xc6, 0xb9, 0x22, 0x0a, 0xc7, 0x36, 0x21, 0x37, 0xd9, 0xda, 0x34, 0x6d, 0x47, 0x7c, 0xe2, diff --git a/sdk/python/feast/sdk/client.py b/sdk/python/feast/sdk/client.py index 59b4ddd9f6..1d835d37b5 100644 --- a/sdk/python/feast/sdk/client.py +++ b/sdk/python/feast/sdk/client.py @@ -35,6 +35,7 @@ from feast.sdk.resources.storage import Storage from feast.sdk.utils.bq_util import TableDownloader from feast.sdk.utils.print_utils import spec_to_yaml +from feast.sdk.utils import types from feast.serving.Serving_pb2 import QueryFeaturesRequest from feast.serving.Serving_pb2_grpc import ServingAPIStub @@ -77,9 +78,11 @@ def core_url(self): self._core_url = os.getenv(FEAST_CORE_URL_ENV_KEY) if self._core_url is None: raise ValueError( - "Core API URL not set. Either set the " + - "environment variable {} or set it explicitly.".format( - FEAST_CORE_URL_ENV_KEY)) + "Core API URL not set. 
Either set the " + + "environment variable {} or set it explicitly.".format( + FEAST_CORE_URL_ENV_KEY + ) + ) return self._core_url @core_url.setter @@ -92,9 +95,11 @@ def serving_url(self): self._serving_url = os.getenv(FEAST_SERVING_URL_ENV_KEY) if self._serving_url is None: raise ValueError( - "Serving API URL not set. Either set the " + - "environment variable {} or set it explicitly.".format( - FEAST_SERVING_URL_ENV_KEY)) + "Serving API URL not set. Either set the " + + "environment variable {} or set it explicitly.".format( + FEAST_SERVING_URL_ENV_KEY + ) + ) return self._serving_url @serving_url.setter @@ -128,11 +133,9 @@ def apply(self, obj): else: return self._apply(obj) - def run(self, - importer, - name_override=None, - apply_entity=False, - apply_features=False): + def run( + self, importer, name_override=None, apply_entity=False, apply_features=False + ): """ Run an import job Args: @@ -146,8 +149,7 @@ def run(self, Returns: (str) job ID of the import job """ - request = JobServiceTypes.SubmitImportJobRequest( - importSpec=importer.spec) + request = JobServiceTypes.SubmitImportJobRequest(importSpec=importer.spec) if name_override is not None: request.name = name_override @@ -158,22 +160,17 @@ def run(self, self._apply_feature(importer.features[feature]) if importer.require_staging: - print("Staging file to remote path {}".format( - importer.remote_path)) + print("Staging file to remote path {}".format(importer.remote_path)) importer.stage() - print("Submitting job with spec:\n {}".format( - spec_to_yaml(importer.spec))) + print("Submitting job with spec:\n {}".format(spec_to_yaml(importer.spec))) self._connect_core() response = self._job_service_stub.SubmitJob(request) print("Submitted job with id: {}".format(response.jobId)) return response.jobId - def create_dataset(self, - feature_set, - start_date, - end_date, - limit=None, - name_prefix=None): + def create_dataset( + self, feature_set, start_date, end_date, limit=None, name_prefix=None + ): """ Create training dataset for a feature set. The training dataset will be bounded by event timestamp between start_date and end_date. 
@@ -194,24 +191,28 @@ def create_dataset(self, feast.resources.feature_set.DatasetInfo: DatasetInfo containing the information of training dataset """ - self._check_create_dataset_args(feature_set, start_date, end_date, - limit) + self._check_create_dataset_args(feature_set, start_date, end_date, limit) req = DatasetServiceTypes.CreateDatasetRequest( featureSet=feature_set.proto, startDate=_timestamp_from_datetime(_parse_date(start_date)), endDate=_timestamp_from_datetime(_parse_date(end_date)), limit=limit, - namePrefix=name_prefix) + namePrefix=name_prefix, + ) if self.verbose: - print("creating training dataset for features: " + - str(feature_set.features)) + print( + "creating training dataset for features: " + str(feature_set.features) + ) self._connect_core() resp = self._dataset_service_stub.CreateDataset(req) if self.verbose: - print("created dataset {}: {}".format(resp.datasetInfo.name, - resp.datasetInfo.tableUrl)) + print( + "created dataset {}: {}".format( + resp.datasetInfo.name, resp.datasetInfo.tableUrl + ) + ) return DatasetInfo(resp.datasetInfo.name, resp.datasetInfo.tableUrl) def get_serving_data(self, feature_set, entity_keys, ts_range=None): @@ -245,14 +246,12 @@ def get_serving_data(self, feature_set, entity_keys, ts_range=None): request = self._build_serving_request(feature_set, entity_keys) self._connect_serving() return self._response_to_df( - feature_set, self._serving_service_stub.QueryFeatures(request), - start, end) - - def download_dataset(self, - dataset_info, - dest, - staging_location, - file_type=FileType.CSV): + feature_set, self._serving_service_stub.QueryFeatures(request), start, end + ) + + def download_dataset( + self, dataset_info, dest, staging_location, file_type=FileType.CSV + ): """ Download training dataset as file Args: @@ -267,7 +266,8 @@ def download_dataset(self, str: path to the downloaded file """ return self._table_downloader.download_table_as_file( - dataset_info.table_id, dest, staging_location, file_type) + dataset_info.table_id, dest, staging_location, file_type + ) def download_dataset_to_df(self, dataset_info, staging_location): """ @@ -282,7 +282,8 @@ def download_dataset_to_df(self, dataset_info, staging_location): """ return self._table_downloader.download_table_as_df( - dataset_info.table_id, staging_location) + dataset_info.table_id, staging_location + ) def close(self): """ @@ -299,8 +300,7 @@ def _connect_core(self): self.__core_channel = grpc.insecure_channel(self.core_url) self._core_service_stub = CoreServiceStub(self.__core_channel) self._job_service_stub = JobServiceStub(self.__core_channel) - self._dataset_service_stub = DatasetServiceStub( - self.__core_channel) + self._dataset_service_stub = DatasetServiceStub(self.__core_channel) def _connect_serving(self): """Connect to serving api""" @@ -313,11 +313,13 @@ def _build_serving_request(self, feature_set, entity_keys): return QueryFeaturesRequest( entityName=feature_set.entity, entityId=entity_keys, - featureId=feature_set.features) + featureId=feature_set.features, + ) def _response_to_df(self, feature_set, response, start=None, end=None): is_filter_time = start is not None and end is not None df = pd.DataFrame(columns=[feature_set.entity] + feature_set.features) + dtypes = {} for entity_id in response.entities: feature_map = response.entities[entity_id].features row = {response.entityName: entity_id} @@ -327,10 +329,13 @@ def _response_to_df(self, feature_set, response, start=None, end=None): ts = feature_map[feature_id].timestamp.ToDatetime() if ts < start or ts > 
end: continue + feast_valuetype = v.WhichOneof("val") + if feast_valuetype not in dtypes: + dtypes[feature_id] = types.FEAST_VALUETYPE_TO_DTYPE[feast_valuetype] v = getattr(v, v.WhichOneof("val")) row[feature_id] = v df = df.append(row, ignore_index=True) - return df.reset_index(drop=True) + return df.astype(dtypes).reset_index(drop=True) def _apply(self, obj): """Applies a single object to feast core. @@ -348,8 +353,10 @@ def _apply(self, obj): elif isinstance(obj, Storage): return self._apply_storage(obj) else: - raise TypeError('Apply can only be passed one of the following \ - types: [Feature, Entity, FeatureGroup, Storage, Importer]') + raise TypeError( + "Apply can only be passed one of the following \ + types: [Feature, Entity, FeatureGroup, Storage, Importer]" + ) def _apply_feature(self, feature): """Apply the feature to the core API @@ -360,8 +367,11 @@ def _apply_feature(self, feature): self._connect_core() response = self._core_service_stub.ApplyFeature(feature.spec) if self.verbose: - print("Successfully applied feature with id: {}\n---\n{}".format( - response.featureId, feature)) + print( + "Successfully applied feature with id: {}\n---\n{}".format( + response.featureId, feature + ) + ) return response.featureId def _apply_entity(self, entity): @@ -373,8 +383,11 @@ def _apply_entity(self, entity): self._connect_core() response = self._core_service_stub.ApplyEntity(entity.spec) if self.verbose: - print("Successfully applied entity with name: {}\n---\n{}".format( - response.entityName, entity)) + print( + "Successfully applied entity with name: {}\n---\n{}".format( + response.entityName, entity + ) + ) return response.entityName def _apply_feature_group(self, feature_group): @@ -385,11 +398,12 @@ def _apply_feature_group(self, feature_group): feature group to apply """ self._connect_core() - response = self._core_service_stub.ApplyFeatureGroup( - feature_group.spec) + response = self._core_service_stub.ApplyFeatureGroup(feature_group.spec) if self.verbose: - print("Successfully applied feature group with id: " + - "{}\n---\n{}".format(response.featureGroupId, feature_group)) + print( + "Successfully applied feature group with id: " + + "{}\n---\n{}".format(response.featureGroupId, feature_group) + ) return response.featureGroupId def _apply_storage(self, storage): @@ -401,12 +415,13 @@ def _apply_storage(self, storage): self._connect_core() response = self._core_service_stub.ApplyStorage(storage.spec) if self.verbose: - print("Successfully applied storage with id: " + - "{}\n{}".format(response.storageId, storage)) + print( + "Successfully applied storage with id: " + + "{}\n{}".format(response.storageId, storage) + ) return response.storageId - def _check_create_dataset_args(self, feature_set, start_date, end_date, - limit): + def _check_create_dataset_args(self, feature_set, start_date, end_date, limit): if len(feature_set.features) < 1: raise ValueError("feature set is empty") diff --git a/sdk/python/feast/sdk/utils/bq_util.py b/sdk/python/feast/sdk/utils/bq_util.py index 8cbc4b090b..b52dee92ca 100644 --- a/sdk/python/feast/sdk/utils/bq_util.py +++ b/sdk/python/feast/sdk/utils/bq_util.py @@ -13,19 +13,25 @@ # limitations under the License. 
import os +import tempfile import time - +from datetime import datetime +import pytz +import fastavro import pandas as pd +from google.cloud import bigquery from google.cloud.bigquery.client import Client as BQClient from google.cloud.bigquery.job import ExtractJobConfig, DestinationFormat from google.cloud.bigquery.table import Table +from google.cloud.exceptions import NotFound from google.cloud.storage import Client as GCSClient +from google.cloud import storage from feast.sdk.utils.gs_utils import is_gs_path, split_gs_path, gcs_to_df def head(client, table, max_rows=10): - '''Get the head of the table. Retrieves rows from the given table at + """Get the head of the table. Retrieves rows from the given table at minimum cost Args: @@ -37,12 +43,13 @@ def head(client, table, max_rows=10): Returns: pandas.DataFrame: dataframe containing the head of rows - ''' + """ rows = client.list_rows(table, max_results=max_rows) rows = [x for x in rows] return pd.DataFrame( - data=[list(x.values()) for x in rows], columns=list(rows[0].keys())) + data=[list(x.values()) for x in rows], columns=list(rows[0].keys()) + ) def get_table_name(feature_id, storage_spec): @@ -71,6 +78,108 @@ def get_table_name(feature_id, storage_spec): return ".".join([project, dataset, table_name]) +def get_default_templocation(bigquery_client, project=None): + if project is None: + project = bigquery_client.project + assert isinstance(project, str) + assert len(project) > 0 + storage_client = storage.Client() + default_bucket_name = f"feast-templocation-{project}" + try: + storage_client.get_bucket(default_bucket_name) + except NotFound: + print( + f'Default bucket "{default_bucket_name}" not found. Attempting to create it.' + ) + storage_client.create_bucket(bucket_name=default_bucket_name, project=project) + return f"gs://{default_bucket_name}" + + +def query_to_dataframe( + query: str, + bigquery_client: bigquery.Client = None, + storage_client: storage.Client = None, + project: str = None, + templocation: str = None, +) -> pd.DataFrame: + """ + Run a query job on BigQuery and return the result in Pandas DataFrame format + + Args: + query: BigQuery query e.g. 
"SELECT * FROM dataset.table" + bigquery_client: + storage_client: + project: Google Cloud project id + templocation: Google Cloud Storage location to store intermediate files, must start with "gs://" + + Returns: Pandas DataFrame of the query result + + """ + if isinstance(templocation, str) and not templocation.startswith("gs://"): + raise RuntimeError('templocation must start with "gs://"') + + if bigquery_client is None: + bigquery_client = bigquery.Client(project=project) + + if project is None: + project = bigquery_client.project + + query_job = bigquery_client.query(query, project=project) + query_job_state = "" + + while not query_job.done(): + if query_job.state != query_job_state: + print(f"Query status: {query_job.state}") + query_job_state = query_job.state + time.sleep(5) + + if query_job.state != query_job_state: + print(f"Query status: {query_job.state}") + + if query_job.exception(): + raise query_job.exception() + + if not templocation: + templocation = get_default_templocation(bigquery_client, project=project) + + if templocation.endswith("/"): + templocation += templocation[:-1] + + destination_uri = ( + f"{templocation}/bq-{datetime.now(pytz.utc).strftime('%Y%m%dT%H%M%SZ')}.avro" + ) + extract_job_config = bigquery.job.ExtractJobConfig(destination_format="AVRO") + extract_job = bigquery_client.extract_table( + query_job.destination, destination_uri, job_config=extract_job_config + ) + + while not extract_job.done(): + time.sleep(5) + + if extract_job.exception(): + raise extract_job.exception() + + if not storage_client: + storage_client = storage.Client(project=project) + + print("Reading query result into DataFrame") + + bucket_name, blob_name = ( + destination_uri.split("/")[2], + "/".join(destination_uri.split("/")[3:]), + ) + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.get_blob(blob_name) + downloaded_avro_filename = tempfile.NamedTemporaryFile().name + blob.download_to_filename(downloaded_avro_filename) + + with open(downloaded_avro_filename, "rb") as avro_file: + avro_reader = fastavro.reader(avro_file) + df = pd.DataFrame.from_records(avro_reader) + + return df + + class TableDownloader: def __init__(self): self._bq = None @@ -88,8 +197,7 @@ def bq(self): self._bq = BQClient() return self._bq - def download_table_as_file(self, table_id, dest, staging_location, - file_type): + def download_table_as_file(self, table_id, dest, staging_location, file_type): """ Download a bigquery table as file Args: @@ -105,14 +213,13 @@ def download_table_as_file(self, table_id, dest, staging_location, if not is_gs_path(staging_location): raise ValueError("staging_uri must be a directory in GCS") - temp_file_name = 'temp_{}'.format(int(round(time.time() * 1000))) + temp_file_name = "temp_{}".format(int(round(time.time() * 1000))) staging_file_path = os.path.join(staging_location, temp_file_name) job_config = ExtractJobConfig() job_config.destination_format = file_type src_table = Table.from_string(table_id) - job = self.bq.extract_table( - src_table, staging_file_path, job_config=job_config) + job = self.bq.extract_table(src_table, staging_file_path, job_config=job_config) # await completion job.result() @@ -137,15 +244,14 @@ def download_table_as_df(self, table_id, staging_location): if not is_gs_path(staging_location): raise ValueError("staging_uri must be a directory in GCS") - temp_file_name = 'temp_{}'.format(int(round(time.time() * 1000))) + temp_file_name = "temp_{}".format(int(round(time.time() * 1000))) staging_file_path = 
os.path.join(staging_location, temp_file_name) job_config = ExtractJobConfig() job_config.destination_format = DestinationFormat.CSV job = self.bq.extract_table( - Table.from_string(table_id), - staging_file_path, - job_config=job_config) + Table.from_string(table_id), staging_file_path, job_config=job_config + ) # await completion job.result() diff --git a/sdk/python/feast/sdk/utils/types.py b/sdk/python/feast/sdk/utils/types.py index cf72425c25..21752c8e24 100644 --- a/sdk/python/feast/sdk/utils/types.py +++ b/sdk/python/feast/sdk/utils/types.py @@ -13,9 +13,10 @@ # limitations under the License. from feast.sdk.resources.feature import ValueType +import numpy as np # mapping of pandas dtypes to feast value type strings -_DTYPE_TO_VALUE_TYPE_MAPPING = { +DTYPE_TO_VALUE_TYPE_MAPPING = { "float64": ValueType.DOUBLE, "float32": ValueType.FLOAT, "int64": ValueType.INT64, @@ -32,6 +33,20 @@ "object": ValueType.STRING } +# Mapping of feast value type to Pandas DataFrame dtypes +# Integer and floating values are all 64-bit for better integration +# with BigQuery data types +FEAST_VALUETYPE_TO_DTYPE = { + "bytesVal": np.byte, + "stringVal": np.object, + "int32Val": np.int64, + "int64Val": np.int64, + "doubleVal": np.float64, + "floatVal": np.float64, + "boolVal": np.bool, + "timestampVal": np.datetime64, +} + def dtype_to_value_type(dtype): """Returns the equivalent feast valueType for the given dtype @@ -42,4 +57,4 @@ def dtype_to_value_type(dtype): Returns: feast.types.ValueType2.ValueType: equivalent feast valuetype """ - return _DTYPE_TO_VALUE_TYPE_MAPPING[dtype.__str__()] + return DTYPE_TO_VALUE_TYPE_MAPPING[dtype.__str__()] diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 6e3fe62f6c..b07157bc3f 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -1,11 +1,11 @@ # Copyright 2018 The Feast Authors -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # https://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -16,23 +16,23 @@ import os from setuptools import find_packages, setup, Command -NAME = 'Feast' -DESCRIPTION = 'Python sdk for Feast' -URL = 'https://github.com/gojek/feast' -AUTHOR = 'Feast' -REQUIRES_PYTHON = '>=3.6.0' -VERSION = imp.load_source( - 'feast.version', os.path.join('feast', 'version.py')).VERSION +NAME = "Feast" +DESCRIPTION = "Python sdk for Feast" +URL = "https://github.com/gojek/feast" +AUTHOR = "Feast" +REQUIRES_PYTHON = ">=3.6.0" +VERSION = imp.load_source("feast.version", os.path.join("feast", "version.py")).VERSION REQUIRED = [ - 'google-api-core>=1.7.0', - 'google-auth>=1.6.0', - 'google-cloud-bigquery>=1.8.0', - 'google-cloud-storage>=1.13.0', - 'googleapis-common-protos>=1.5.5', - 'grpcio>=1.16.1', - 'pandas', - 'protobuf>=3.0.0', - 'PyYAML', + "google-api-core>=1.7.0", + "google-auth>=1.6.0", + "google-cloud-bigquery>=1.8.0", + "google-cloud-storage>=1.13.0", + "googleapis-common-protos>=1.5.5", + "grpcio>=1.16.1", + "pandas", + "protobuf>=3.0.0", + "PyYAML", + "fastavro>=0.21.19" ] setup( @@ -42,16 +42,16 @@ author=AUTHOR, python_requires=REQUIRES_PYTHON, url=URL, - packages=find_packages(exclude=('tests',)), + packages=find_packages(exclude=("tests",)), install_requires=REQUIRED, include_package_data=True, - license='Apache', + license="Apache", classifiers=[ # Trove classifiers # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - ] -) \ No newline at end of file + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + ], +) diff --git a/sdk/python/tests/data/austin_bikeshare.bikeshare_stations.avro b/sdk/python/tests/data/austin_bikeshare.bikeshare_stations.avro new file mode 100644 index 0000000000..0a44e3a365 Binary files /dev/null and b/sdk/python/tests/data/austin_bikeshare.bikeshare_stations.avro differ diff --git a/sdk/python/tests/sdk/utils/test_bq_utils.py b/sdk/python/tests/sdk/utils/test_bq_utils.py index 206180197c..bc5fd72f18 100644 --- a/sdk/python/tests/sdk/utils/test_bq_utils.py +++ b/sdk/python/tests/sdk/utils/test_bq_utils.py @@ -14,13 +14,18 @@ import os +import fastavro +import pandas as pd import pytest from google.cloud.bigquery.table import Table +from google.cloud.exceptions import NotFound from feast.sdk.resources.feature_set import FileType -from feast.sdk.utils.bq_util import TableDownloader, get_table_name +from feast.sdk.utils.bq_util import TableDownloader, get_table_name, query_to_dataframe from feast.specs.StorageSpec_pb2 import StorageSpec +testdata_path = os.path.abspath(os.path.join(__file__, "..", "..", "..", "data")) + def test_get_table_name(): project_name = "my_project" @@ -29,27 +34,53 @@ def test_get_table_name(): storage_spec = StorageSpec( id="BIGQUERY1", type="bigquery", - options={ - "project": project_name, - "dataset": dataset_name - }) - assert get_table_name(feature_id, storage_spec) == \ - "my_project.my_dataset.myentity_none" + options={"project": project_name, "dataset": dataset_name}, + ) + assert ( + get_table_name(feature_id, storage_spec) + == "my_project.my_dataset.myentity_none" + ) def test_get_table_name_not_bq(): feature_id = "myentity.feature1" storage_spec = StorageSpec(id="REDIS1", type="redis") - with pytest.raises( - ValueError, match="storage spec is not BigQuery storage spec"): + 
with pytest.raises(ValueError, match="storage spec is not BigQuery storage spec"): get_table_name(feature_id, storage_spec) +@pytest.mark.skipif( + os.getenv("SKIP_BIGQUERY_TEST") is not None, + reason="SKIP_BIGQUERY_TEST is set in the environment", +) +def test_query_to_dataframe(): + with open( + os.path.join(testdata_path, "austin_bikeshare.bikeshare_stations.avro"), "rb" + ) as expected_file: + avro_reader = fastavro.reader(expected_file) + expected = pd.DataFrame.from_records(avro_reader) + + query = "SELECT * FROM `bigquery-public-data.austin_bikeshare.bikeshare_stations`" + actual = query_to_dataframe(query) + assert expected.equals(actual) + + +@pytest.mark.skipif( + os.getenv("SKIP_BIGQUERY_TEST") is not None, + reason="SKIP_BIGQUERY_TEST is set in the environment", +) +def test_query_to_dataframe_for_non_existing_dataset(): + query = "SELECT * FROM `bigquery-public-data.this_dataset_should_not_exists.bikeshare_stations`" + with pytest.raises(NotFound): + query_to_dataframe(query) + + class TestTableDownloader(object): def test_download_table_as_df(self, mocker): self._stop_time(mocker) mocked_gcs_to_df = mocker.patch( - "feast.sdk.utils.bq_util.gcs_to_df", return_value=None) + "feast.sdk.utils.bq_util.gcs_to_df", return_value=None + ) staging_path = "gs://temp/" staging_file_name = "temp_0" @@ -59,19 +90,15 @@ def test_download_table_as_df(self, mocker): exp_staging_path = os.path.join(staging_path, staging_file_name) table_dldr._bq = _Mock_BQ_Client() - mocker.patch.object( - table_dldr._bq, "extract_table", return_value=_Job()) + mocker.patch.object(table_dldr._bq, "extract_table", return_value=_Job()) - table_dldr.download_table_as_df( - table_id, staging_location=staging_path) + table_dldr.download_table_as_df(table_id, staging_location=staging_path) assert len(table_dldr._bq.extract_table.call_args_list) == 1 - args, kwargs = \ - table_dldr._bq.extract_table.call_args_list[0] - assert args[0].full_table_id == Table.from_string( - table_id).full_table_id + args, kwargs = table_dldr._bq.extract_table.call_args_list[0] + assert args[0].full_table_id == Table.from_string(table_id).full_table_id assert args[1] == exp_staging_path - assert kwargs['job_config'].destination_format == "CSV" + assert kwargs["job_config"].destination_format == "CSV" mocked_gcs_to_df.assert_called_once_with(exp_staging_path) def test_download_csv(self, mocker): @@ -90,14 +117,15 @@ def test_download_invalid_staging_url(self): table_id = "project_id.dataset_id.table_id" table_dldr = TableDownloader() with pytest.raises( - ValueError, match="staging_uri must be a directory in " - "GCS"): - table_dldr.download_table_as_file(table_id, "/tmp/dst", - "/local/directory", FileType.CSV) + ValueError, match="staging_uri must be a directory in " "GCS" + ): + table_dldr.download_table_as_file( + table_id, "/tmp/dst", "/local/directory", FileType.CSV + ) with pytest.raises( - ValueError, match="staging_uri must be a directory in " - "GCS"): + ValueError, match="staging_uri must be a directory in " "GCS" + ): table_dldr.download_table_as_df(table_id, "/local/directory") def _test_download_file(self, mocker, type): @@ -110,28 +138,27 @@ def _test_download_file(self, mocker, type): mock_blob = _Blob() mocker.patch.object(mock_blob, "download_to_filename") table_dldr._bq = _Mock_BQ_Client() - mocker.patch.object( - table_dldr._bq, "extract_table", return_value=_Job()) + mocker.patch.object(table_dldr._bq, "extract_table", return_value=_Job()) table_dldr._gcs = _Mock_GCS_Client() mocker.patch.object( - table_dldr._gcs, 
"get_bucket", return_value=_Bucket(mock_blob)) + table_dldr._gcs, "get_bucket", return_value=_Bucket(mock_blob) + ) table_dldr.download_table_as_file( - table_id, dst_path, staging_location=staging_path, file_type=type) + table_id, dst_path, staging_location=staging_path, file_type=type + ) exp_staging_path = os.path.join(staging_path, staging_file_name) assert len(table_dldr._bq.extract_table.call_args_list) == 1 - args, kwargs = \ - table_dldr._bq.extract_table.call_args_list[0] - assert args[0].full_table_id == Table.from_string( - table_id).full_table_id + args, kwargs = table_dldr._bq.extract_table.call_args_list[0] + assert args[0].full_table_id == Table.from_string(table_id).full_table_id assert args[1] == exp_staging_path - assert kwargs['job_config'].destination_format == str(type) + assert kwargs["job_config"].destination_format == str(type) mock_blob.download_to_filename.assert_called_once_with(dst_path) def _stop_time(self, mocker): - mocker.patch('time.time', return_value=0) + mocker.patch("time.time", return_value=0) class _Mock_BQ_Client: diff --git a/testing/Makefile b/testing/Makefile index 0eef0abab9..0ff05f827f 100644 --- a/testing/Makefile +++ b/testing/Makefile @@ -32,6 +32,6 @@ update-config: get-cluster-credentials update-plugins: get-cluster-credentials kubectl create configmap plugins --from-file=plugins.yaml=prow/plugins.yaml --dry-run -o yaml | kubectl replace configmap plugins -f - -build-push: - docker build test-image/ -t $(REGISTRY)/$(PROJECT)/test-image:$(VERSION) +build-push-test-image: + docker build -f docker-images/test-image/Dockerfile -t $(REGISTRY)/$(PROJECT)/test-image:$(VERSION) . $(PUSH) "$(REGISTRY)/$(PROJECT)/test-image:$(VERSION)" \ No newline at end of file diff --git a/testing/README.md b/testing/README.md index c345c46e75..df38d34a67 100644 --- a/testing/README.md +++ b/testing/README.md @@ -4,9 +4,9 @@ This folder contains the Feast test infrastructure. ## Components -* test-image/ - Base docker image and script for running tests. +* docker-images/ - Docker images for running tests * prow/ - Prow configuration (plugins and jobs) -* tf/ - Terraform modules to provision the base testing infrastructure on GCP +* infrastructure/ - Terraform modules to provision the base testing infrastructure on GCP ## Set up @@ -23,7 +23,7 @@ mv tf/gcs terraform import google_storage_bucket.kf-feast-terraform-state kf-feast-terraform-state ``` -4. Ensure that all variables are set correctly in `tf/terraform.tfvars`. It is likely that the GCP project will need to be updated. +4. Ensure that all variables are set correctly in `infrastructure/terraform.tfvars`. It is likely that the GCP project will need to be updated. 5. Create the primary Kubernetes cluster which will host Prow and Argo @@ -51,7 +51,11 @@ To update Prow jobs, plugins, or the Docker image used for testing, modify one o - `prow/config.yaml` - `prow/plugins.yaml` -- `test-image/Dockerfile` -- `test-image/run.sh` +- `docker-images/test-image/Dockerfile` +- `docker-images/test-image/run.sh` -After making modifications, run `make`. This will update the Prow configuration, build a new test image, and push it to the container registry. \ No newline at end of file +After making modifications, run `make`. This will update the Prow configuration, build a new test image, and push it to the container registry. + +## Installing Argo + +Argo is used to run integration tests. 
It follows the [standard installation process described in the argo quickstart](https://github.com/argoproj/argo/blob/master/demo.md), with the artifact repository [configured to use gcs](https://github.com/argoproj/argo/blob/master/ARTIFACT_REPO.md). \ No newline at end of file diff --git a/testing/docker-images/docker-builder-image/Dockerfile b/testing/docker-images/docker-builder-image/Dockerfile new file mode 100644 index 0000000000..45f2ce67b1 --- /dev/null +++ b/testing/docker-images/docker-builder-image/Dockerfile @@ -0,0 +1,25 @@ +FROM docker:18.09 + +# from https://github.com/GoogleCloudPlatform/cloud-sdk-docker/blob/master/alpine/Dockerfile +ENV CLOUD_SDK_VERSION=236.0.0 + +ENV PATH /google-cloud-sdk/bin:$PATH +RUN apk --no-cache add \ + curl \ + python \ + py-crcmod \ + bash \ + libc6-compat \ + openssh-client \ + git \ + gnupg \ + && curl -O https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \ + tar xzf google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \ + rm google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \ + ln -s /lib /lib64 && \ + gcloud config set core/disable_usage_reporting true && \ + gcloud config set component_manager/disable_update_check true && \ + gcloud config set metrics/environment github_docker_image && \ + gcloud --version + +RUN gcloud auth configure-docker diff --git a/testing/docker-images/gcloud-argo/Dockerfile b/testing/docker-images/gcloud-argo/Dockerfile new file mode 100644 index 0000000000..d84f56ea22 --- /dev/null +++ b/testing/docker-images/gcloud-argo/Dockerfile @@ -0,0 +1,15 @@ +FROM debian:stretch + +RUN apt-get update && apt-get install -y make curl gnupg wget python + +# Install kubectl gcloud and argo +RUN wget -qO- https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-241.0.0-linux-x86_64.tar.gz | tar xzf - \ + && /google-cloud-sdk/bin/gcloud -q components install kubectl \ + && /google-cloud-sdk/install.sh \ + && ln -s /google-cloud-sdk/bin/gcloud /usr/bin/gcloud +RUN curl -sSL -o /usr/local/bin/argo https://github.com/argoproj/argo/releases/download/v2.2.1/argo-linux-amd64 \ + && chmod +x /usr/local/bin/argo + +COPY run.sh /usr/local/bin/ + +ENTRYPOINT ["run.sh"] \ No newline at end of file diff --git a/testing/docker-images/gcloud-argo/run.sh b/testing/docker-images/gcloud-argo/run.sh new file mode 100755 index 0000000000..e7f501972f --- /dev/null +++ b/testing/docker-images/gcloud-argo/run.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +set -e +printenv + +if [ $# -eq 0 ] +then + echo "" + echo "please specify commands as arguments, for example \"make test\"" + echo "" + exit 1 +fi + +echo "" +echo "cloning Feast repository..." +echo "" + +git clone https://github.com/gojek/feast +cd feast + +if [ -n "${PULL_NUMBER}" ] && [ "$JOB_TYPE" = "presubmit" ] +then + echo "" + echo "fetching PR ${PULL_NUMBER}..." + echo "" + git fetch origin pull/"${PULL_NUMBER}"/head:pull_"${PULL_NUMBER}" + + echo "" + echo "checking out PR ${PULL_NUMBER}..." + echo "" + git checkout pull_"${PULL_NUMBER}" +fi + +echo "connecting to test cluster..." +gcloud container clusters get-credentials primary-test-cluster --zone us-central1-a --project kf-feast +echo "testing connection to argo..." +argo list +if [ $? 
-ne 0 ] +then + echo "failed to connect to argo" + echo "exiting" + exit 1 +fi + +echo "sha:" +git rev-parse HEAD +echo "" +echo "running tests" +echo "" + +$@ \ No newline at end of file diff --git a/testing/docker-images/terraform-image/Dockerfile b/testing/docker-images/terraform-image/Dockerfile new file mode 100644 index 0000000000..04b734fbef --- /dev/null +++ b/testing/docker-images/terraform-image/Dockerfile @@ -0,0 +1,19 @@ +FROM debian:stretch + +RUN apt-get update && apt-get install -y jq wget python zip netcat + +RUN wget -qO- https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-233.0.0-linux-x86_64.tar.gz | tar xzf - \ + && /google-cloud-sdk/bin/gcloud -q components install kubectl \ + && /google-cloud-sdk/bin/gcloud -q components install beta \ + && /google-cloud-sdk/bin/gcloud -q components install bq + +ENV PATH="/google-cloud-sdk/bin:${PATH}" + +RUN wget https://releases.hashicorp.com/terraform/0.11.11/terraform_0.11.11_linux_amd64.zip \ + && unzip terraform_0.11.11_linux_amd64.zip \ + && mv terraform /bin/terraform + +RUN wget -qO- https://storage.googleapis.com/kubernetes-helm/helm-v2.12.3-linux-arm64.tar.gz | tar xzf - \ + && mv linux-arm64/helm /bin/helm + +RUN helm init --client-only diff --git a/testing/test-image/Dockerfile b/testing/docker-images/test-image/Dockerfile similarity index 89% rename from testing/test-image/Dockerfile rename to testing/docker-images/test-image/Dockerfile index b3fcbdf4b6..8dc68c9edd 100644 --- a/testing/test-image/Dockerfile +++ b/testing/docker-images/test-image/Dockerfile @@ -6,8 +6,6 @@ RUN git clone https://github.com/gojek/feast \ && cd feast && mvn test || true \ && cd .. && rm -rf feast -RUN apt-get install -y make - COPY run.sh /usr/local/bin/ ENTRYPOINT ["run.sh"] \ No newline at end of file diff --git a/testing/test-image/run.sh b/testing/docker-images/test-image/run.sh similarity index 96% rename from testing/test-image/run.sh rename to testing/docker-images/test-image/run.sh index f007da1e0d..a001255897 100755 --- a/testing/test-image/run.sh +++ b/testing/docker-images/test-image/run.sh @@ -34,7 +34,7 @@ fi echo "sha:" git rev-parse HEAD echo "" -echo "running unit tests" +echo "running tests" echo "" $@ \ No newline at end of file diff --git a/testing/tf/cloud-build/cloud-build.tf b/testing/infrastructure/cloud-build/cloud-build.tf similarity index 100% rename from testing/tf/cloud-build/cloud-build.tf rename to testing/infrastructure/cloud-build/cloud-build.tf diff --git a/testing/tf/cloud-build/cloudbuild.yaml b/testing/infrastructure/cloud-build/cloudbuild.yaml similarity index 100% rename from testing/tf/cloud-build/cloudbuild.yaml rename to testing/infrastructure/cloud-build/cloudbuild.yaml diff --git a/testing/tf/cloud-build/variables.tf b/testing/infrastructure/cloud-build/variables.tf similarity index 100% rename from testing/tf/cloud-build/variables.tf rename to testing/infrastructure/cloud-build/variables.tf diff --git a/testing/tf/gcs/backend.tf b/testing/infrastructure/gcs/backend.tf similarity index 100% rename from testing/tf/gcs/backend.tf rename to testing/infrastructure/gcs/backend.tf diff --git a/testing/tf/gcs/main.tf b/testing/infrastructure/gcs/main.tf similarity index 100% rename from testing/tf/gcs/main.tf rename to testing/infrastructure/gcs/main.tf diff --git a/testing/tf/k8s-cluster/backend.tf b/testing/infrastructure/k8s-cluster/backend.tf similarity index 100% rename from testing/tf/k8s-cluster/backend.tf rename to testing/infrastructure/k8s-cluster/backend.tf diff --git 
a/testing/tf/k8s-cluster/main.tf b/testing/infrastructure/k8s-cluster/main.tf similarity index 69% rename from testing/tf/k8s-cluster/main.tf rename to testing/infrastructure/k8s-cluster/main.tf index e5b01754f9..43b1de31b9 100644 --- a/testing/tf/k8s-cluster/main.tf +++ b/testing/infrastructure/k8s-cluster/main.tf @@ -16,6 +16,10 @@ resource "google_container_cluster" "primary-test-cluster" { node_config { oauth_scopes = [ "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/devstorage.read_write", + "https://www.googleapis.com/auth/devstorage.full_control", + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/bigquery.insertdata", ] machine_type = "n1-standard-4" diff --git a/testing/tf/k8s-cluster/output.tf b/testing/infrastructure/k8s-cluster/output.tf similarity index 100% rename from testing/tf/k8s-cluster/output.tf rename to testing/infrastructure/k8s-cluster/output.tf diff --git a/testing/tf/k8s-cluster/provider.tf b/testing/infrastructure/k8s-cluster/provider.tf similarity index 100% rename from testing/tf/k8s-cluster/provider.tf rename to testing/infrastructure/k8s-cluster/provider.tf diff --git a/testing/tf/k8s-cluster/variables.tf b/testing/infrastructure/k8s-cluster/variables.tf similarity index 100% rename from testing/tf/k8s-cluster/variables.tf rename to testing/infrastructure/k8s-cluster/variables.tf diff --git a/testing/integration/Makefile b/testing/integration/Makefile new file mode 100644 index 0000000000..7ce9feda30 --- /dev/null +++ b/testing/integration/Makefile @@ -0,0 +1,25 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +.PHONY: test-integration it-redis-bq-dataflow + +ifeq ($(TYPE), PR) +test-integration: + argo submit --watch tests/redis-bq-dataflow/workflow.yaml -p prId=$(ID) +else +test-integration: + argo submit --watch tests/redis-bq-dataflow/workflow.yaml -p gitRef=$(ID) +endif diff --git a/testing/integration/README.md b/testing/integration/README.md new file mode 100644 index 0000000000..fde86a3533 --- /dev/null +++ b/testing/integration/README.md @@ -0,0 +1,29 @@ +# Integration tests + +Integration tests for Feast are run on argo workflows. The tests follow the following steps: + +1. Provision infrastructure using Terraform +2. Test using pytest +3. Teardown infrastructure using Terraform + +## Terraform modules + +The `tf/modules` directory contains Terraform modules to set up the necessary infrastructure. Currently contains: + +- `cluster`: kubernetes cluster with necessary permissions to run Feast jobs +- `feast-helm`: Feast helm installation. + +## Tests + +The `tests` directory contains the integration tests. Each folder should contain the following: + +1. The terraform scripts to set up the necessary infra. +2. Data to run the tests on +3. A pytest file that executes the ingestion jobs and then tests for correctness. +4. Argo workflow yaml to orchestrate the entire process. + +Multiple tests can be run on the same infrastructure. 
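As a usage illustration (hedged — the commands below are lifted from the integration `Makefile`, the `.env.local` example, and the Argo workflow's `run-test` step; the PR number is a placeholder, and the local run assumes a reachable Feast deployment matching `.env.local`):

```bash
# Run the whole redis-bq-dataflow suite through Argo, as `make test-integration` does.
# A presubmit run passes a pull request number; a postsubmit run passes a git ref instead.
argo submit --watch tests/redis-bq-dataflow/workflow.yaml -p prId=1234

# Or iterate on a single test locally against an existing Feast deployment:
cd tests/redis-bq-dataflow
set -a; source .env.local    # exports FEAST_CORE_URL, FEAST_SERVING_URL, PROJECT_ID, BUCKET_NAME
pytest --capture=no
```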
+ +### Adding new tests + +To add your own tests, either (1) create a new test case within an existing folder or (2) create a new folder with the resources mentioned above. \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/.env.local b/testing/integration/tests/redis-bq-dataflow/.env.local new file mode 100644 index 0000000000..99a58fd76d --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/.env.local @@ -0,0 +1,7 @@ +# Example environment variables that need to be set when running this test locally +# Export the variables like so: +# set -a; source .env.local +FEAST_CORE_URL=localhost:6565 +FEAST_SERVING_URL=localhost:6566 +PROJECT_ID=kf-feast +BUCKET_NAME=it-feast-storage diff --git a/testing/integration/tests/redis-bq-dataflow/.env.local.example b/testing/integration/tests/redis-bq-dataflow/.env.local.example new file mode 100644 index 0000000000..1ecb945e3c --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/.env.local.example @@ -0,0 +1,7 @@ +# Example environment variables that need to be set when running this test locally +# Export the variables like so: +# set -a; source .env.local +FEAST_CORE_URL=localhost:6565 +FEAST_SERVING_URL=localhost:6566 +PROJECT_ID=google-cloud-project +BUCKET_NAME=bucket-name diff --git a/testing/integration/tests/redis-bq-dataflow/.gitignore b/testing/integration/tests/redis-bq-dataflow/.gitignore new file mode 100644 index 0000000000..781b3b25c5 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/.gitignore @@ -0,0 +1,3 @@ +# local env files +.env.local +.env.*.local diff --git a/testing/integration/tests/redis-bq-dataflow/README.md b/testing/integration/tests/redis-bq-dataflow/README.md new file mode 100644 index 0000000000..248cc4e1d8 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/README.md @@ -0,0 +1,9 @@ +# redis-bq-dataflow + +This repository contains an end-to-end test that sets up the following infrastructure: + +1. Feast configured to run jobs on dataflow +2. Redis as serving store +3. BQ as warehouse store + +And then runs the tests in `test_feast.py`. \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/data/entity/myentity.yaml b/testing/integration/tests/redis-bq-dataflow/data/entity/myentity.yaml new file mode 100644 index 0000000000..8fa945a677 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/entity/myentity.yaml @@ -0,0 +1,5 @@ +name: myentity +description: my test entity +tags: + - tag1 + - tag2 diff --git a/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minutedoubleredis1.yaml b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minutedoubleredis1.yaml new file mode 100644 index 0000000000..05b5279d0e --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minutedoubleredis1.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_double_redis +name: feature_double_redis +entity: myentity +owner: bob@example.com +description: test entity. 
+valueType: DOUBLE +uri: https://github.com/bob/example diff --git a/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minutefloatredis1.yaml b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minutefloatredis1.yaml new file mode 100644 index 0000000000..3ff38358ce --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minutefloatredis1.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_float_redis +name: feature_float_redis +entity: myentity +owner: bob@example.com +description: test entity. +valueType: FLOAT +uri: https://github.com/bob/example diff --git a/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minuteint32redis1.yaml b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minuteint32redis1.yaml new file mode 100644 index 0000000000..ebeb92dc99 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minuteint32redis1.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_int32_redis +name: feature_int32_redis +entity: myentity +owner: bob@example.com +description: test entity. +valueType: INT32 +uri: https://github.com/bob/example diff --git a/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minuteint64redis1.yaml b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minuteint64redis1.yaml new file mode 100644 index 0000000000..dd6b7339bf --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/feature/feature_minuteint64redis1.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_int64_redis +name: feature_int64_redis +entity: myentity +owner: bob@example.com +description: test entity. +valueType: INT64 +uri: https://github.com/bob/example diff --git a/testing/integration/tests/redis-bq-dataflow/data/import/import_csv.yaml b/testing/integration/tests/redis-bq-dataflow/data/import/import_csv.yaml new file mode 100644 index 0000000000..3eb170a23c --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/import/import_csv.yaml @@ -0,0 +1,19 @@ +type: file.csv +sourceOptions: + path: gs://it-feast-storage/test-cases/test_data.csv +entities: + - myentity +schema: + entityIdColumn: entity + timestampColumn: ts + fields: + - name: entity + - name: ts + - name: feature1 + featureId: myentity.feature_double_redis + - name: feature2 + featureId: myentity.feature_float_redis + - name: feature3 + featureId: myentity.feature_int32_redis + - name: feature4 + featureId: myentity.feature_int64_redis diff --git a/testing/integration/tests/redis-bq-dataflow/data/test_data.csv b/testing/integration/tests/redis-bq-dataflow/data/test_data.csv new file mode 100644 index 0000000000..16906eef98 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/data/test_data.csv @@ -0,0 +1,50 @@ +0,2018-01-01T00:00:00,0.16654998178974967,0.0779124915810051,8,1 +0,2018-01-01T00:01:00,0.35055416999061695,0.3789967915556205,4,7 +0,2018-01-01T00:02:00,0.922617931213872,0.3179716588679623,8,6 +0,2018-01-01T00:03:00,0.06582437841319277,0.05565087947895475,9,1 +0,2018-01-01T00:04:00,0.8470568422759338,0.21347178570025072,2,10 +0,2018-01-01T00:05:00,0.11361651376595083,0.610881579742112,8,10 +0,2018-01-01T00:06:00,0.3034691430518833,0.19830856710384326,6,2 +0,2018-01-01T00:07:00,0.8346843022317924,0.08544530941545558,2,4 +0,2018-01-01T00:08:00,0.012367592836242891,0.5726466047910151,2,5 +0,2018-01-01T00:09:00,0.9887391081292758,0.9819570408280592,9,10 +1,2018-01-01T00:00:00,0.7503367819099366,0.946197160138704,10,1 
+1,2018-01-01T00:01:00,0.9190195271598668,0.8836270958333856,10,4 +1,2018-01-01T00:02:00,0.07725768926853083,0.448071643977631,4,9 +1,2018-01-01T00:03:00,0.4965090410288343,0.9615236179532041,3,5 +1,2018-01-01T00:04:00,0.4838512691935104,0.355736320341819,7,2 +1,2018-01-01T00:05:00,0.7422860280929264,0.26936605638140254,7,6 +1,2018-01-01T00:06:00,0.007120660156963887,0.9457195989857463,5,4 +1,2018-01-01T00:07:00,0.5165841927879966,0.5734751201042606,1,4 +1,2018-01-01T00:08:00,0.0874458350429409,0.3284354801066641,3,8 +1,2018-01-01T00:09:00,0.8091561596661963,0.5176044026790632,1,9 +2,2018-01-01T00:00:00,0.5440258526780313,0.7936189500778384,3,5 +2,2018-01-01T00:01:00,0.9012681719733718,0.408129175696462,5,8 +2,2018-01-01T00:02:00,0.1357239530601766,0.2882272639567518,9,9 +2,2018-01-01T00:03:00,0.5482100359374814,0.22784885285759426,2,10 +2,2018-01-01T00:04:00,0.47915480013638667,0.5783117797172574,6,2 +2,2018-01-01T00:05:00,0.08954537587399436,0.6036422156720167,2,5 +2,2018-01-01T00:06:00,0.022254256710355302,0.8425123292474225,2,2 +2,2018-01-01T00:07:00,0.2981277839268307,0.7187791198561501,9,6 +2,2018-01-01T00:08:00,0.7483695369918476,0.24681429759605789,10,3 +2,2018-01-01T00:09:00,0.1357618414286158,0.887316489600256,3,10 +3,2018-01-01T00:00:00,0.24835796330732807,0.8871182399308823,3,2 +3,2018-01-01T00:01:00,0.18779166308706263,0.6635037319574626,4,10 +3,2018-01-01T00:02:00,0.00017995781865332017,0.9437703134427515,9,2 +3,2018-01-01T00:03:00,0.44519915810431854,0.03739113677349182,4,5 +3,2018-01-01T00:04:00,0.27156326671637077,0.06880899222977854,3,2 +3,2018-01-01T00:05:00,0.7647529493108539,0.14002956138831768,7,10 +3,2018-01-01T00:06:00,0.9954469835451967,0.7567907196365666,9,9 +3,2018-01-01T00:07:00,0.3883998745214877,0.18671618739049767,7,10 +3,2018-01-01T00:08:00,0.5448378857720906,0.38482161282383076,3,1 +3,2018-01-01T00:09:00,0.3550003641256,0.1818481760674323,1,9 +4,2018-01-01T00:00:00,0.5925452403226181,0.30212100652221296,5,10 +4,2018-01-01T00:01:00,0.18599533814587,0.5230622774213863,6,5 +4,2018-01-01T00:02:00,0.9599946262502064,0.26435543431553166,10,8 +4,2018-01-01T00:03:00,0.22755474767811312,0.6693510146123766,1,8 +4,2018-01-01T00:04:00,0.12063065422121622,0.7460800163440976,10,9 +4,2018-01-01T00:05:00,0.58262241980741,0.2714493926314999,9,5 +4,2018-01-01T00:06:00,0.26952820440313163,0.40841655204277727,5,1 +4,2018-01-01T00:07:00,0.7779897852625218,0.39955885342995734,9,3 +4,2018-01-01T00:08:00,0.9892858266244913,0.91265457567995,2,10 +4,2018-01-01T00:09:00,0.31825556885425366,0.6727958709215186,3,9 diff --git a/testing/integration/tests/redis-bq-dataflow/main.py b/testing/integration/tests/redis-bq-dataflow/main.py new file mode 100644 index 0000000000..7c77180bac --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/main.py @@ -0,0 +1,9 @@ +import test_feast +from feast.sdk.client import Client + +# This main.py is for easier local debugging +# because running pytest often hides useful debugging output + +if __name__ == "__main__": + feast_client = Client(verbose=True) + test_feast.TestFeastIntegration().test_end_to_end(client=feast_client) diff --git a/testing/integration/tests/redis-bq-dataflow/test_feast.py b/testing/integration/tests/redis-bq-dataflow/test_feast.py new file mode 100644 index 0000000000..e7c3003fe0 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/test_feast.py @@ -0,0 +1,133 @@ +import os +import subprocess +from subprocess import PIPE +from time import sleep + +import fastavro +import feast.sdk.utils.gs_utils as utils 
+import numpy as np +import pandas as pd +import pytest +import yaml +from feast.sdk.client import Client +from feast.sdk.resources.entity import Entity +from feast.sdk.resources.feature import Feature +from feast.sdk.resources.feature_set import FeatureSet +from feast.sdk.utils import bq_util +from google.cloud import storage + + +@pytest.fixture +def client(): + return Client(verbose=True) + + +# Init the system by registering relevant resources. +def _register_resources(client, entities_fldr, features_fldr): + resources = [] + for ent_file in os.listdir(entities_fldr): + resources.append(Entity.from_yaml(os.path.join(entities_fldr, ent_file))) + for feat_file in os.listdir(features_fldr): + resources.append(Feature.from_yaml(os.path.join(features_fldr, feat_file))) + client.apply(resources) + + +# Run an import job given an import spec. +def _run_job_and_wait_for_completion(job_yaml): + out = subprocess.run( + "feast jobs run {}".format(job_yaml).split(" "), check=True, stdout=PIPE + ) + job_id = out.stdout.decode("utf-8").split(" ")[-1] + job_status, job_complete = "UNKNOWN", False + terminal_states = ["COMPLETED", "ABORTED", "ERROR", "UNKNOWN"] + while not job_complete: + out = subprocess.run( + "feast get job {}".format(job_id).split(" "), check=True, stdout=PIPE + ) + job_details = yaml.load(out.stdout.decode("utf-8").replace("\t", " ")) + if job_details["Status"] != job_status: + job_status = job_details["Status"] + print("Job id {} currently {}".format(job_id, job_status)) + if job_status in terminal_states: + break + sleep(10) + return job_status + + +def _stage_data(local, remote): + split = utils.split_gs_path(remote) + storage_client = storage.Client() + bucket = storage_client.get_bucket(split[0]) + blob = bucket.blob(split[1]) + + blob.upload_from_filename(local) + + +class TestFeastIntegration: + def test_end_to_end(self, client): + project_id = os.environ.get("PROJECT_ID") + bucket_name = os.environ.get("BUCKET_NAME") + testdata_path = os.path.abspath(os.path.join(__file__, "..", "testdata")) + + with open(os.path.join(testdata_path, "myentity.avro"), "rb") as myentity_file: + avro_reader = fastavro.reader(myentity_file) + expected = ( + pd.DataFrame.from_records(avro_reader) + .drop(columns=["created_timestamp"]) + .sort_values(["id", "event_timestamp"]) + .reset_index(drop=True) + ) + + self.run_batch_import(bucket_name, client) + self.validate_warehouse_data(project_id, expected) + self.validate_serving_data(client, expected) + + @staticmethod + def run_batch_import(bucket_name, client): + _stage_data( + "testdata/myentity.csv", + "gs://{}/test-cases/myentity.csv".format(bucket_name), + ) + _register_resources(client, "testdata/entity", "testdata/feature") + job_status = _run_job_and_wait_for_completion("testdata/import/import_csv.yaml") + assert job_status == "COMPLETED" + + @staticmethod + def validate_serving_data(client, expected): + features = { + "myentity.feature_double_redis", + "myentity.feature_float_redis", + "myentity.feature_int32_redis", + "myentity.feature_int64_redis", + } + + feature_set = FeatureSet(entity="myentity", features=[f for f in features]) + entity_keys = [str(id) for id in list(expected.id.unique())] + actual = client.get_serving_data(feature_set, entity_keys=entity_keys) + actual = actual.sort_values(["myentity"]).reset_index(drop=True) + + # Serving store returns only latest feature values so we update expected values + # to only have the feature values with latest event timestamp + expected = 
expected.loc[expected.groupby("id")["event_timestamp"].idxmax()] + # Ensure the columns in expected are the same as those in actual + # For example, in actual there is not event_timestamp column + expected = expected.rename(columns={"id": "myentity"}) + expected = expected[ + [c.replace("myentity.", "") for c in actual.columns] + ].reset_index(drop=True) + + np.testing.assert_array_equal(expected, actual) + + @staticmethod + def validate_warehouse_data(project_id, expected): + actual = ( + bq_util.query_to_dataframe( + f"SELECT * FROM `feast_it.myentity_view`", project=project_id + ) + # created_timestamp is not relevant for validating correctness of import + # and retrieval of feature values + .drop(columns=["created_timestamp"]) + .sort_values(["id", "event_timestamp"]) + .reset_index(drop=True) + ) + assert expected.equals(actual) diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/entity/myentity.yaml b/testing/integration/tests/redis-bq-dataflow/testdata/entity/myentity.yaml new file mode 100644 index 0000000000..8fa945a677 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/entity/myentity.yaml @@ -0,0 +1,5 @@ +name: myentity +description: my test entity +tags: + - tag1 + - tag2 diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_double_redis.yaml b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_double_redis.yaml new file mode 100644 index 0000000000..1ddc2558a3 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_double_redis.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_double_redis +name: feature_double_redis +entity: myentity +owner: bob@example.com +description: test entity. +valueType: DOUBLE +uri: https://github.com/bob/example \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_float_redis.yaml b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_float_redis.yaml new file mode 100644 index 0000000000..e5e7afa470 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_float_redis.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_float_redis +name: feature_float_redis +entity: myentity +owner: bob@example.com +description: test entity. +valueType: FLOAT +uri: https://github.com/bob/example \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_int32_redis.yaml b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_int32_redis.yaml new file mode 100644 index 0000000000..caacd5a055 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_int32_redis.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_int32_redis +name: feature_int32_redis +entity: myentity +owner: bob@example.com +description: test entity. +valueType: INT32 +uri: https://github.com/bob/example \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_int64_redis.yaml b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_int64_redis.yaml new file mode 100644 index 0000000000..e085a62c02 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/feature/feature_int64_redis.yaml @@ -0,0 +1,7 @@ +id: myentity.feature_int64_redis +name: feature_int64_redis +entity: myentity +owner: bob@example.com +description: test entity. 
+valueType: INT64 +uri: https://github.com/bob/example \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/import/import_csv.yaml b/testing/integration/tests/redis-bq-dataflow/testdata/import/import_csv.yaml new file mode 100644 index 0000000000..04f45d5444 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/import/import_csv.yaml @@ -0,0 +1,19 @@ +type: file.csv +sourceOptions: + path: gs://it-feast-storage/test-cases/myentity.csv +entities: + - myentity +schema: + entityIdColumn: entity + timestampColumn: ts + fields: + - name: entity + - name: ts + - name: feature1 + featureId: myentity.feature_double_redis + - name: feature2 + featureId: myentity.feature_float_redis + - name: feature3 + featureId: myentity.feature_int32_redis + - name: feature4 + featureId: myentity.feature_int64_redis diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/myentity.avro b/testing/integration/tests/redis-bq-dataflow/testdata/myentity.avro new file mode 100644 index 0000000000..42dfb251ac Binary files /dev/null and b/testing/integration/tests/redis-bq-dataflow/testdata/myentity.avro differ diff --git a/testing/integration/tests/redis-bq-dataflow/testdata/myentity.csv b/testing/integration/tests/redis-bq-dataflow/testdata/myentity.csv new file mode 100644 index 0000000000..16906eef98 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/testdata/myentity.csv @@ -0,0 +1,50 @@ +0,2018-01-01T00:00:00,0.16654998178974967,0.0779124915810051,8,1 +0,2018-01-01T00:01:00,0.35055416999061695,0.3789967915556205,4,7 +0,2018-01-01T00:02:00,0.922617931213872,0.3179716588679623,8,6 +0,2018-01-01T00:03:00,0.06582437841319277,0.05565087947895475,9,1 +0,2018-01-01T00:04:00,0.8470568422759338,0.21347178570025072,2,10 +0,2018-01-01T00:05:00,0.11361651376595083,0.610881579742112,8,10 +0,2018-01-01T00:06:00,0.3034691430518833,0.19830856710384326,6,2 +0,2018-01-01T00:07:00,0.8346843022317924,0.08544530941545558,2,4 +0,2018-01-01T00:08:00,0.012367592836242891,0.5726466047910151,2,5 +0,2018-01-01T00:09:00,0.9887391081292758,0.9819570408280592,9,10 +1,2018-01-01T00:00:00,0.7503367819099366,0.946197160138704,10,1 +1,2018-01-01T00:01:00,0.9190195271598668,0.8836270958333856,10,4 +1,2018-01-01T00:02:00,0.07725768926853083,0.448071643977631,4,9 +1,2018-01-01T00:03:00,0.4965090410288343,0.9615236179532041,3,5 +1,2018-01-01T00:04:00,0.4838512691935104,0.355736320341819,7,2 +1,2018-01-01T00:05:00,0.7422860280929264,0.26936605638140254,7,6 +1,2018-01-01T00:06:00,0.007120660156963887,0.9457195989857463,5,4 +1,2018-01-01T00:07:00,0.5165841927879966,0.5734751201042606,1,4 +1,2018-01-01T00:08:00,0.0874458350429409,0.3284354801066641,3,8 +1,2018-01-01T00:09:00,0.8091561596661963,0.5176044026790632,1,9 +2,2018-01-01T00:00:00,0.5440258526780313,0.7936189500778384,3,5 +2,2018-01-01T00:01:00,0.9012681719733718,0.408129175696462,5,8 +2,2018-01-01T00:02:00,0.1357239530601766,0.2882272639567518,9,9 +2,2018-01-01T00:03:00,0.5482100359374814,0.22784885285759426,2,10 +2,2018-01-01T00:04:00,0.47915480013638667,0.5783117797172574,6,2 +2,2018-01-01T00:05:00,0.08954537587399436,0.6036422156720167,2,5 +2,2018-01-01T00:06:00,0.022254256710355302,0.8425123292474225,2,2 +2,2018-01-01T00:07:00,0.2981277839268307,0.7187791198561501,9,6 +2,2018-01-01T00:08:00,0.7483695369918476,0.24681429759605789,10,3 +2,2018-01-01T00:09:00,0.1357618414286158,0.887316489600256,3,10 +3,2018-01-01T00:00:00,0.24835796330732807,0.8871182399308823,3,2 
+3,2018-01-01T00:01:00,0.18779166308706263,0.6635037319574626,4,10 +3,2018-01-01T00:02:00,0.00017995781865332017,0.9437703134427515,9,2 +3,2018-01-01T00:03:00,0.44519915810431854,0.03739113677349182,4,5 +3,2018-01-01T00:04:00,0.27156326671637077,0.06880899222977854,3,2 +3,2018-01-01T00:05:00,0.7647529493108539,0.14002956138831768,7,10 +3,2018-01-01T00:06:00,0.9954469835451967,0.7567907196365666,9,9 +3,2018-01-01T00:07:00,0.3883998745214877,0.18671618739049767,7,10 +3,2018-01-01T00:08:00,0.5448378857720906,0.38482161282383076,3,1 +3,2018-01-01T00:09:00,0.3550003641256,0.1818481760674323,1,9 +4,2018-01-01T00:00:00,0.5925452403226181,0.30212100652221296,5,10 +4,2018-01-01T00:01:00,0.18599533814587,0.5230622774213863,6,5 +4,2018-01-01T00:02:00,0.9599946262502064,0.26435543431553166,10,8 +4,2018-01-01T00:03:00,0.22755474767811312,0.6693510146123766,1,8 +4,2018-01-01T00:04:00,0.12063065422121622,0.7460800163440976,10,9 +4,2018-01-01T00:05:00,0.58262241980741,0.2714493926314999,9,5 +4,2018-01-01T00:06:00,0.26952820440313163,0.40841655204277727,5,1 +4,2018-01-01T00:07:00,0.7779897852625218,0.39955885342995734,9,3 +4,2018-01-01T00:08:00,0.9892858266244913,0.91265457567995,2,10 +4,2018-01-01T00:09:00,0.31825556885425366,0.6727958709215186,3,9 diff --git a/testing/integration/tests/redis-bq-dataflow/tf/backend.tf b/testing/integration/tests/redis-bq-dataflow/tf/backend.tf new file mode 100644 index 0000000000..fb1f9c81b5 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/backend.tf @@ -0,0 +1,6 @@ +terraform { + backend "gcs" { + bucket = "kf-feast-terraform-state" + prefix = "tf/k8s-cluster/integration-test" + } +} diff --git a/testing/integration/tests/redis-bq-dataflow/tf/data.tf b/testing/integration/tests/redis-bq-dataflow/tf/data.tf new file mode 100644 index 0000000000..a988612c59 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/data.tf @@ -0,0 +1 @@ +data "google_client_config" "current" {} diff --git a/testing/integration/tests/redis-bq-dataflow/tf/input.tf b/testing/integration/tests/redis-bq-dataflow/tf/input.tf new file mode 100644 index 0000000000..bb653c213a --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/input.tf @@ -0,0 +1,3 @@ +variable "docker_tag" { + description = "Docker image to deploy" +} diff --git a/testing/integration/tests/redis-bq-dataflow/tf/main.tf b/testing/integration/tests/redis-bq-dataflow/tf/main.tf new file mode 100644 index 0000000000..fadca6442a --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/main.tf @@ -0,0 +1,70 @@ +locals { + project_name = "kf-feast" + region = "us-central1" + subnetwork = "regions/${local.region}/subnetworks/default" + network = "default" + cluster_name = "it-feast" + + job_runner_options = { + "project" = "${local.project_name}" + "region" = "${local.region}" + "tempLocation" = "gs://${local.cluster_name}-storage/tempJob" + "subnetwork" = "${local.subnetwork}" + "maxNumWorkers" = "64" + "autoscalingAlgorithm" = "THROUGHPUT_BASED" + } + + errors_store_options = { + "path" = "gs://${local.cluster_name}-storage/errors" + } +} + +module "cluster" { + source = "../../../tf/modules/cluster" + cluster_name = "${local.cluster_name}" + project_name = "${local.project_name}" + region = "${local.region}" + bucket_name = "${local.cluster_name}-storage" + network = "${local.network}" + subnetwork = "${local.subnetwork}" + gke_machine_type = "n1-standard-8" +} + +resource "null_resource" "wait_for_regional_cluster" { + provisioner "local-exec" { + command = 
"${path.module}/scripts/wait-for-cluster.sh ${local.project_name} ${local.cluster_name}" + } + + provisioner "local-exec" { + when = "destroy" + command = "${path.module}/scripts/wait-for-cluster.sh ${local.project_name} ${local.cluster_name}" + } + + depends_on = ["module.cluster"] +} + +module "feast" { + source = "../../../tf/modules/feast-helm" + project_name = "${local.project_name}" + region = "${local.region}" + subnetwork = "${local.subnetwork}" + docker_tag = "${var.docker_tag}" + core_address = "10.128.0.99" + serving_address = "10.128.0.100" + redis_address = "10.128.0.101" + load_balancer_source_range = "10.0.0.0/8" + job_runner = "DataflowRunner" + job_runner_options = "'${jsonencode(local.job_runner_options)}'" + bucket_name = "${local.cluster_name}-storage" + bq_dataset = "${google_bigquery_dataset.feast_bq_dataset.dataset_id}" + + depends_on = ["module.cluster.cluster_name", "null_resource.wait_for_regional_cluster"] +} + +resource "google_bigquery_dataset" "feast_bq_dataset" { + dataset_id = "feast_it" + description = "Feast integration test dataset" + default_table_expiration_ms = 36000000 + location = "US" + delete_contents_on_destroy = true +} diff --git a/testing/integration/tests/redis-bq-dataflow/tf/provider.tf b/testing/integration/tests/redis-bq-dataflow/tf/provider.tf new file mode 100644 index 0000000000..91f4c3caca --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/provider.tf @@ -0,0 +1,16 @@ +provider "google" { + version = "2.1.0" + # credentials = "${file("~/.secrets/${local.project_name}.json")}" + project = "${local.project_name}" +} + +provider "helm" { + kubernetes { + host = "${module.cluster.endpoint}" + token = "${data.google_client_config.current.access_token}" + + client_certificate = "${base64decode(module.cluster.client_certificate)}" + client_key = "${base64decode(module.cluster.client_key)}" + cluster_ca_certificate = "${base64decode(module.cluster.cluster_ca_certificate)}" + } +} diff --git a/testing/integration/tests/redis-bq-dataflow/tf/scripts/empty-bq.sh b/testing/integration/tests/redis-bq-dataflow/tf/scripts/empty-bq.sh new file mode 100755 index 0000000000..5abeb004ab --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/scripts/empty-bq.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +PROJECT_ID=$1 +DATASET_NAME=$2 +for i in $(bq ls $PROJECT_ID:$DATASET_NAME | sed 1,2d | awk "{print \$1}"); +do + if [ ! -z "$i" ]; then + bq rm -ft $PROJECT_ID:$DATASET_NAME.$i; + fi +done \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/tf/scripts/wait-for-cluster.sh b/testing/integration/tests/redis-bq-dataflow/tf/scripts/wait-for-cluster.sh new file mode 100755 index 0000000000..aba1dc2450 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/scripts/wait-for-cluster.sh @@ -0,0 +1,36 @@ + #!/bin/bash + + # Copyright 2018 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+ + +# from https://github.com/terraform-google-modules/terraform-google-kubernetes-engine/blob/176ad6f47d2d92ac9a49ef112830da2f539b737d/scripts/wait-for-cluster.sh +set -e + +PROJECT=$1 +CLUSTER_NAME=$2 +gcloud_command="gcloud container clusters list --project=$PROJECT --format=json" +jq_query=".[] | select(.name==\"$CLUSTER_NAME\") | .status" + +echo "Waiting for cluster $2 in project $1 to reconcile..." + +current_status=$($gcloud_command | jq -r "$jq_query") + +while [ "${current_status}" = "RECONCILING" ]; do + printf "." + sleep 5 + current_status=$($gcloud_command | jq -r "$jq_query") +done + +echo "Cluster is ready!" \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/tf/values.yaml b/testing/integration/tests/redis-bq-dataflow/tf/values.yaml new file mode 100755 index 0000000000..0a21749c67 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/tf/values.yaml @@ -0,0 +1,101 @@ +--- +core: + image: + pullPolicy: IfNotPresent + registry: gcr.io/kf-feast + repository: feast-core + tag: "integration-test" + replicaCount: 1 + resources: + limits: + cpu: 4 + memory: 8G + requests: + cpu: 1 + memory: 4G + rollingUpdate: + maxSurge: 2 + maxUnavailable: 0 + service: + grpc: + port: 6565 + targetPort: 6565 + http: + port: 80 + targetPort: 8080 + jobs: + workspace: "it-feast-storage/workspace" + runner: "DataflowRunner" + options: '{"autoscalingAlgorithm":"THROUGHPUT_BASED","maxNumWorkers":"64","project":"kf-feast","region":"us-central1","subnetwork":"regions/us-central1/subnetworks/default","tempLocation":"gs://it-feast-storage/tempJob"}' + monitoring: + period: 5000 + initialDelay: 60000 +dataflow: + projectID: "kf-feast" + location: "us-central1" + +postgresql: + name: feast-metadata + service: + port: 5432 + postgresPassword: password + +postgresql: + provision: true + imageTag: 9.6.11 + service: + port: 5432 + +redis: + name: feast-redis + provision: true + master: + service: + type: LoadBalancer + annotations: + "cloud.google.com/load-balancer-type": "Internal" + +serving: + config: + maxEntityPerBatch: 2000 + maxNumberOfThread: 1024 + redisPool: + maxIdle: 16 + maxSize: 1024 + timeout: 1 + image: + pullPolicy: IfNotPresent + registry: gcr.io/kf-feast + repository: feast-serving + tag: "integration-test" + jaeger: + enabled: false + replicaCount: 1 + resources: + limits: + cpu: 4 + memory: 8G + requests: + cpu: 2 + memory: 2G + rollingUpdate: + maxSurge: 2 + maxUnavailable: 0 + service: + grpc: + port: 6566 + targetPort: 6566 + http: + port: 80 + targetPort: 8080 + +store: + errors: + type: "file.json" + options: '{"path":"gs://it-feast-storage/error-log"}' + warehouse: + type: "bigquery" + options: '{"dataset":"feast_it","project":"kf-feast"}' + serving: + type: "redis" + options: '{"host":"10.128.0.101","port":"6379"}' \ No newline at end of file diff --git a/testing/integration/tests/redis-bq-dataflow/workflow.yaml b/testing/integration/tests/redis-bq-dataflow/workflow.yaml new file mode 100644 index 0000000000..7ea30c4973 --- /dev/null +++ b/testing/integration/tests/redis-bq-dataflow/workflow.yaml @@ -0,0 +1,233 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: feast-integration-test- +spec: + entrypoint: run + onExit: exit-handler + arguments: + parameters: + - name: revision + value: integration-test + - name: prId + value: "none" + - name: gitRef + value: "none" + templates: + - name: run + steps: + - - name: get-feast + template: get-feast + - - name: build-jars + template: build-jars + arguments: + 
artifacts: + - name: feast + from: "{{steps.get-feast.outputs.artifacts.feast-repo}}" + - name: build-cli + template: build-cli + arguments: + artifacts: + - name: feast + from: "{{steps.get-feast.outputs.artifacts.feast-repo}}" + - - name: build-push-docker + template: build-docker + arguments: + artifacts: + - name: feast + from: "{{steps.build-jars.outputs.artifacts.feast-build}}" + - - name: terraform-provision + template: terraform + arguments: + parameters: + - name: arg + value: "apply -var docker_tag=integration-test -auto-approve ." + artifacts: + - name: feast + from: "{{steps.get-feast.outputs.artifacts.feast-repo}}" + - name: feast-cli + from: "{{steps.build-cli.outputs.artifacts.feast-cli}}" + - - name: test + template: run-test + arguments: + artifacts: + - name: feast + from: "{{steps.get-feast.outputs.artifacts.feast-repo}}" + - name: feast-cli + from: "{{steps.build-cli.outputs.artifacts.feast-cli}}" + + - name: exit-handler + steps: + - - name: terraform-destroy + template: terraform-destroy + arguments: + parameters: + - name: arg + value: "destroy -var docker_tag=integration-test -auto-approve ." + artifacts: + - name: feast + from: "{{workflow.outputs.artifacts.feast-repo}}" + - name: get-feast + steps: + - - name: get-feast-by-pullrequest + template: get-feast-by-pullrequest + when: "{{workflow.parameters.prId}} != none" + - name: get-feast-by-gitref + template: get-feast-by-gitref + when: "{{workflow.parameters.gitRef}} != none" + outputs: + artifacts: + - name: feast-repo + from: "{{workflow.outputs.artifacts.feast-repo}}" + globalName: feast-repo + - name: get-feast-by-pullrequest + container: + image: alpine/git + command: + - /bin/sh + - -c + - | + git clone https://github.com/gojek/feast; + cd feast; + git fetch origin pull/{{workflow.parameters.prId}}/head; + git checkout FETCH_HEAD + outputs: + artifacts: + - name: feast + path: /git/feast + globalName: feast-repo + - name: get-feast-by-gitref + container: + image: alpine/git + command: + - /bin/sh + - -c + - | + git clone https://github.com/gojek/feast; + cd feast; + git checkout {{workflow.parameters.gitRef}} + outputs: + artifacts: + - name: feast + path: /git/feast + globalName: feast-repo + - name: build-jars + inputs: + artifacts: + - name: feast + path: /feast + container: + image: maven:3.6.0-jdk-8-slim + command: ["/bin/bash", "-c"] + args: + - mvn package -DskipTests -Drevision=integration-test -T 6 -DdependencyLocationsEnabled=false + workingDir: /feast + outputs: + artifacts: + - name: feast-build + path: /feast + globalName: feast-build + - name: build-docker + inputs: + artifacts: + - name: feast + path: /feast + container: + image: gcr.io/kf-feast/docker:18.09-gcloud + command: [sh, -c] + args: [' + until docker ps; do sleep 3; done; + docker build -t gcr.io/kf-feast/feast-core:integration-test -f docker/core/Dockerfile --build-arg RELEASE=integration-test .; + docker build -t gcr.io/kf-feast/feast-serving:integration-test -f docker/serving/Dockerfile --build-arg RELEASE=integration-test .; + docker push gcr.io/kf-feast/feast-core:integration-test; + docker push gcr.io/kf-feast/feast-serving:integration-test + '] + env: + - name: DOCKER_HOST + value: 127.0.0.1 + workingDir: /feast + sidecars: + - name: dind + image: docker:stable-dind + securityContext: + privileged: true + mirrorVolumeMounts: true + - name: build-cli + inputs: + artifacts: + - name: feast + path: /build/feast + container: + image: golang:1.12 + command: ["/bin/bash", "-c"] + args: + - | + GOOS=linux go build -o 
feast-cli-v0.0.0-linux-amd64 ./cli/feast + GOOS=linux go build -o feast-cli-v0.0.0-darwin-amd64 ./cli/feast + # Fix the feast cli binary name for use by subsequent steps + cp feast-cli-v0.0.0-linux-amd64 /usr/local/bin/feast + workingDir: /build/feast + outputs: + artifacts: + - name: feast-cli + path: /usr/local/bin/feast + - name: terraform + inputs: + parameters: + - name: arg + artifacts: + - name: feast + path: /feast + - name: feast-cli + path: /bin/feast + container: + image: gcr.io/kf-feast/it-terraform:0.1.0 + workingDir: /feast/testing/integration/tests/redis-bq-dataflow/tf + command: ["sh", "-c"] + args: + - | + terraform init + terraform {{inputs.parameters.arg}} + - name: run-test + inputs: + artifacts: + - name: feast-cli + path: /bin/feast + - name: feast + path: /feast + container: + image: python:3.7.2 + command: ["/bin/bash", "-c"] + args: [' + feast config set coreURI 10.128.0.99:6565; + pip install pytest; + pip install -r /feast/sdk/python/test-requirements.txt; + pip install -e /feast/sdk/python; + pytest --capture=no + '] + workingDir: /feast/testing/integration/tests/redis-bq-dataflow + env: + - name: FEAST_CORE_URL + value: "10.128.0.99:6565" + - name: FEAST_SERVING_URL + value: "10.128.0.100:6566" + - name: PROJECT_ID + value: kf-feast + - name: BUCKET_NAME + value: it-feast-storage + - name: terraform-destroy + inputs: + parameters: + - name: arg + artifacts: + - name: feast + path: /feast + container: + image: gcr.io/kf-feast/it-terraform:0.1.0 + workingDir: /feast/testing/integration/tests/redis-bq-dataflow/tf + command: ["sh", "-c"] + args: [' + terraform init; + terraform {{inputs.parameters.arg}} + '] + diff --git a/testing/integration/tf/modules/cluster/input.tf b/testing/integration/tf/modules/cluster/input.tf new file mode 100644 index 0000000000..bccca70fbb --- /dev/null +++ b/testing/integration/tf/modules/cluster/input.tf @@ -0,0 +1,39 @@ +variable "cluster_name" { + default = "feast" + description = "Cluster name" +} + +variable "project_name" { + description = "Project name" +} + +variable "region" { + description = "Subnet region" +} + +variable "network" { + default = "default" +} + +variable "subnetwork" { + default = "default" +} + +variable "gke_machine_type" { + description = "The machine type for the default node pool" + default = "n1-standard-4" +} + +variable "bucket_name" { + description = "Working storage for feast" +} + +variable "feast_node_pool_min_size" { + description = "Minimum number of nodes for node pool" + default = 1 +} + +variable "feast_node_pool_max_size" { + description = "Maximum number of nodes for node pool" + default = 2 +} diff --git a/testing/integration/tf/modules/cluster/main.tf b/testing/integration/tf/modules/cluster/main.tf new file mode 100644 index 0000000000..23b66241a4 --- /dev/null +++ b/testing/integration/tf/modules/cluster/main.tf @@ -0,0 +1,42 @@ +data "google_client_config" "current" {} + +resource "google_storage_bucket" "feast_storage_bucket" { + name = "${var.bucket_name}" + location = "${var.region}" + force_destroy = true +} + +resource "google_container_cluster" "feast_k8s" { + name = "${var.cluster_name}" + region = "${var.region}" + initial_node_count = 1 + + # node_version = "1.11.5-gke.5" + # min_master_version = "1.11.5-gke.5" + network = "${var.network}" + + subnetwork = "${var.subnetwork}" + + enable_legacy_abac = true + + node_config { + machine_type = "${var.gke_machine_type}" + disk_size_gb = "50" + + oauth_scopes = [ + "https://www.googleapis.com/auth/compute", + 
"https://www.googleapis.com/auth/devstorage.read_write", + "https://www.googleapis.com/auth/devstorage.full_control", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/bigtable.admin", + "https://www.googleapis.com/auth/bigtable.data", + ] + } + + provisioner "local-exec" { + when = "destroy" + command = "sleep 90" + } +} diff --git a/testing/integration/tf/modules/cluster/output.tf b/testing/integration/tf/modules/cluster/output.tf new file mode 100644 index 0000000000..6b7af14886 --- /dev/null +++ b/testing/integration/tf/modules/cluster/output.tf @@ -0,0 +1,33 @@ + + +output "endpoint" { + value = "${google_container_cluster.feast_k8s.endpoint}" +} + +output "cluster_name" { + value = "${var.cluster_name}" +} + +output "client_certificate" { + value = "${google_container_cluster.feast_k8s.master_auth.0.client_certificate}" +} + +output "client_key" { + value = "${google_container_cluster.feast_k8s.master_auth.0.client_key}" +} + +output "cluster_ca_certificate" { + value = "${google_container_cluster.feast_k8s.master_auth.0.cluster_ca_certificate}" +} + +output "username" { + value = "${google_container_cluster.feast_k8s.master_auth.0.username}" +} + +output "password" { + value = "${google_container_cluster.feast_k8s.master_auth.0.password}" +} + +output "feast_storage_bucket" { + value = "${google_storage_bucket.feast_storage_bucket.name}" +} \ No newline at end of file diff --git a/testing/integration/tf/modules/feast-helm/input.tf b/testing/integration/tf/modules/feast-helm/input.tf new file mode 100644 index 0000000000..6652bac56f --- /dev/null +++ b/testing/integration/tf/modules/feast-helm/input.tf @@ -0,0 +1,61 @@ +variable "project_name" { + description = "Project name" +} + +variable "subnetwork" { + description = "Desired subnetwork" +} + +variable "region" { + description = "Subnet region" +} + +variable "docker_tag" { + default = "0.1.0" + description = "Feast build version" +} + +variable "core_address" { + description = "Core internal address" +} + +variable "serving_address" { + description = "Serving internal address" +} + +variable "redis_address" { + description = "Redis internal address" +} + +variable "statsd_host" { + description = "Statsd host to write metrics to" + default = "" +} + +variable "job_runner" { + description = "Desired job runner" + default = "Dataflow" +} + +variable "job_runner_options" { + description = "Job runner options as a json string" + default = "'{}'" +} + +variable "load_balancer_source_range" { + description = "ingress filter for google internal load balancer" +} + +variable "bq_dataset" { + description = "BigQuery dataset for warehouse" +} + +variable depends_on { + default = [] + + type = "list" +} + +variable "bucket_name" { + description = "Working storage for feast" +} \ No newline at end of file diff --git a/testing/integration/tf/modules/feast-helm/main.tf b/testing/integration/tf/modules/feast-helm/main.tf new file mode 100644 index 0000000000..687d392fe9 --- /dev/null +++ b/testing/integration/tf/modules/feast-helm/main.tf @@ -0,0 +1,100 @@ + +locals { + warehouse_option = { + "project" = "${var.project_name}" + "dataset" = "${var.bq_dataset}" + } + serving_option = { + "host" = "${var.redis_address}" + "port" = "6379" + } + store_error_option = { + "path" = "gs://${var.bucket_name}/error-log" + } +} + +resource "google_compute_address" "core_address" { + project = "${var.project_name}" + name = 
"feast-core-ip" + subnetwork = "${var.subnetwork}" + address_type = "INTERNAL" + address = "${var.core_address}" + region = "${var.region}" +} + +resource "google_compute_address" "serving_address" { + project = "${var.project_name}" + name = "feast-serving-ip" + subnetwork = "${var.subnetwork}" + address_type = "INTERNAL" + address = "${var.serving_address}" + region = "${var.region}" +} + +resource "template_file" "helm_values" { + template = "${file("${path.module}/values.tmpl")}" + + vars = { + docker_tag = "${var.docker_tag}" + job_runner = "${var.job_runner}" + job_runner_options = "${var.job_runner_options}" + store_warehouse_option = "'${jsonencode(local.warehouse_option)}'" + store_error_option = "'${jsonencode(local.store_error_option)}'" + store_serving_option = "'${jsonencode(local.serving_option)}'" + project_id = "${var.project_name}" + region = "${var.region}" + workspace = "gs://${var.bucket_name}/workspace" + } +} + +resource "local_file" "helm_values_output" { + content = "${template_file.helm_values.rendered}" + filename = "values.yaml" + + depends_on = ["template_file.helm_values"] +} + +resource "helm_release" "feast" { + name = "feast-it" + chart = "/feast/charts/feast" + + set { + name = "core.service.extIPAdr" + value = "${google_compute_address.core_address.address}" + } + + set { + name = "redis.master.service.loadBalancerIP" + value = "${var.redis_address}" + } + + set { + name = "core.service.loadBalancerSourceRanges[0]" + value = "${var.load_balancer_source_range}" + } + + set { + name = "serving.service.extIPAdr" + value = "${google_compute_address.serving_address.address}" + } + + set { + name = "serving.service.loadBalancerSourceRanges[0]" + value = "${var.load_balancer_source_range}" + } + + set { + name = "statsd.host" + value = "${var.statsd_host}" + } + + values = [ + "${template_file.helm_values.rendered}", + ] + + timeout = 600 + + depends_on = [ + "local_file.helm_values_output", + ] +} diff --git a/testing/integration/tf/modules/feast-helm/output.tf b/testing/integration/tf/modules/feast-helm/output.tf new file mode 100644 index 0000000000..2d0e8eef07 --- /dev/null +++ b/testing/integration/tf/modules/feast-helm/output.tf @@ -0,0 +1,11 @@ +output "core_url" { + value = "${google_compute_address.core_address.address}" +} + +output "serving_url" { + value = "${google_compute_address.serving_address.address}" +} + +output "redis_url" { + value = "${var.redis_address}" +} \ No newline at end of file diff --git a/testing/integration/tf/modules/feast-helm/values.tmpl b/testing/integration/tf/modules/feast-helm/values.tmpl new file mode 100644 index 0000000000..ab1fe95ca3 --- /dev/null +++ b/testing/integration/tf/modules/feast-helm/values.tmpl @@ -0,0 +1,105 @@ +--- +core: + image: + pullPolicy: IfNotPresent + registry: gcr.io/kf-feast + repository: feast-core + tag: "${docker_tag}" + replicaCount: 1 + resources: + limits: + cpu: 4 + memory: 8G + requests: + cpu: 1 + memory: 4G + rollingUpdate: + maxSurge: 2 + maxUnavailable: 0 + service: + grpc: + port: 6565 + targetPort: 6565 + http: + port: 80 + targetPort: 8080 + jobs: + workspace: "${workspace}" + runner: "${job_runner}" + options: ${job_runner_options} + monitoring: + period: 5000 + initialDelay: 60000 + trainingDatasetPrefix: "fs" + projectId: "kf-feast" + logType: JSON +dataflow: + projectID: "${project_id}" + location: "${region}" + +postgresql: + name: feast-metadata + service: + port: 5432 + postgresPassword: password + +postgresql: + provision: true + imageTag: 9.6.11 + service: + port: 5432 + 
+redis: + name: feast-redis + provision: true + master: + service: + type: LoadBalancer + annotations: + "cloud.google.com/load-balancer-type": "Internal" + usePassword: false + +serving: + config: + maxEntityPerBatch: 2000 + maxNumberOfThread: 1024 + redisPool: + maxIdle: 16 + maxSize: 1024 + timeout: 1 + image: + pullPolicy: IfNotPresent + registry: gcr.io/kf-feast + repository: feast-serving + tag: "${docker_tag}" + jaeger: + enabled: false + replicaCount: 1 + resources: + limits: + cpu: 4 + memory: 8G + requests: + cpu: 2 + memory: 2G + rollingUpdate: + maxSurge: 2 + maxUnavailable: 0 + service: + grpc: + port: 6566 + targetPort: 6566 + http: + port: 80 + targetPort: 8080 + +store: + errors: + type: "file.json" + options: ${store_error_option} + warehouse: + type: "bigquery" + options: ${store_warehouse_option} + serving: + type: "redis" + options: ${store_serving_option} \ No newline at end of file diff --git a/testing/prow/config.yaml b/testing/prow/config.yaml index 7289d1372a..6ff78541f1 100644 --- a/testing/prow/config.yaml +++ b/testing/prow/config.yaml @@ -7,6 +7,14 @@ presubmits: - image: us.gcr.io/kf-feast/test-image:1.0.0 args: ["make", "test"] imagePullPolicy: Always + - name: presubmit-integration-test-batch + always_run: false + spec: + containers: + - image: us.gcr.io/kf-feast/gcloud-argo:1.0.0 + args: ["make", "test-integration", "ID=$PULL_NUMBER", "TYPE=PR"] + imagePullPolicy: Always + trigger: integration-batch postsubmits: gojek/feast: - name: postsubmit-unit-tests @@ -16,7 +24,14 @@ postsubmits: - image: us.gcr.io/kf-feast/test-image:1.0.0 args: ["make", "test"] imagePullPolicy: Always - + - name: postsubmit-integration-test-batch + always_run: true + spec: + containers: + - image: us.gcr.io/kf-feast/gcloud-argo:1.0.0 + args: ["make", "test-integration", "ID=$PULL_BASE_REF", "TYPE=GITREF"] + imagePullPolicy: Always + trigger: integration-batch tide: queries: - repos: diff --git a/testing/tf/cloud-build/cloudbuild_master.yaml b/testing/tf/cloud-build/cloudbuild_master.yaml deleted file mode 100644 index 743e7588a5..0000000000 --- a/testing/tf/cloud-build/cloudbuild_master.yaml +++ /dev/null @@ -1,19 +0,0 @@ -steps: -- name: maven:3.6.0-jdk-8-slim - args: ['mvn', 'clean', 'install', '-Drevision=$COMMIT_SHA', '-DskipTests'] - id: build-jars - -- name: "gcr.io/cloud-builders/docker" - args: ['build', '-t', 'gcr.io/$PROJECT_ID/feast-core:$COMMIT_SHA', '--build-arg', 'RELEASE=$COMMIT_SHA', '-f', './docker/core/Dockerfile', '.'] - id: docker-core - waitFor: - - build-jars - -- name: "gcr.io/cloud-builders/docker" - args: ['build', '-t', 'gcr.io/$PROJECT_ID/feast-serving:$COMMIT_SHA', '--build-arg', 'RELEASE=$COMMIT_SHA', '-f', './docker/serving/Dockerfile', '.'] - id: docker-serving - waitFor: - - build-jars - -timeout: 1200s -images: ['gcr.io/$PROJECT_ID/feast-core:$COMMIT_SHA', 'gcr.io/$PROJECT_ID/feast-serving:$COMMIT_SHA'] \ No newline at end of file diff --git a/testing/tf/terraform.tfvars b/testing/tf/terraform.tfvars deleted file mode 100644 index a8b45f6411..0000000000 --- a/testing/tf/terraform.tfvars +++ /dev/null @@ -1,7 +0,0 @@ -gcp_project = "kf-feast" - -default_network = "default" - -default_subnet = "default" - -default_region = "us-central1"
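For completeness, a hedged sketch of the intended flow behind the new Prow jobs above: each job calls the integration `Makefile`, whose `ifeq ($(TYPE), PR)` branch decides which Argo workflow parameter carries the ID. `$PULL_NUMBER` and `$PULL_BASE_REF` are values supplied by Prow, shown here as shell variables purely for illustration:

```bash
# Presubmit (TYPE=PR): the pull request number becomes the prId workflow parameter.
make test-integration TYPE=PR ID="$PULL_NUMBER"
#   -> argo submit --watch tests/redis-bq-dataflow/workflow.yaml -p prId=$PULL_NUMBER

# Postsubmit (TYPE=GITREF): any other TYPE falls through to the gitRef parameter.
make test-integration TYPE=GITREF ID="$PULL_BASE_REF"
#   -> argo submit --watch tests/redis-bq-dataflow/workflow.yaml -p gitRef=$PULL_BASE_REF
```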